// NOTE: dataset-extraction artifact (table header residue) removed; not part of the source.
import * as tf from '@tensorflow/tfjs-core';
import {fetch, performance} from '../core/compat/global';
import * as logging from '../core/logging';
import * as sequences from '../core/sequences';
import {INoteSequence} from '../protobuf/index';
import {IS_IOS, NUM_PITCHES, pianorollToSequence, sequenceToPianoroll} from './coconet_utils';
/**
 * An interface for providing an infilling mask: one (step, voice) cell of
 * the pianoroll that the model should regenerate.
 * @param step The quantized time step at which to infill.
 * @param voice The voice to infill at the time step (0=Soprano, 1=Alto,
 * 2=Tenor, 3=Bass).
 */
interface InfillMask {
  step: number;
  voice: number;
}
/**
 * An interface for providing configurable properties to a Coconet model.
 * @param temperature (Optional) The softmax temperature to use when sampling
 * from the logits. Default is 0.99.
 * @param numIterations (Optional) The number of Gibbs sampling iterations
 * before the final output. Fewer iterations will be faster but have poorer
 * results; more iterations will be slower but have better results.
 * @param infillMask (Optional) Array of timesteps at which the model should
 * infill notes. The array should contain pairs of the form
 * `{step: number, voice: number}`, indicating which voice should be
 * infilled for a particular step. If this value isn't provided, then the model
 * will attempt to infill all the "silent" steps in the input sequence.
 */
interface CoconetConfig {
  temperature?: number;
  numIterations?: number;
  infillMask?: InfillMask[];
}
/**
 * Per-layer configuration of the convnet. All fields are optional; a layer
 * only applies the operations whose fields are set (see ConvNet.apply*).
 */
interface LayerSpec {
  // Max-pool window [height, width]; pooling is skipped when absent.
  pooling?: number[];
  // Conv filter shape [h, w, inChannels, outChannels]; conv skipped if absent.
  filters?: number[];
  // 'identity' disables the default ReLU activation.
  activation?: string;
  // Dilation rates [h, w] for the (separable) convolution.
  dilation?: number[];
  // NOTE(review): appears unused by ConvNet in this file — confirm callers.
  activeNoteRGB?: string;
  minPitch?: number;
  maxPitch?: number;
  poolPad?: 'same'|'valid';
  convPad?: string;
  convStride?: number;
}
/**
 * Full architecture specification for a Coconet checkpoint.
 */
interface ModelSpec {
  // When true, predictions are softmaxed over pitches; otherwise sigmoid.
  useSoftmaxLoss: boolean;
  batchNormVarianceEpsilon: number;
  // Number of voices (channels in the pianoroll).
  numInstruments: number;
  numFilters: number;
  numLayers: number;
  // Layers before this index use regular conv; from it on, separable conv.
  numRegularConvLayers: number;
  // Per-middle-layer dilation rates [h, w].
  dilation?: number[][];
  // Concrete layer list; built by Coconet.instantiateFromSpec().
  layers: LayerSpec[];
  // Interval at which pointwise-split layers are interleaved.
  interleaveSplitEveryNLayers?: number;
  numPointwiseSplits?: number;
}
// Specification matching the pretrained 33-layer Coconet checkpoint.
const DEFAULT_SPEC: ModelSpec = {
  useSoftmaxLoss: true,
  batchNormVarianceEpsilon: 1.0e-07,
  numInstruments: 4,
  numFilters: 128,
  numLayers: 33,
  numRegularConvLayers: 0,
  // Dilation rates [h, w] for the middle convolutional layers, cycling
  // through increasing receptive fields.
  dilation: [
    [1, 1], [2, 2], [4, 4], [8, 8], [16, 16], [16, 32],
    [1, 1], [2, 2], [4, 4], [8, 8], [16, 16], [16, 32],
    [1, 1], [2, 2], [4, 4], [8, 8], [16, 16], [16, 32],
    [1, 1], [2, 2], [4, 4], [8, 8], [16, 16], [16, 32],
    [1, 1], [2, 2], [4, 4], [8, 8], [16, 16], [16, 32]
  ],
  // Filled in later by Coconet.instantiateFromSpec().
  layers: null,
  interleaveSplitEveryNLayers: 16,
  numPointwiseSplits: 4,
};
/**
 * Runs the Coconet convnet forward pass (convolution → residual →
 * activation → pooling per layer) on a masked pianoroll, using raw
 * checkpoint weights looked up by name.
 */
class ConvNet {
  // Residual connections are scheduled every `residualPeriod` layers.
  private residualPeriod = 2;
  // Featuremap stashed by residualSave for a later residual addition.
  private outputForResidual: tf.Tensor = null;
  // Layer counter driving the residual save/apply schedule.
  private residualCounter = -1;
  private spec: ModelSpec;
  // Save for disposal.
  private rawVars: {[varName: string]: tf.Tensor} = null;

  constructor(spec: ModelSpec, vars: {[varName: string]: tf.Tensor}) {
    this.spec = spec;
    this.rawVars = vars;
  }

  /** Frees the checkpoint weights and any retained residual tensor. */
  dispose() {
    if (this.rawVars !== null) {
      tf.dispose(this.rawVars);
    }
    if (this.outputForResidual) {
      this.outputForResidual.dispose();
    }
  }

  /**
   * Runs the full forward pass over every layer in the spec and returns
   * the model's predictions for the masked pianoroll.
   */
  public predictFromPianoroll(pianoroll: tf.Tensor4D, masks: tf.Tensor4D):
      tf.Tensor {
    return tf.tidy(() => {
      let featuremaps = this.getConvnetInput(pianoroll, masks);

      const n = this.spec.layers.length;
      for (let i = 0; i < n; i++) {
        this.residualCounter += 1;
        this.residualSave(featuremaps);
        let numPointwiseSplits = null;
        // Interleave pointwise-split layers at a fixed interval, skipping
        // the first layer and the final two.
        if (this.spec.interleaveSplitEveryNLayers && i > 0 && i < n - 2 &&
            i % (this.spec.interleaveSplitEveryNLayers + 1) === 0) {
          numPointwiseSplits = this.spec.numPointwiseSplits;
        }
        featuremaps = this.applyConvolution(
            featuremaps, this.spec.layers[i], i,
            i >= this.spec.numRegularConvLayers, numPointwiseSplits);
        featuremaps = this.applyResidual(featuremaps, i === 0, i === n - 1, i);
        featuremaps = this.applyActivation(featuremaps, this.spec.layers[i], i);
        featuremaps = this.applyPooling(featuremaps, this.spec.layers[i], i);
      }
      return this.computePredictions(featuremaps);
    });
  }

  /**
   * Converts final-layer logits to probabilities: softmax over the pitch
   * axis when configured for softmax loss, otherwise element-wise sigmoid.
   */
  private computePredictions(logits: tf.Tensor): tf.Tensor {
    if (this.spec.useSoftmaxLoss) {
      // Softmax is taken over axis 2 (pitch) by transposing it last.
      return logits.transpose([0, 1, 3, 2]).softmax().transpose([0, 1, 3, 2]);
    }
    return logits.sigmoid();
  }

  // Clears the stashed residual and restarts the residual schedule.
  private residualReset() {
    this.outputForResidual = null;
    this.residualCounter = 0;
  }

  // Stashes the featuremap on odd counter values for the next residual add.
  private residualSave(x: tf.Tensor) {
    if (this.residualCounter % this.residualPeriod === 1) {
      this.outputForResidual = x;
    }
  }

  /**
   * Adds the stashed featuremap to `x` when the schedule allows and the
   * channel counts match; resets the schedule on a channel mismatch.
   */
  private applyResidual(
      x: tf.Tensor4D, isFirst: boolean, isLast: boolean,
      i: number): tf.Tensor4D {
    if (this.outputForResidual == null) {
      return x;
    }
    if (this.outputForResidual
            .shape[this.outputForResidual.shape.length - 1] !==
        x.shape[x.shape.length - 1]) {
      this.residualReset();
      return x;
    }
    if (this.residualCounter % this.residualPeriod === 0) {
      if (!isFirst && !isLast) {
        x = x.add(this.outputForResidual);
      }
    }
    return x;
  }

  // Looks up a plain-convolution weight tensor by layer number.
  private getVar(name: string, layerNum: number): tf.Tensor4D {
    const varname = `model/conv${layerNum}/${name}`;
    return this.rawVars[varname] as tf.Tensor4D;
  }

  // Looks up a separable-convolution weight tensor by layer number.
  private getSepConvVar(name: string, layerNum: number): tf.Tensor4D {
    const varname = `model/conv${layerNum}/SeparableConv2d/${name}`;
    return this.rawVars[varname] as tf.Tensor4D;
  }

  // Looks up a pointwise-split weight tensor for one split of a layer.
  private getPointwiseSplitVar(
      name: string, layerNum: number, splitNum: number) {
    // tslint:disable-next-line:max-line-length
    const varname =
        `model/conv${layerNum}/split_${layerNum}_${splitNum}/${name}`;
    return this.rawVars[varname];
  }

  /**
   * Applies this layer's convolution (regular, separable, or depthwise +
   * split pointwise matmuls) followed by batchnorm. Returns `x` unchanged
   * when the layer has no filters.
   */
  private applyConvolution(
      x: tf.Tensor4D, layer: LayerSpec, i: number, depthwise: boolean,
      numPointwiseSplits?: number): tf.Tensor4D {
    if (layer.filters == null) {
      return x;
    }
    const filterShape = layer.filters;
    const stride = layer.convStride || 1;
    const padding = layer.convPad ?
        layer.convPad.toLowerCase() as 'same' | 'valid' :
        'same';
    let conv = null;
    if (depthwise) {
      const dWeights = this.getSepConvVar('depthwise_weights', i);
      if (!numPointwiseSplits) {
        // Standard separable conv: depthwise + single pointwise + bias.
        const pWeights = this.getSepConvVar('pointwise_weights', i);
        const biases = this.getSepConvVar('biases', i);
        const sepConv = tf.separableConv2d(
            x, dWeights, pWeights, [stride, stride], padding,
            (layer.dilation as [number, number]), 'NHWC');
        conv = sepConv.add(biases);
      } else {
        // Depthwise conv, then the pointwise step is done as several
        // smaller matmuls over channel splits (checkpoint stores split
        // kernels/biases separately).
        conv = tf.depthwiseConv2d(
            x, dWeights, [stride, stride], padding, 'NHWC',
            (layer.dilation as [number, number]));
        const splits = tf.split(conv, numPointwiseSplits, conv.rank - 1);
        const pointwiseSplits = [];
        for (let splitIdx = 0; splitIdx < numPointwiseSplits; splitIdx++) {
          const outputShape = filterShape[3] / numPointwiseSplits;
          const weights = this.getPointwiseSplitVar('kernel', i, splitIdx);
          const biases = this.getPointwiseSplitVar('bias', i, splitIdx);
          const dot = tf.matMul(
              splits[splitIdx].reshape([-1, outputShape]), weights, false,
              false);
          const bias = tf.add(dot, biases);
          pointwiseSplits.push(bias.reshape([
            splits[splitIdx].shape[0], splits[splitIdx].shape[1],
            splits[splitIdx].shape[2], outputShape
          ]));
        }
        conv = tf.concat(pointwiseSplits, conv.rank - 1);
      }
    } else {
      // NOTE(review): stride/padding recomputed here duplicate the values
      // computed above.
      const weights = this.getVar('weights', i);
      const stride = layer.convStride || 1;
      const padding = layer.convPad ?
          layer.convPad.toLowerCase() as 'same' | 'valid' :
          'same';
      conv = tf.conv2d(x, weights, [stride, stride], padding, 'NHWC', [1, 1]);
    }
    return this.applyBatchnorm(conv as tf.Tensor4D, i) as tf.Tensor4D;
  }

  /** Applies the layer's population batchnorm using checkpoint statistics. */
  private applyBatchnorm(x: tf.Tensor4D, i: number): tf.Tensor {
    const gammas = this.getVar('gamma', i);
    const betas = this.getVar('beta', i);
    const mean = this.getVar('popmean', i);
    const variance = this.getVar('popvariance', i);
    if (IS_IOS) {
      // iOS WebGL floats are 16-bit, and the variance is outside this range.
      // This loads the variance to 32-bit floats in JS to compute batchnorm.
      // This arraySync is OK because we don't use the variance anywhere,
      // so it doesn't actually get uploaded to the GPU, so we don't
      // continuously download it and upload it which is the problem with
      // dataSync.
      const v = variance.arraySync()[0][0][0];
      const stdevs = tf.tensor(v.map(
          (x: number) => Math.sqrt(x + this.spec.batchNormVarianceEpsilon)));
      return x.sub(mean).mul(gammas.div(stdevs)).add(betas);
    }
    return tf.batchNorm(
        x, tf.squeeze(mean), tf.squeeze(variance), tf.squeeze(betas),
        tf.squeeze(gammas), this.spec.batchNormVarianceEpsilon);
  }

  // ReLU unless the layer explicitly requests the identity activation.
  private applyActivation(x: tf.Tensor4D, layer: LayerSpec, i: number):
      tf.Tensor4D {
    if (layer.activation === 'identity') {
      return x;
    }
    return x.relu();
  }

  // Max-pools with the layer's window (window == stride); no-op if unset.
  private applyPooling(x: tf.Tensor4D, layer: LayerSpec, i: number):
      tf.Tensor4D {
    if (layer.pooling == null) {
      return x;
    }
    const pooling = layer.pooling;
    const padding = layer.poolPad ?
        layer.poolPad.toLowerCase() as 'same' | 'valid' :
        'same';
    return tf.maxPool(
        x, [pooling[0], pooling[1]], [pooling[0], pooling[1]], padding);
  }

  /**
   * Builds the network input: the pianoroll with masked cells zeroed,
   * concatenated channel-wise with the inverted masks.
   */
  private getConvnetInput(pianoroll: tf.Tensor4D, masks: tf.Tensor4D):
      tf.Tensor4D {
    pianoroll = tf.scalar(1, 'float32').sub(masks).mul(pianoroll);
    masks = tf.scalar(1, 'float32').sub(masks);
    return pianoroll.concat(masks, 3);
  }
}
/**
* Coconet model implementation in TensorflowJS.
* Thanks to [James Wexler](https://github.com/jameswex) for the original
* implementation.
*/
class Coconet {
  private checkpointURL: string;
  private spec: ModelSpec = null;
  private convnet: ConvNet;
  private initialized = false;

  /**
   * `Coconet` constructor.
   *
   * @param checkpointURL Path to the checkpoint directory.
   */
  constructor(checkpointURL: string) {
    this.checkpointURL = checkpointURL;
    // Shallow-copy the default spec: instantiateFromSpec() assigns
    // this.spec.layers, and aliasing DEFAULT_SPEC directly would mutate
    // the shared module-level constant across Coconet instances.
    this.spec = {...DEFAULT_SPEC};
  }

  /**
   * Loads variables from the checkpoint and instantiates the model.
   */
  async initialize() {
    this.dispose();
    const startTime = performance.now();
    this.instantiateFromSpec();
    const vars = await fetch(`${this.checkpointURL}/weights_manifest.json`)
        .then((response) => response.json())
        .then(
            (manifest: tf.io.WeightsManifestConfig) =>
                tf.io.loadWeights(manifest, this.checkpointURL));
    this.convnet = new ConvNet(this.spec, vars);
    this.initialized = true;
    logging.logWithDuration('Initialized model', startTime, 'Coconet');
  }

  /** Releases model weights; the model must be re-initialized after this. */
  dispose() {
    if (this.convnet) {
      this.convnet.dispose();
    }
    this.initialized = false;
  }

  /** Whether initialize() has completed since the last dispose(). */
  isInitialized() {
    return this.initialized;
  }

  /**
   * Sets up layer configuration from params
   */
  instantiateFromSpec() {
    // Outermost dimensions' sizes of the non-final layers in the network.
    const nonFinalLayerFilterOuterSizes = 3;
    // Outermost dimensions' sizes of the last two layers in the network.
    const finalTwoLayersFilterOuterSizes = 2;
    this.spec.layers = [];
    // Set-up filter size of first convolutional layer.
    this.spec.layers.push({
      filters: [
        nonFinalLayerFilterOuterSizes, nonFinalLayerFilterOuterSizes,
        this.spec.numInstruments * 2, this.spec.numFilters
      ]
    });
    // Set-up filter sizes of middle convolutional layers.
    for (let i = 0; i < this.spec.numLayers - 3; i++) {
      this.spec.layers.push({
        filters: [
          nonFinalLayerFilterOuterSizes, nonFinalLayerFilterOuterSizes,
          this.spec.numFilters, this.spec.numFilters
        ],
        dilation: this.spec.dilation ? this.spec.dilation[i] : null
      });
    }
    // Set-up filter size of penultimate convolutional layer.
    this.spec.layers.push({
      filters: [
        finalTwoLayersFilterOuterSizes, finalTwoLayersFilterOuterSizes,
        this.spec.numFilters, this.spec.numFilters
      ]
    });
    // Set-up filter size and activation of final convolutional layer.
    this.spec.layers.push({
      filters: [
        finalTwoLayersFilterOuterSizes, finalTwoLayersFilterOuterSizes,
        this.spec.numFilters, this.spec.numInstruments
      ],
      activation: 'identity',
    });
  }

  /**
   * Use the model to generate a Bach-style 4-part harmony, conditioned on an
   * input sequence. The notes in the input sequence should have the
   * `instrument` property set corresponding to which voice the note belongs
   * to: 0 for Soprano, 1 for Alto, 2 for Tenor and 3 for Bass.
   *
   * **Note**: regardless of the length of the notes in the original sequence,
   * all the notes in the generated sequence will be 1 step long. If you want
   * to clean up the sequence to consider consecutive notes for the same
   * pitch and instruments as "held", you can call `mergeHeldNotes` on the
   * result. This function will replace any of the existing voices with
   * the output of the model. If you want to restore any of the original
   * voices, you can call `replaceVoice` on the output, specifying which voice
   * should be restored.
   *
   * @param sequence The sequence to infill. Must be quantized.
   * @param config (Optional) Infill parameterers like temperature, the number
   * of sampling iterations, or masks.
   */
  async infill(sequence: INoteSequence, config?: CoconetConfig) {
    sequences.assertIsRelativeQuantizedSequence(sequence);
    if (sequence.notes.length === 0) {
      throw new Error(
          `NoteSequence ${sequence.id} does not have any notes to infill.`);
    }
    const numSteps = sequence.totalQuantizedSteps ||
        sequence.notes[sequence.notes.length - 1].quantizedEndStep;

    // Convert the sequence to a pianoroll.
    const pianoroll = sequenceToPianoroll(sequence, numSteps);

    // Figure out the sampling configuration.
    let temperature = 0.99;
    let numIterations = 96;
    let outerMasks;
    if (config) {
      numIterations = config.numIterations || numIterations;
      temperature = config.temperature || temperature;
      outerMasks =
          this.getCompletionMaskFromInput(config.infillMask, pianoroll);
    } else {
      outerMasks = this.getCompletionMask(pianoroll);
    }

    // Run sampling on the pianoroll.
    const samples =
        await this.run(pianoroll, numIterations, temperature, outerMasks);

    // Convert the resulting pianoroll to a noteSequence.
    const outputSequence = pianorollToSequence(samples, numSteps);
    pianoroll.dispose();
    samples.dispose();
    outerMasks.dispose();
    return outputSequence;
  }

  /**
   * Runs sampling on pianorolls.
   */
  private async run(
      pianorolls: tf.Tensor4D, numSteps: number, temperature: number,
      outerMasks: tf.Tensor4D): Promise<tf.Tensor4D> {
    return this.gibbs(pianorolls, numSteps, temperature, outerMasks);
  }

  /**
   * Builds a completion mask from user-provided (step, voice) pairs, or
   * falls back to masking all silent steps when no pairs are given.
   */
  private getCompletionMaskFromInput(
      masks: InfillMask[], pianorolls: tf.Tensor4D): tf.Tensor4D {
    if (!masks) {
      return this.getCompletionMask(pianorolls);
    } else {
      // Create a buffer to store the input.
      const buffer = tf.buffer([pianorolls.shape[1], 4]);
      for (let i = 0; i < masks.length; i++) {
        buffer.set(1, masks[i].step, masks[i].voice);
      }
      // Expand that buffer to the right shape.
      return tf.tidy(() => {
        return buffer.toTensor()
                   .expandDims(1)
                   .tile([1, NUM_PITCHES, 1])
                   .expandDims(0) as tf.Tensor4D;
      });
    }
  }

  /** Masks every (step, voice) cell whose pitch column is entirely silent. */
  private getCompletionMask(pianorolls: tf.Tensor4D): tf.Tensor4D {
    return tf.tidy(() => {
      const isEmpty = pianorolls.sum(2, true).equal(tf.scalar(0, 'float32'));
      // Explicit broadcasting.
      return tf.cast(isEmpty, 'float32').add(tf.zerosLike(pianorolls));
    });
  }

  /**
   * Gibbs sampling loop: each iteration masks a random subset of the outer
   * mask (per the Yao schedule), predicts, and resamples only those cells.
   */
  private async gibbs(
      pianorolls: tf.Tensor4D, numSteps: number, temperature: number,
      outerMasks: tf.Tensor4D): Promise<tf.Tensor4D> {
    const numStepsTensor = tf.scalar(numSteps, 'float32');
    let pianoroll = pianorolls.clone();
    for (let s = 0; s < numSteps; s++) {
      const pm = this.yaoSchedule(s, numStepsTensor);
      const innerMasks = this.bernoulliMask(pianoroll.shape, pm, outerMasks);
      // Yield to the UI between the heavy tensor ops.
      await tf.nextFrame();
      const predictions = tf.tidy(() => {
        return this.convnet.predictFromPianoroll(pianoroll, innerMasks);
      }) as tf.Tensor4D;
      await tf.nextFrame();
      pianoroll = tf.tidy(() => {
        const samples =
            this.samplePredictions(predictions, temperature) as tf.Tensor4D;
        const updatedPianorolls =
            tf.where(tf.cast(innerMasks, 'bool'), samples, pianoroll);
        // Dispose per-iteration tensors explicitly; they were created
        // outside this tidy.
        pianoroll.dispose();
        predictions.dispose();
        innerMasks.dispose();
        pm.dispose();
        return updatedPianorolls;
      });
      await tf.nextFrame();
    }
    numStepsTensor.dispose();
    return pianoroll;
  }

  /**
   * Annealing schedule for the masking probability at iteration `i` of `n`:
   * max(pmin, (pmax - (pmax - pmin) * i / n) / alpha).
   */
  private yaoSchedule(i: number, n: tf.Scalar) {
    return tf.tidy(() => {
      const pmin = tf.scalar(0.1, 'float32');
      const pmax = tf.scalar(0.9, 'float32');
      const alpha = tf.scalar(0.7, 'float32');
      const wat = pmax.sub(pmin).mul(tf.scalar(i, 'float32')).div(n);
      const secondArg = pmax.sub(wat).div(alpha);
      return pmin.reshape([1]).concat(secondArg.reshape([1])).max();
    });
  }

  /**
   * Draws a Bernoulli(pm) mask per (batch, step, instrument), broadcast
   * over pitch, intersected with the outer mask.
   */
  private bernoulliMask(
      shape: number[], pm: tf.Tensor, outerMasks: tf.Tensor4D): tf.Tensor4D {
    return tf.tidy(() => {
      const [bb, tt, pp, ii] = shape;
      const probs = tf.tile(
          tf.randomUniform([bb, tt, 1, ii], 0, 1, 'float32'), [1, 1, pp, 1]);
      const masks = probs.less(pm);
      return tf.cast(masks, 'float32').mul(outerMasks);
    });
  }

  /**
   * Samples one pitch per (batch, step, instrument) from the temperature-
   * adjusted predictions via inverse-CDF sampling, returned one-hot.
   */
  private samplePredictions(predictions: tf.Tensor4D, temperature: number):
      tf.Tensor {
    return tf.tidy(() => {
      predictions = tf.pow(predictions, tf.scalar(1 / temperature, 'float32'));
      const cmf = tf.cumsum(predictions, 2, false, false);
      const totalMasses = cmf.slice(
          [0, 0, cmf.shape[2] - 1, 0],
          [cmf.shape[0], cmf.shape[1], 1, cmf.shape[3]]);
      const u = tf.randomUniform(totalMasses.shape, 0, 1, 'float32');
      // argMax over "u*total < cmf" finds the first CDF bin exceeding u.
      const i = u.mul(totalMasses).less(cmf).argMax(2);
      return tf.oneHot(i.flatten(), predictions.shape[2], 1, 0)
          .reshape([
            predictions.shape[0], predictions.shape[1], predictions.shape[3],
            predictions.shape[2]
          ])
          .transpose([0, 1, 3, 2]);
    });
  }
}
export {Coconet};
import download = require('download');
import * as fs from 'fs-extra';
import * as path from 'path';
import { Constants } from '../../Constants';
import { getWorkspaceRoot, userSettings } from '../../helpers';
import { Output } from '../../Output';
import { ContractService } from '../../services';
import { Telemetry } from '../../TelemetryClient';
import calculateHash from './fileHashGenerator';
import { CurrentOpenZeppelinVersionLocation, InvalidOpenZeppelinVersionException } from './InvalidOpenZeppelinVersionException';
import {
IDownloadingResult,
IOZAsset,
IOZContractCategory,
IProjectMetadata,
OZAssetType,
OZContractValidated,
PromiseState,
} from './models';
import { OpenZeppelinManifest } from './OpenZeppelinManifest';
import { OpenZeppelinProjectJsonService } from './OpenZeppelinProjectJsonService';
// Subfolder (under the Solidity contracts folder) holding OZ assets.
const openZeppelinFolderName = 'openZeppelin';
// Prefix of the temporary working folder created during upgrades.
const userTmpFolder = '.tmp';
/**
 * Manages OpenZeppelin contract assets in the user workspace: version
 * resolution, download, project.json bookkeeping, validation, and upgrade.
 */
export namespace OpenZeppelinService {
  /** Whether an OZ project.json exists inside an open workspace. */
  export function projectJsonExists(): boolean {
    return getWorkspaceRoot(true) !== undefined
      && isFileExists(OpenZeppelinProjectJsonService.getProjectJsonPath());
  }

  /**
   * Resolves the OZ version in effect, throwing when it is not a known
   * version.
   */
  export async function getCurrentOpenZeppelinVersionAsync(): Promise<string> {
    // If version of OZ is exist in project.json then use it,
    // otherwise use version of OZ equal 2.3.0, when project.json is exist.
    // If project.json is not exist, then use version from user settings, otherwise last version for OZ.
    const hasProjectJson = OpenZeppelinService.projectJsonExists();
    let currentVersion: string;
    if (hasProjectJson) {
      const projectMetadata = await OpenZeppelinProjectJsonService.getProjectJson();
      currentVersion = projectMetadata.openZeppelin.version || Constants.firstOZVersion;
    } else {
      const { defaultValue, userValue } =
        await userSettings.getConfigurationAsync(Constants.userSettings.ozVersionUserSettingsKey);
      currentVersion = userValue || defaultValue;
    }

    if (Constants.allOpenZeppelinVersions.indexOf(currentVersion) === -1) {
      throw new InvalidOpenZeppelinVersionException(
        currentVersion,
        hasProjectJson
          ? CurrentOpenZeppelinVersionLocation.projectJson
          : CurrentOpenZeppelinVersionLocation.userSettings,
        Constants.openZeppelin.invalidVersionException);
    }

    return currentVersion;
  }

  /** Persists the OZ version to project.json or user settings. */
  export async function setVersionAsync(version: string, location: CurrentOpenZeppelinVersionLocation): Promise<void> {
    if (location === CurrentOpenZeppelinVersionLocation.projectJson) {
      await OpenZeppelinProjectJsonService.addVersionToProjectJsonAsync(version);
    } else {
      await userSettings.updateConfigurationAsync(Constants.userSettings.ozVersionUserSettingsKey, version);
    }
  }

  /** Returns the latest supported OZ version (the settings default). */
  export async function getLatestOpenZeppelinVersionAsync(): Promise<string> {
    const { defaultValue } = await userSettings.getConfigurationAsync(Constants.userSettings.ozVersionUserSettingsKey);
    return defaultValue;
  }

  /** Lists every asset recorded in project.json. */
  export async function getAllDownloadedAssetsAsync(): Promise<IOZAsset[]> {
    const projectMetadata = await OpenZeppelinProjectJsonService.getProjectJson();
    return projectMetadata.openZeppelin.assets;
  }

  /**
   * Downloads the given assets from `baseUrl`, marking files read-only on
   * success. Existing files are skipped unless `overwrite` is set.
   */
  export async function downloadAssetsAsync(
    baseUrl: string,
    assets: IOZAsset[],
    overwrite: boolean = false,
    destinationFolder?: string)
    : Promise<IDownloadingResult[]> {
    return Promise.all(assets.map(async (asset) => {
      const fileUrl = new URL(asset.name, baseUrl).toString();
      const destinationFilePath = getAssetFullPath(destinationFolder || await getOpenZeppelinFolderPath(), asset);
      const destinationDirPath = path.dirname(destinationFilePath);

      if (fs.existsSync(destinationFilePath)) {
        if (overwrite) {
          await fs.chmod(destinationFilePath, 0o222); // reset r/o flag, this allows to overwrite
        } else {
          Output.outputLine(Constants.outputChannel.azureBlockchain, `${fileUrl} - Skipped`);
          return { state: PromiseState.fileExisted, asset };
        }
      }

      return download(fileUrl, destinationDirPath, { filename: path.basename(destinationFilePath) })
        .then(async () => {
          Output.outputLine(Constants.outputChannel.azureBlockchain, `${fileUrl} - OK`);
          await fs.chmod(destinationFilePath, 0o444);
          return { state: PromiseState.fulfilled, asset };
        })
        .catch(() => {
          Output.outputLine(Constants.outputChannel.azureBlockchain, `${fileUrl} - Failed`);
          return { state: PromiseState.rejected, asset };
        });
    }));
  }

  /** Records version, category, and assets into project.json in one write. */
  export async function updateProjectJsonAsync(version: string, category: IOZContractCategory, assets: IOZAsset[])
    : Promise<void> {
    let updatedProjectJson = await OpenZeppelinProjectJsonService
      .addVersionToProjectJsonAsync(version, false);
    updatedProjectJson = await OpenZeppelinProjectJsonService
      .addCategoryToProjectJsonAsync(category, false, updatedProjectJson);
    updatedProjectJson = await OpenZeppelinProjectJsonService
      .addAssetsToProjectJsonAsync(assets, updatedProjectJson);

    return OpenZeppelinProjectJsonService.storeProjectJsonAsync(updatedProjectJson);
  }

  /** Whether an asset is itself a contract or carries a contracts list. */
  export function assetHasContracts(asset: IOZAsset): boolean {
    return (asset.type === OZAssetType.contract) || !!(asset.contracts && asset.contracts.length);
  }

  /** Returns the contract names an asset provides. */
  export function getContractsNamesFromAsset(asset: IOZAsset): string[] {
    // FIX: the original checked the truthy `asset.type`, which is set for
    // every asset and made the `contracts` branch unreachable. Mirror
    // assetHasContracts and only short-circuit for contract-type assets.
    if (asset.type === OZAssetType.contract) {
      return [getContractNameByAssetName(asset)];
    }

    if (asset.contracts && asset.contracts.length) {
      return asset.contracts.map((contract) => contract);
    }

    return [];
  }

  /**
   * Renders a contract's user-supplied constructor parameters as Solidity
   * literal strings (quoting strings and string arrays).
   */
  export function getContractParametersFromAsset(asset: IOZAsset, contractName: string): string[] {
    if (asset.requiredParameters) {
      const parameters = asset.requiredParameters[contractName];
      if (parameters) {
        return parameters.reduce((arr: string[], param) => {
          if (param.value) {
            if (param.type === 'string') {
              arr.push(`"${param.value}"`);
            } else if (param.type.match(Constants.validationRegexps.types.simpleArray)) {
              arr.push(getArrayParameterValue(param.value, param.type));
            } else {
              arr.push(param.value);
            }
          }
          return arr;
        }, []);
      }
    }
    return [];
  }

  /** Derives a contract name from the asset's file name (sans extension). */
  export function getContractNameByAssetName(asset: IOZAsset): string {
    return path.parse(asset.name).name;
  }

  /** Partitions assets by whether their files exist on disk. */
  export async function getAssetsStatus(assets: IOZAsset[]): Promise<{ existing: IOZAsset[], missing: IOZAsset[] }> {
    const openZeppelinSubfolder = await getOpenZeppelinFolderPath();
    const assetsStatuses = assets.map((asset) => {
      const assetPath = getAssetFullPath(openZeppelinSubfolder, asset);
      return { asset, exists: fs.existsSync(assetPath) };
    });

    return {
      existing: assetsStatuses.filter((status) => status.exists === true).map((status) => status.asset),
      missing: assetsStatuses.filter((status) => status.exists === false).map((status) => status.asset),
    };
  }

  /**
   * Validates every tracked contract: present on disk and hash-matching
   * the recorded download-time hash.
   */
  export async function validateContractsAsync(): Promise<OZContractValidated[]> {
    const openZeppelinSubfolder = await getOpenZeppelinFolderPath();
    // FIX: getProjectJson() is async (awaited everywhere else in this
    // file); the original dropped the await and then dereferenced the
    // Promise as if it were the metadata object.
    const userProjectMetadata = await OpenZeppelinProjectJsonService.getProjectJson();
    const ozContractsPaths = getOzContractsFromProjectMetadata(openZeppelinSubfolder, userProjectMetadata);

    const validatedContracts = [];
    for (const ozContractPath of ozContractsPaths) {
      let validatedContract: OZContractValidated;
      if (isFileExists(ozContractPath)) {
        const originalHash = getOriginalHash(ozContractPath, openZeppelinSubfolder, userProjectMetadata);
        const currentHash = await calculateHash(ozContractPath);
        const isHashValid = originalHash === currentHash;
        validatedContract = new OZContractValidated(ozContractPath, true, isHashValid);
      } else {
        validatedContract = new OZContractValidated(ozContractPath, false);
      }

      validatedContracts.push(validatedContract);
    }

    return validatedContracts;
  }

  /**
   * Upgrades downloaded OZ contracts to `newManifest`'s version: stages
   * new files in a temp folder, backs up the old ones, then swaps them in.
   */
  export async function updateOpenZeppelinContractsAsync(newManifest: OpenZeppelinManifest): Promise<void> {
    const userWorkspace = getWorkspaceRoot(true);
    if (userWorkspace === undefined) {
      return;
    }
    Telemetry.sendEvent('OpenZeppelinService.updateOpenZeppelinContractsAsync.started');
    const userTmpFolderPath = path.join(userWorkspace, `${userTmpFolder}${Date.now()}`);
    const tempNewOzFolder = path.join(userTmpFolderPath, 'new');
    const tempOldOzFolder = path.join(userTmpFolderPath, 'old');
    const tempNewOzContractsFolder = path.join(tempNewOzFolder, openZeppelinFolderName);

    const currentAssets = await getAllDownloadedAssetsAsync();
    const { isDownloadSucceed, newAssets } =
      await downloadNewVersionOfAssetsAsync(currentAssets, newManifest, tempNewOzContractsFolder);

    if (!isDownloadSucceed) {
      throwOpenZeppelinUpgradeException(userTmpFolderPath);
    }
    if (!newAssets.length) {
      return;
    }

    try {
      // Carry user-entered parameter values over into the new assets.
      const mergedAssets = await mergeAssetsWithExisting(newAssets);
      await createNewProjectJsonAsync(newManifest.getVersion(), mergedAssets, tempNewOzFolder);
    } catch {
      throwOpenZeppelinUpgradeException(userTmpFolderPath);
    }

    // Backup old openZeppelin data
    await moveFolderAsync(await getOpenZeppelinFolderPath(), tempOldOzFolder);
    await moveProjectJsonAsync(tempOldOzFolder, true);

    // Update new openZeppelin data
    await moveFolderAsync(tempNewOzContractsFolder, await ContractService.getSolidityContractsFolderPath());
    await moveProjectJsonAsync(tempNewOzFolder, false);

    try {
      fs.removeSync(userTmpFolderPath);
    } catch {
      // Ignore exception since upgrade is finished
    }
    Telemetry.sendEvent('OpenZeppelinService.updateOpenZeppelinContractsAsync.finished');
  }

  /** Lists downloaded assets that declare constructor parameters. */
  export async function getAssetsWithParameters(): Promise<IOZAsset[]> {
    const currentAssets = await getAllDownloadedAssetsAsync();
    return currentAssets.filter((asset) => !!asset.requiredParameters);
  }

  /**
   * Copies user-entered parameter values from existing assets onto
   * matching parameters (same name and type) of `currentAssets`.
   */
  export async function mergeAssetsWithExisting(currentAssets: IOZAsset[]): Promise<IOZAsset[]> {
    const assetsWithParameters = await OpenZeppelinService.getAssetsWithParameters();

    return currentAssets.map((asset) => {
      if (asset.requiredParameters) {
        const assetWithParameters = assetsWithParameters.find((element) => element.id === asset.id);
        if (assetWithParameters && assetWithParameters.requiredParameters) {
          for (const [currentContractName, currentContractParameters] of Object.entries(asset.requiredParameters)) {
            const existedContractParameters = assetWithParameters.requiredParameters[currentContractName];
            if (existedContractParameters) {
              for (const parameter of currentContractParameters) {
                const param = existedContractParameters.find((existedContractParameter) =>
                  existedContractParameter.name === parameter.name &&
                  existedContractParameter.type === parameter.type);
                if (param) {
                  parameter.value = param.value;
                }
              }
            }
          }
        }
      }
      return asset;
    });
  }

  /** Absolute path of the openZeppelin subfolder in the user workspace. */
  export async function getOpenZeppelinFolderPath(): Promise<string> {
    return path.join(await ContractService.getSolidityContractsFolderPath(), openZeppelinFolderName);
  }
}
/**
 * Resolves the absolute on-disk path of every OZ asset recorded in the
 * project metadata, relative to the openZeppelin contracts subfolder.
 */
function getOzContractsFromProjectMetadata(
  openZeppelinSubfolder: string,
  userProjectMetadata: IProjectMetadata)
  : string[] {
  const trackedAssets = Object.values(userProjectMetadata.openZeppelin.assets);
  return trackedAssets.map((trackedAsset) => getAssetFullPath(openZeppelinSubfolder, trackedAsset));
}
/** Joins an asset's relative name onto `baseDir` to get its on-disk path. */
function getAssetFullPath(baseDir: string, asset: IOZAsset): string {
  const relativeName = asset.name;
  return path.join(baseDir, relativeName);
}
/**
 * Checks whether `filePath` points at an existing regular file.
 * Returns false when nothing exists at the path; any other fs error
 * (e.g. permissions) is re-thrown.
 */
function isFileExists(filePath: string): boolean {
  try {
    const stats = fs.lstatSync(filePath);
    return stats.isFile();
  } catch (error) {
    if (error.code !== 'ENOENT') {
      throw error;
    }
    return false;
  }
}
/**
 * Looks up the recorded (download-time) hash for a contract path in the
 * project metadata; returns '' when the asset is not tracked.
 */
function getOriginalHash(ozContractPath: string, openZeppelinSubfolder: string, userProjectMetadata: IProjectMetadata)
  : string {
  // Asset names are stored with forward slashes, so normalize Windows paths.
  const assetName = path
    .relative(openZeppelinSubfolder, ozContractPath)
    .replace(/\\/g, '/');
  const tracked = userProjectMetadata.openZeppelin.assets
    .find((asset) => asset.name === assetName);
  return tracked ? tracked.hash : '';
}
/**
 * Moves `folderPath` (keeping its basename) into `newLocationPath`,
 * creating the destination directory first if needed.
 */
async function moveFolderAsync(folderPath: string, newLocationPath: string): Promise<void> {
  const target = path.join(newLocationPath, path.basename(folderPath));
  await fs.ensureDir(newLocationPath);
  return fs.rename(folderPath, target);
}
/**
 * Moves project.json between the user workspace and a staging folder.
 *
 * @param folderIsDestination true moves the workspace copy INTO the folder
 * (backup); false moves the folder's copy back into the workspace.
 */
async function moveProjectJsonAsync(nonUserProjectFolder: string, folderIsDestination: boolean): Promise<void> {
  const workspaceJsonPath = OpenZeppelinProjectJsonService.getProjectJsonPath();
  const stagedJsonPath = path
    .join(nonUserProjectFolder, OpenZeppelinProjectJsonService.getProjectJsonFileName());
  const [source, destination] = folderIsDestination
    ? [workspaceJsonPath, stagedJsonPath]
    : [stagedJsonPath, workspaceJsonPath];
  return fs.rename(source, destination);
}
/**
 * Builds an updated project.json (new OZ version + assets) and stores it
 * into `folder` rather than over the user's workspace copy.
 */
async function createNewProjectJsonAsync(version: string, assets: IOZAsset[], folder: string): Promise<void> {
  // FIX: getProjectJson() is async (awaited everywhere else in this file);
  // the original dropped the await and passed a Promise downstream.
  let projectJson = await OpenZeppelinProjectJsonService.getProjectJson();
  projectJson = await OpenZeppelinProjectJsonService.addVersionToProjectJsonAsync(version, false, projectJson);
  projectJson = await OpenZeppelinProjectJsonService.addAssetsToProjectJsonAsync(assets, projectJson);
  const newPath = path.join(folder, OpenZeppelinProjectJsonService.getProjectJsonFileName());
  return OpenZeppelinProjectJsonService.storeProjectJsonAsync(projectJson, newPath);
}
/** Cleans up the temporary upgrade folder, then aborts with an upgrade error. */
function throwOpenZeppelinUpgradeException(tempFolder: string): void {
  fs.removeSync(tempFolder);
  const message = Constants.openZeppelin.contractsUpgradeIsFailed;
  throw new Error(message);
}
/**
 * Downloads, into `toFolder`, the new-manifest version of every asset the
 * user already has (matched by file name), including their dependencies.
 *
 * @returns isDownloadSucceed=false when any file fails to download;
 * newAssets is empty when nothing matched or a download failed.
 */
async function downloadNewVersionOfAssetsAsync(
  assets: IOZAsset[],
  newManifest: OpenZeppelinManifest,
  toFolder: string)
  : Promise<{ isDownloadSucceed: boolean, newAssets: IOZAsset[] }> {
  if (!assets.length) {
    return { isDownloadSucceed: true, newAssets: [] };
  }

  // Match the new manifest's assets against the current ones by name.
  const currentNames = assets.map((asset) => asset.name);
  const assetIdsToDownload = newManifest.getAssets()
    .filter((candidate) => currentNames.indexOf(candidate.name) !== -1)
    .map((candidate) => candidate.id);
  if (!assetIdsToDownload.length) {
    return { isDownloadSucceed: true, newAssets: [] };
  }

  const newAssets = newManifest.collectAssetsWithDependencies(assetIdsToDownload);
  const sourceUrl = newManifest.getBaseUrlToContractsSource();
  const results = await OpenZeppelinService.downloadAssetsAsync(sourceUrl, newAssets, true, toFolder);

  const anyFailed = results.some((result) => result.state === PromiseState.rejected);
  return anyFailed
    ? { isDownloadSucceed: false, newAssets: [] }
    : { isDownloadSucceed: true, newAssets };
}
function getArrayParameterValue(value: string, type: string): string {
if (type !== 'string[]') {
return value;
}
const elements = value.slice(1, value.length - 1).split(',');
const values = elements?.map((element) => `"${element.trim()}"`);
return `[${values?.join(', ')}]`;
} | the_stack |
namespace esp32 {
    // pylint: disable=bad-whitespace
    // Command opcodes sent to the Nina firmware over SPI.
    const _SET_NET_CMD = 0x10
    const _SET_PASSPHRASE_CMD = 0x11
    const _SET_DEBUG_CMD = 0x1A
    const _GET_TEMP_CMD = 0x1B
    const _GET_CONN_STATUS_CMD = 0x20
    const _GET_IPADDR_CMD = 0x21
    const _GET_MACADDR_CMD = 0x22
    const _GET_CURR_SSID_CMD = 0x23
    const _GET_CURR_RSSI_CMD = 0x25
    const _GET_CURR_ENCT_CMD = 0x26
    const _SCAN_NETWORKS = 0x27
    const _GET_SOCKET_CMD = 0x3F
    const _GET_STATE_TCP_CMD = 0x29
    const _DATA_SENT_TCP_CMD = 0x2A
    const _AVAIL_DATA_TCP_CMD = 0x2B
    const _GET_DATA_TCP_CMD = 0x2C
    const _START_CLIENT_TCP_CMD = 0x2D
    const _STOP_CLIENT_TCP_CMD = 0x2E
    const _GET_CLIENT_STATE_TCP_CMD = 0x2F
    const _DISCONNECT_CMD = 0x30
    const _GET_IDX_RSSI_CMD = 0x32
    const _GET_IDX_ENCT_CMD = 0x33
    const _REQ_HOST_BY_NAME_CMD = 0x34
    const _GET_HOST_BY_NAME_CMD = 0x35
    const _START_SCAN_NETWORKS = 0x36
    const _GET_FW_VERSION_CMD = 0x37
    const _PING_CMD = 0x3E
    const _SEND_DATA_TCP_CMD = 0x44
    const _GET_DATABUF_TCP_CMD = 0x45
    const _SET_ENT_IDENT_CMD = 0x4A
    const _SET_ENT_UNAME_CMD = 0x4B
    const _SET_ENT_PASSWD_CMD = 0x4C
    const _SET_ENT_ENABLE_CMD = 0x4F
    const _SET_PIN_MODE_CMD = 0x50
    const _SET_DIGITAL_WRITE_CMD = 0x51
    const _SET_ANALOG_WRITE_CMD = 0x52
    // Framing bytes for command packets.
    const _START_CMD = 0xE0
    const _END_CMD = 0xEE
    const _ERR_CMD = 0xEF
    // Top bit marks a reply packet; commands carry no flag.
    const _REPLY_FLAG = 1 << 7
    const _CMD_FLAG = 0
    // TCP socket states reported by the firmware.
    export const SOCKET_CLOSED = 0
    export const SOCKET_LISTEN = 1
    export const SOCKET_SYN_SENT = 2
    export const SOCKET_SYN_RCVD = 3
    export const SOCKET_ESTABLISHED = 4
    export const SOCKET_FIN_WAIT_1 = 5
    export const SOCKET_FIN_WAIT_2 = 6
    export const SOCKET_CLOSE_WAIT = 7
    export const SOCKET_CLOSING = 8
    export const SOCKET_LAST_ACK = 9
    export const SOCKET_TIME_WAIT = 10
    // WiFi link states (WL_*) reported by the firmware.
    export const WL_NO_SHIELD = 0xFF
    export const WL_NO_MODULE = 0xFF
    export const WL_IDLE_STATUS = 0
    export const WL_NO_SSID_AVAIL = 1
    export const WL_SCAN_COMPLETED = 2
    export const WL_CONNECTED = 3
    export const WL_CONNECT_FAILED = 4
    export const WL_CONNECTION_LOST = 5
    export const WL_DISCONNECTED = 6
    export const WL_AP_LISTENING = 7
    export const WL_AP_CONNECTED = 8
    export const WL_AP_FAILED = 9
function buffer1(ch: number) {
const b = control.createBuffer(1)
b[0] = ch
return b
}
/**
 * Driver for an ESP32 WiFi co-processor running NINA-style firmware,
 * spoken to over SPI using the Adafruit ESP32SPI command/response
 * protocol: each exchange is one framed command packet followed by one
 * framed reply packet, gated by the module's "busy" pin.
 */
export class NinaController extends net.Controller {
    // Single-element parameter list holding a reusable 1-byte buffer with
    // the current socket number (avoids allocating per socket command).
    private _socknum_ll: Buffer[];
    // Cooperative lock serializing command/response exchanges.
    private _locked: boolean;
    // Set once we have successfully associated with an access point.
    public wasConnected: boolean;

    constructor(
        private _spi: SPI,
        private _cs: DigitalInOutPin,
        private _busy: DigitalInOutPin,
        private _reset: DigitalInOutPin,
        private _gpio0: DigitalInOutPin = null
    ) {
        super();
        // if nothing connected, pretend the device is ready -
        // we'll check for timeout waiting for response instead
        this._busy.setPull(PinPullMode.PullDown);
        this._busy.digitalRead();
        this._socknum_ll = [buffer1(0)]
        this._spi.setFrequency(8000000);
        this.reset();
        this._locked = false;
    }

    /**
     * Hard reset the ESP32 using the reset pin
     */
    public reset(): void {
        if (this._gpio0)
            this._gpio0.digitalWrite(true);
        this._cs.digitalWrite(true)
        this._reset.digitalWrite(false)
        // hold reset low briefly
        pause(10)
        this._reset.digitalWrite(true)
        // wait for it to boot up
        pause(750)
        if (this._gpio0)
            this._gpio0.digitalRead();
        // make sure SPI gets initialized while the CS is up
        this.spiTransfer(control.createBuffer(1), null)
        net.log('reset esp32') // FIX: log typo "reseted"
    }

    /** Read a single byte from the SPI bus. */
    private readByte(): number {
        const r = buffer1(0)
        this.spiTransfer(null, r)
        return r[0]
    }

    /**
     * Read one byte and verify it matches the expected protocol byte;
     * reports a failure through net.fail() on mismatch.
     * FIX: previously returned false unconditionally despite the boolean
     * return type; now returns whether the byte matched. (All existing
     * callers ignore the return value, so this is backward-compatible.)
     */
    private checkData(desired: number, msg?: string): boolean {
        const r = this.readByte()
        if (r != desired)
            net.fail(`Expected ${desired} but got ${r}; ` + (msg || ""))
        return r == desired;
    }

    /** Read a byte with a time-out, and if we get it, check that its what we expect */
    private waitSPIChar(desired: number): boolean {
        let times = control.millis()
        // poll for up to 100ms for the desired framing byte
        while (control.millis() - times < 100) {
            let r = this.readByte()
            if (r == _ERR_CMD) {
                net.log("error response to command")
                return false
            }
            if (r == desired) {
                return true
            }
            //net.log(`read char ${r}, expected ${desired}`)
        }
        net.log("timed out waiting for SPI char")
        return false;
    }

    /**
     * Wait until the ready pin goes low
     * @returns true when the module is ready, false on a 10s timeout
     */
    private waitForReady() {
        net.debug(`wait for ready ${this._busy.digitalRead()}`);
        if (this._busy.digitalRead()) {
            pauseUntil(() => !this._busy.digitalRead(), 10000);
            net.debug(`busy = ${this._busy.digitalRead()}`);
            // pause(1000)
        }
        if (this._busy.digitalRead()) {
            net.log("timed out waiting for ready")
            return false
        }
        return true
    }

    /**
     * Frame and transmit one command packet:
     * START_CMD, cmd, param count, then for each param its length
     * (2 bytes when param_len_16) followed by its bytes, then END_CMD,
     * padded with 0xff up to a multiple of 4 bytes.
     * @returns true if the packet was sent, false if the module never
     *          became ready
     */
    private _sendCommand(cmd: number, params?: Buffer[], param_len_16?: boolean) {
        params = params || [];
        // compute buffer size
        let n = 3; // START_CMD, cmd, length
        params.forEach(param => {
            n += 1 + (param_len_16 ? 1 : 0) + param.length;
        })
        n += 1; // END_CMD
        // padding
        while (n % 4) n++;
        const packet = control.createBuffer(n);
        let k = 0;
        packet[k++] = _START_CMD;
        packet[k++] = cmd & ~_REPLY_FLAG;
        packet[k++] = params.length;
        params.forEach(param => {
            if (param_len_16)
                packet[k++] = (param.length >> 8) & 0xFF;
            packet[k++] = param.length & 0xFF;
            packet.write(k, param);
            k += param.length;
        })
        packet[k++] = _END_CMD;
        while (k < n)
            packet[k++] = 0xff;
        net.debug(`send cmd ${packet.toHex()}`)
        if (!this.waitForReady())
            return false
        this._cs.digitalWrite(false)
        this.spiTransfer(packet, null)
        this._cs.digitalWrite(true)
        net.debug(`send done`);
        return true
    }

    // Full-duplex SPI transfer; a missing tx or rx side is substituted
    // with a zero-filled buffer of the other side's length.
    private spiTransfer(tx: Buffer, rx: Buffer) {
        if (!tx) tx = control.createBuffer(rx.length)
        if (!rx) rx = control.createBuffer(tx.length)
        this._spi.transfer(tx, rx);
    }

    /**
     * Read one framed reply for `cmd` and return its parameter buffers,
     * or null on timeout / framing error. When num_responses is omitted
     * the count is taken from the reply itself.
     */
    private _waitResponseCmd(cmd: number, num_responses?: number, param_len_16?: boolean) {
        net.debug(`wait response cmd`);
        if (!this.waitForReady())
            return null
        this._cs.digitalWrite(false)
        let responses: Buffer[] = []
        if (!this.waitSPIChar(_START_CMD)) {
            this._cs.digitalWrite(true)
            return null
        }
        // reply echoes the command with the reply bit set
        this.checkData(cmd | _REPLY_FLAG)
        if (num_responses !== undefined)
            this.checkData(num_responses, cmd + "")
        else
            num_responses = this.readByte();
        for (let num = 0; num < num_responses; ++num) {
            let param_len = this.readByte()
            if (param_len_16) {
                param_len <<= 8
                param_len |= this.readByte()
            }
            net.debug(`\tParameter #${num} length is ${param_len}`)
            const response = control.createBuffer(param_len);
            this.spiTransfer(null, response)
            responses.push(response);
        }
        this.checkData(_END_CMD);
        this._cs.digitalWrite(true)
        net.debug(`responses ${responses.length}`);
        return responses;
    }

    // Acquire the exchange lock, waiting for any in-flight exchange.
    private lock() {
        while (this._locked) {
            pauseUntil(() => !this._locked)
        }
        this._locked = true
    }

    // Release the exchange lock; failing loudly on unbalanced release.
    private unlock() {
        if (!this._locked)
            net.fail("not locked!")
        this._locked = false;
    }

    /** Send one command and wait for its reply in a single locked exchange. */
    private sendCommandGetResponse(cmd: number, params?: Buffer[],
        reply_params = 1, sent_param_len_16 = false, recv_param_len_16 = false) {
        this.lock()
        this._sendCommand(cmd, params, sent_param_len_16)
        const resp = this._waitResponseCmd(cmd, reply_params, recv_param_len_16)
        this.unlock();
        return resp
    }

    /** Current WiFi link state (one of the WL_* constants). */
    get status(): number {
        const resp = this.sendCommandGetResponse(_GET_CONN_STATUS_CMD)
        if (!resp)
            return WL_NO_SHIELD
        net.debug(`status: ${resp[0][0]}`);
        // one byte response
        return resp[0][0];
    }

    /** A string of the firmware version on the ESP32 */
    get firmwareVersion(): string {
        let resp = this.sendCommandGetResponse(_GET_FW_VERSION_CMD)
        if (!resp)
            return "not connected"
        return resp[0].toString();
    }

    /** A bytearray containing the MAC address of the ESP32 */
    get MACaddress(): Buffer {
        let resp = this.sendCommandGetResponse(_GET_MACADDR_CMD, [hex`ff`])
        if (!resp)
            return null
        // for whatever reason, the mac address is returned reversed
        const res = control.createBuffer(6)
        for (let i = 0; i < 6; ++i)
            res[i] = resp[0][5 - i]
        return res
    }

    /** Begin a scan of visible access points. Follow up with a call
    to 'get_scan_networks' for response
    */
    private startScanNetworks(): void {
        let resp = this.sendCommandGetResponse(_START_SCAN_NETWORKS)
        // FIX: guard a null reply instead of crashing on resp[0]
        if (!resp || resp[0][0] != 1) {
            net.fail("failed to start AP scan")
        }
    }

    /** The results of the latest SSID scan. Returns a list of access
    points with ssid, rssi and encryption flags, one for each AP found
    */
    private getScanNetworks(): net.AccessPoint[] {
        let names = this.sendCommandGetResponse(_SCAN_NETWORKS, undefined, undefined)
        // FIX: a null reply previously crashed the for-of below;
        // scanNetworksCore() already handles a null result by retrying.
        if (!names)
            return null
        let APs = []
        let i = 0
        for (let name of names) {
            let a_p = new net.AccessPoint(name.toString())
            let rssi = this.sendCommandGetResponse(_GET_IDX_RSSI_CMD, [buffer1(i)])[0]
            a_p.rssi = pins.unpackBuffer("<i", rssi)[0]
            // FIX: query the encryption of AP #i — this used to pass
            // buffer1(1), so every AP reported AP #1's encryption.
            let encr = this.sendCommandGetResponse(_GET_IDX_ENCT_CMD, [buffer1(i)])[0]
            if (encr[0])
                a_p.flags |= net.WifiAPFlags.HasPassword
            APs.push(a_p)
            i++
        }
        return APs
    }

    /** Scan for visible access points, returns a list of access point details.
    Starts a scan and polls for results for up to ~20 seconds.
    */
    protected scanNetworksCore(): net.AccessPoint[] {
        this.startScanNetworks()
        // attempts
        for (let _ = 0; _ < 10; ++_) {
            pause(2000)
            let APs = this.getScanNetworks()
            if (APs) {
                for (const ap of APs)
                    net.debug(`  ${ap.ssid} => RSSI ${ap.rssi}`)
                return APs
            }
        }
        return null
    }

    /** Tells the ESP32 to set the access point to the given ssid */
    public wifiSetNetwork(ssid: string): void {
        const ssidbuf = control.createBufferFromUTF8(ssid);
        let resp = this.sendCommandGetResponse(_SET_NET_CMD, [ssidbuf])
        if (resp[0][0] != 1) {
            net.fail("failed to set network")
        }
    }

    /** Sets the desired access point ssid and passphrase */
    public wifiSetPassphrase(ssid: string, passphrase: string): void {
        const ssidbuf = control.createBufferFromUTF8(ssid);
        const passphrasebuf = control.createBufferFromUTF8(passphrase);
        let resp = this.sendCommandGetResponse(_SET_PASSPHRASE_CMD, [ssidbuf, passphrasebuf])
        if (resp[0][0] != 1) {
            net.fail("failed to set passphrase")
        }
    }

    /** Sets the WPA2 Enterprise anonymous identity */
    public wifiSetEntidentity(ident: string): void {
        const ssidbuf = control.createBufferFromUTF8(ident);
        let resp = this.sendCommandGetResponse(_SET_ENT_IDENT_CMD, [ssidbuf])
        if (resp[0][0] != 1) {
            net.fail("failed to set enterprise anonymous identity")
        }
    }

    /** Sets the desired WPA2 Enterprise username */
    public wifiSetEntusername(username: string): void {
        const usernamebuf = control.createBufferFromUTF8(username);
        let resp = this.sendCommandGetResponse(_SET_ENT_UNAME_CMD, [usernamebuf])
        if (resp[0][0] != 1) {
            net.fail("failed to set enterprise username")
        }
    }

    /** Sets the desired WPA2 Enterprise password */
    public wifiSetEntpassword(password: string): void {
        const passwordbuf = control.createBufferFromUTF8(password);
        let resp = this.sendCommandGetResponse(_SET_ENT_PASSWD_CMD, [passwordbuf])
        if (resp[0][0] != 1) {
            net.fail("failed to set enterprise password")
        }
    }

    /** Enables WPA2 Enterprise mode */
    public wifiSetEntenable(): void {
        let resp = this.sendCommandGetResponse(_SET_ENT_ENABLE_CMD)
        if (resp[0][0] != 1) {
            net.fail("failed to enable enterprise mode")
        }
    }

    /** Raw bytes of the SSID we are currently associated with. */
    get ssidBuffer(): Buffer {
        let resp = this.sendCommandGetResponse(_GET_CURR_SSID_CMD, [hex`ff`])
        return resp[0]
    }

    /** SSID of the current access point, or "" when not associated. */
    get ssid(): string {
        const b = this.ssidBuffer;
        return b ? b.toString() : "";
    }

    /** Signal strength of the current link, in dBm. */
    get rssi(): number {
        let resp = this.sendCommandGetResponse(_GET_CURR_RSSI_CMD, [hex`ff`])
        return pins.unpackBuffer("<i", resp[0])[0]
    }

    /**
     * IP configuration of the station interface as a map with
     * "ip_addr", "netmask" and "gateway" 4-byte buffers, or null.
     * FIX: the firmware answers _GET_IPADDR_CMD with three reply
     * parameters (ip, netmask, gateway) — matching the Adafruit ESP32SPI
     * reference driver — but the previous code requested a single reply
     * (tripping the reply-count check) and returned the raw buffer, so
     * indexing it with ["ip_addr"] was always undefined.
     */
    get networkData(): any {
        let resp = this.sendCommandGetResponse(_GET_IPADDR_CMD, [hex`ff`], 3)
        if (!resp)
            return null
        return {
            "ip_addr": resp[0],
            "netmask": resp[1],
            "gateway": resp[2]
        }
    }

    /** Our IP address as a dotted-quad string, or "" when unknown. */
    get ipAddress(): string {
        const data = this.networkData
        const ip: Buffer = data ? data["ip_addr"] : null
        if (!ip || ip.length < 4)
            return ""
        return ip[0] + "." + ip[1] + "." + ip[2] + "." + ip[3]
    }

    /** True when associated with an access point. */
    get isConnected(): boolean {
        return this.status == WL_CONNECTED
    }

    /** True when the module is idle (not associated, not connecting). */
    get isIdle(): boolean {
        return this.status == WL_IDLE_STATUS;
    }

    /**
     * Connect to an access point with given name and password.
     * Will retry up to 10 times and return on success
     */
    connectAP(ssid: string, password: string): boolean {
        net.log(`connect to ${ssid}`)
        if (password) {
            this.wifiSetPassphrase(ssid, password)
        } else {
            // open network
            this.wifiSetNetwork(ssid)
        }
        // retries
        let stat;
        for (let _ = 0; _ < 10; ++_) {
            stat = this.status
            if (stat == WL_CONNECTED) {
                this.wasConnected = true;
                net.log("connected")
                return true;
            }
            pause(1000)
        }
        if ([WL_CONNECT_FAILED, WL_CONNECTION_LOST, WL_DISCONNECTED].indexOf(stat) >= 0) {
            net.log(`failed to connect to "${ssid}" (${stat})`)
        }
        if (stat == WL_NO_SSID_AVAIL) {
            net.log(`no such ssid: "${ssid}"`)
        }
        return false;
    }

    /**
     * Convert a hostname to a packed 4-byte IP address. Returns
    a 4 bytearray
     */
    public hostbyName(hostname: string): Buffer {
        if (!this.connect())
            return undefined;
        let resp = this.sendCommandGetResponse(_REQ_HOST_BY_NAME_CMD, [control.createBufferFromUTF8(hostname)])
        if (resp[0][0] != 1) {
            net.fail("failed to request hostname")
        }
        resp = this.sendCommandGetResponse(_GET_HOST_BY_NAME_CMD)
        return resp[0];
    }

    /** Ping a destination IP address or hostname, with a max time-to-live
    (ttl). Returns a millisecond timing value
    */
    public ping(dest: string, ttl: number = 250): number {
        if (!this.connect())
            return -1;
        // convert to IP address
        let ip = this.hostbyName(dest)
        // ttl must be between 0 and 255
        ttl = Math.max(0, Math.min(ttl | 0, 255))
        let resp = this.sendCommandGetResponse(_PING_CMD, [ip, buffer1(ttl)])
        return pins.unpackBuffer("<H", resp[0])[0];
    }

    /** Request a socket from the ESP32, will allocate and return a number that
    can then be passed to the other socket commands
    */
    public socket(): number {
        if (!this.connect())
            net.fail("can't connect");
        net.debug("*** Get socket")
        let resp0 = this.sendCommandGetResponse(_GET_SOCKET_CMD)
        let resp = resp0[0][0]
        // 255 is the firmware's "no free socket" sentinel
        if (resp == 255) {
            net.fail("no sockets available")
        }
        net.debug("Allocated socket #" + resp)
        return resp
    }

    /** Open a socket to a destination IP address or hostname
    using the ESP32's internal reference number. By default we use
    'conn_mode' TCP_MODE but can also use UDP_MODE or TLS_MODE
    (dest must be hostname for TLS_MODE!)
    */
    public socketOpen(socket_num: number, dest: Buffer | string, port: number, conn_mode = net.TCP_MODE): void {
        this._socknum_ll[0][0] = socket_num
        net.debug("*** Open socket: " + dest + ":" + port)
        let port_param = pins.packBuffer(">H", [port])
        let resp: Buffer[]
        // use the 5 arg version
        if (typeof dest == "string") {
            const dest2 = control.createBufferFromUTF8(dest)
            resp = this.sendCommandGetResponse(_START_CLIENT_TCP_CMD, [dest2, hex`00000000`, port_param, this._socknum_ll[0], buffer1(conn_mode)])
        } else {
            // ip address, use 4 arg version
            resp = this.sendCommandGetResponse(_START_CLIENT_TCP_CMD, [dest, port_param, this._socknum_ll[0], buffer1(conn_mode)])
        }
        if (resp[0][0] != 1) {
            net.fail("could not connect to remote server")
        }
    }

    /** Get the socket connection status, can be SOCKET_CLOSED, SOCKET_LISTEN,
    SOCKET_SYN_SENT, SOCKET_SYN_RCVD, SOCKET_ESTABLISHED, SOCKET_FIN_WAIT_1,
    SOCKET_FIN_WAIT_2, SOCKET_CLOSE_WAIT, SOCKET_CLOSING, SOCKET_LAST_ACK, or
    SOCKET_TIME_WAIT
    */
    public socketStatus(socket_num: number): number {
        this._socknum_ll[0][0] = socket_num
        let resp = this.sendCommandGetResponse(_GET_CLIENT_STATE_TCP_CMD, this._socknum_ll)
        return resp[0][0]
    }

    /** Test if a socket is connected to the destination, returns boolean true/false */
    public socket_connected(socket_num: number): boolean {
        return this.socketStatus(socket_num) == SOCKET_ESTABLISHED
    }

    /** Write the bytearray buffer to a socket */
    public socketWrite(socket_num: number, buffer: Buffer): void {
        net.debug("Writing:" + buffer.length)
        this._socknum_ll[0][0] = socket_num
        // 16-bit length prefix is required for potentially large payloads
        let resp = this.sendCommandGetResponse(_SEND_DATA_TCP_CMD, [this._socknum_ll[0], buffer], 1, true)
        let sent = resp[0].getNumber(NumberFormat.UInt16LE, 0)
        if (sent != buffer.length) {
            net.fail(`failed to send ${buffer.length} bytes (sent ${sent})`)
        }
        resp = this.sendCommandGetResponse(_DATA_SENT_TCP_CMD, this._socknum_ll)
        if (resp[0][0] != 1) {
            net.fail("failed to verify data sent")
        }
    }

    /** Determine how many bytes are waiting to be read on the socket */
    public socketAvailable(socket_num: number): number {
        this._socknum_ll[0][0] = socket_num
        let resp = this.sendCommandGetResponse(_AVAIL_DATA_TCP_CMD, this._socknum_ll)
        let reply = pins.unpackBuffer("<H", resp[0])[0]
        net.debug(`ESPSocket: ${reply} bytes available`)
        return reply
    }

    /** Read up to 'size' bytes from the socket number. Returns a bytearray */
    public socketRead(socket_num: number, size: number): Buffer {
        net.debug(`Reading ${size} bytes from ESP socket with status ${this.socketStatus(socket_num)}`)
        this._socknum_ll[0][0] = socket_num
        let resp = this.sendCommandGetResponse(_GET_DATABUF_TCP_CMD,
            [this._socknum_ll[0], pins.packBuffer("<H", [size])],
            1, true, true)
        net.debug(`buf >>${resp[0].toString()}<<`)
        return resp[0]
    }

    /** Open and verify we connected a socket to a destination IP address or hostname
    using the ESP32's internal reference number. By default we use
    'conn_mode' TCP_MODE but can also use UDP_MODE or TLS_MODE (dest must
    be hostname for TLS_MODE!)
    */
    public socketConnect(socket_num: number, dest: string | Buffer, port: number, conn_mode = net.TCP_MODE): boolean {
        net.debug("*** Socket connect mode " + conn_mode)
        this.socketOpen(socket_num, dest, port, conn_mode)
        let times = net.monotonic()
        // wait 3 seconds
        while (net.monotonic() - times < 3) {
            if (this.socket_connected(socket_num)) {
                return true
            }
            pause(10)
        }
        net.fail("failed to establish connection")
        return false
    }

    /** Close a socket using the ESP32's internal reference number */
    public socketClose(socket_num: number): void {
        net.debug("*** Closing socket #" + socket_num)
        this._socknum_ll[0][0] = socket_num
        let resp = this.sendCommandGetResponse(_STOP_CLIENT_TCP_CMD, this._socknum_ll)
        if (resp[0][0] != 1) {
            net.fail("failed to close socket")
        }
    }

    /** Enable/disable debug mode on the ESP32. Debug messages will be
    written to the ESP32's UART.
    */
    public setESPdebug(enabled: boolean) {
        let resp = this.sendCommandGetResponse(_SET_DEBUG_CMD, [buffer1(enabled ? 1 : 0)])
        if (resp[0][0] != 1) {
            net.fail("failed to set debug mode")
        }
    }

    /** Read the internal temperature sensor, as a 32-bit float. */
    public getTemperature() {
        let resp = this.sendCommandGetResponse(_GET_TEMP_CMD, [])
        if (resp[0].length != 4) {
            net.fail("failed to get temp")
        }
        return resp[0].getNumber(NumberFormat.Float32LE, 0)
    }

    /**
    Set the io mode for a GPIO pin.
    :param int pin: ESP32 GPIO pin to set.
    :param value: direction for pin, digitalio.Direction or integer (0=input, 1=output).
    */
    public setPinMode(pin: number, pin_mode: number): void {
        let resp = this.sendCommandGetResponse(_SET_PIN_MODE_CMD, [buffer1(pin), buffer1(pin_mode)])
        if (resp[0][0] != 1) {
            net.fail("failed to set pin mode")
        }
    }

    /**
    Set the digital output value of pin.
    :param int pin: ESP32 GPIO pin to write to.
    :param bool value: Value for the pin.
    */
    public setDigitalWrite(pin: number, value: number): void {
        let resp = this.sendCommandGetResponse(_SET_DIGITAL_WRITE_CMD, [buffer1(pin), buffer1(value)])
        if (resp[0][0] != 1) {
            net.fail("failed to write to pin")
        }
    }

    /**
    Set the analog output value of pin, using PWM.
    :param int pin: ESP32 GPIO pin to write to.
    :param float value: 0=off 1.0=full on
    */
    public setAnalogWrite(pin: number, analog_value: number) {
        // firmware takes an 8-bit duty cycle
        let value = Math.trunc(255 * analog_value)
        let resp = this.sendCommandGetResponse(_SET_ANALOG_WRITE_CMD, [buffer1(pin), buffer1(value)])
        if (resp[0][0] != 1) {
            net.fail("failed to write to pin")
        }
    }
}
//% shim=esp32spi::flashDevice
export function flashDevice() {
    // Shim: replaced at compile time by the native esp32spi::flashDevice
    // implementation; this empty body is only the simulator fallback.
    return
}
} | the_stack |
import { loggerSpy } from '../testhelpers/loggerSpy';
import { importSingleText } from '../testhelpers/importSingleText';
import {
assertAddElementRule,
assertAssignmentRule,
assertBindingRule,
assertCardRule,
assertCaretValueRule,
assertContainsRule,
assertFlagRule,
assertInsertRule,
assertObeysRule,
assertOnlyRule
} from '../testhelpers';
import { FshCode } from '../../src/fshtypes';
import { leftAlign } from '../utils/leftAlign';
import { importText, RawFSH } from '../../src/import';
describe('FSHImporter', () => {
describe('Resource', () => {
// Tests for the metadata (keyword) section of a FSH Resource definition:
// name, Id, Title, Description, defaults, duplicates, and name collisions.
describe('#sdMetadata', () => {
it('should parse the simplest possible resource', () => {
const input = `
Resource: Simple
`;
const result = importSingleText(input, 'Simple.fsh');
expect(result.resources.size).toBe(1);
const resource = result.resources.get('Simple');
expect(resource.name).toBe('Simple');
// Resources default their parent to DomainResource
expect(resource.parent).toBe('DomainResource');
// if no id is explicitly set, should default to name
expect(resource.id).toBe('Simple');
expect(resource.sourceInfo.location).toEqual({
startLine: 2,
startColumn: 9,
endLine: 2,
endColumn: 24
});
expect(resource.sourceInfo.file).toBe('Simple.fsh');
});
it('should parse resource with name matching various possible tokens recognized as name', () => {
// This basically exercises all the tokens we accept for name:
// SEQUENCE | NUMBER | KW_MS | KW_SU | KW_TU | KW_NORMATIVE | KW_DRAFT | KW_CODES | KW_VSREFERENCE | KW_SYSTEM | KW_UNITS;
// Since we'll do the same thing over and over (and over), create a function for it
const testToken = (token: string) => {
const input = leftAlign(`
Resource: ${token}
* value[x] only boolean
`);
const result = importSingleText(input);
expect(loggerSpy.getAllLogs('error')).toHaveLength(0);
expect(result).toBeDefined();
expect(result.resources.size).toBe(1);
const resource = result.resources.get(token);
expect(resource).toBeDefined();
expect(resource.name).toBe(token);
};
testToken('MyResource'); // SEQUENCE
testToken('123'); // NUMBER
testToken('MS'); // KW_MS
testToken('SU'); // KW_SU
testToken('TU'); // KW_TU
testToken('N'); // KW_NORMATIVE
testToken('D'); // KW_DRAFT
testToken('codes'); // KW_CODES
testToken('valueset'); // KW_VSREFERENCE
testToken('system'); // KW_SYSTEM
testToken('units'); // KW_UNITS
});
// Mixins were removed from FSH; using the keyword must be reported.
it('should log an error when encountering the mixins metadata attribute', () => {
const input = `
Resource: TestWithMixins
Mixins: Mixin1 and Mixin2 and Mixin3 and Mixin4
`;
importSingleText(input, 'TestWithMixins.fsh');
expect(loggerSpy.getLastMessage('error')).toMatch(/File: TestWithMixins\.fsh.*Line: 3\D*/s);
});
it('should parse resource with additional metadata properties', () => {
const input = `
Resource: TestWithMeta
Id: test-with-meta
Title: "A Test with Meta Resource"
Description: "Test resource with meta"
`;
const result = importSingleText(input);
expect(result.resources.size).toBe(1);
const resource = result.resources.get('TestWithMeta');
expect(resource.name).toBe('TestWithMeta');
expect(resource.parent).toBe('DomainResource');
expect(resource.id).toBe('test-with-meta');
expect(resource.title).toBe('A Test with Meta Resource');
expect(resource.description).toBe('Test resource with meta');
expect(resource.sourceInfo.location).toEqual({
startLine: 2,
startColumn: 9,
endLine: 5,
endColumn: 46
});
});
it('should parse resource with numeric name and id', () => {
const input = `
Resource: 123
Id: 789
`;
const result = importSingleText(input);
expect(result.resources.size).toBe(1);
const resource = result.resources.get('123');
expect(resource.name).toBe('123');
expect(resource.id).toBe('789');
});
// Triple-quoted strings are dedented and re-joined with newlines.
it('should properly parse a multi-string description', () => {
const input = `
Resource: TestWithMultiStringDesc
Description:
"""
This is a multi-string description
with a couple of paragraphs.
This is the second paragraph. It has bullet points w/ indentation:
* Bullet 1
* Bullet A
* Bullet B
* Bullet i
* Bullet C
* Bullet 2
"""
`;
const result = importSingleText(input);
expect(result.resources.size).toBe(1);
const resource = result.resources.get('TestWithMultiStringDesc');
const expectedDescriptionLines = [
'This is a multi-string description',
'with a couple of paragraphs.',
'',
'This is the second paragraph. It has bullet points w/ indentation:',
'',
'* Bullet 1',
' * Bullet A',
' * Bullet B',
' * Bullet i',
' * Bullet C',
'* Bullet 2'
];
expect(resource.description).toBe(expectedDescriptionLines.join('\n'));
});
it('should only apply each metadata attribute the first time it is declared', () => {
const input = `
Resource: TestResource
Id: test-resource
Title: "A Test Resource"
Description: "A resource used for tests"
Id: duplicate-test-resource
Title: "Duplicate Test Resource"
Description: "A duplicated resource"
`;
const result = importSingleText(input);
expect(result.resources.size).toBe(1);
const resource = result.resources.get('TestResource');
expect(resource.name).toBe('TestResource');
expect(resource.id).toBe('test-resource');
expect(resource.title).toBe('A Test Resource');
expect(resource.description).toBe('A resource used for tests');
});
it('should log an error when encountering a duplicate metadata attribute', () => {
const input = `
Resource: TestResource
Id: test-resource
Title: "A Test Resource"
Description: "A resource used for tests"
Id: duplicate-test-resource
Title: "Duplicate Test Resource"
Description: "A duplicated resource"
`;
importSingleText(input, 'Dupe.fsh');
// one error per duplicated attribute, in declaration order
expect(loggerSpy.getMessageAtIndex(-3, 'error')).toMatch(/File: Dupe\.fsh.*Line: 6\D*/s);
expect(loggerSpy.getMessageAtIndex(-2, 'error')).toMatch(/File: Dupe\.fsh.*Line: 7\D*/s);
expect(loggerSpy.getLastMessage('error')).toMatch(/File: Dupe\.fsh.*Line: 8\D*/s);
});
it('should log an error and skip the resource when encountering a resource with a name used by another resource', () => {
const input = `
Resource: TestResource
Title: "First Test Resource"
Resource: TestResource
Title: "Second Test Resource"
`;
const result = importSingleText(input, 'SameName.fsh');
expect(result.resources.size).toBe(1);
const resource = result.resources.get('TestResource');
// first definition wins; the duplicate is skipped
expect(resource.title).toBe('First Test Resource');
expect(loggerSpy.getLastMessage('error')).toMatch(
/Resource named TestResource already exists/s
);
expect(loggerSpy.getLastMessage('error')).toMatch(/File: SameName\.fsh.*Line: 5 - 6\D*/s);
});
// FIX: test name typo "an resource" -> "a resource"
it('should log an error and skip the resource when encountering a resource with a name used by another resource in another file', () => {
const input1 = `
Resource: SameResource
Title: "First Resource"
`;
const input2 = `
Resource: SameResource
Title: "Second Resource"
`;
const result = importText([
new RawFSH(input1, 'File1.fsh'),
new RawFSH(input2, 'File2.fsh')
]);
expect(result.reduce((sum, d2) => sum + d2.resources.size, 0)).toBe(1);
const r = result[0].resources.get('SameResource');
expect(r.title).toBe('First Resource');
expect(loggerSpy.getLastMessage('error')).toMatch(
/Resource named SameResource already exists/s
);
expect(loggerSpy.getLastMessage('error')).toMatch(/File: File2\.fsh.*Line: 2 - 3\D*/s);
});
});
// Tests for all supported rules are in FSHImporter.SD-Rules.test.ts
// Since Resources use the same rule parsing code as other StructureDefinitions, only do minimal tests of rules
// Card rules constrain min..max cardinality on an element path.
describe('#cardRule', () => {
it('should parse simple card rules', () => {
const input = `
Resource: TestResource
* category 1..5
* value[x] 1..1
* component 2..*
`;
const result = importSingleText(input);
const resource = result.resources.get('TestResource');
expect(resource.rules).toHaveLength(3);
assertCardRule(resource.rules[0], 'category', 1, 5);
assertCardRule(resource.rules[1], 'value[x]', 1, 1);
assertCardRule(resource.rules[2], 'component', 2, '*');
});
// Flags appended after a card produce one extra FlagRule per path.
it('should parse card rules w/ multiple flags', () => {
const input = `
Resource: TestResource
* category 1..5 MS ?! TU
* value[x] 1..1 ?! SU N
* component 2..* SU MS D
`;
const result = importSingleText(input);
const resource = result.resources.get('TestResource');
expect(resource.rules).toHaveLength(6);
assertCardRule(resource.rules[0], 'category', 1, 5);
// assertFlagRule flag args after path, in order: MS, SU, ?!, TU, N, D
assertFlagRule(
resource.rules[1],
'category',
true,
undefined,
true,
true,
undefined,
undefined
);
assertCardRule(resource.rules[2], 'value[x]', 1, 1);
assertFlagRule(
resource.rules[3],
'value[x]',
undefined,
true,
true,
undefined,
true,
undefined
);
assertCardRule(resource.rules[4], 'component', 2, '*');
assertFlagRule(
resource.rules[5],
'component',
true,
true,
undefined,
undefined,
undefined,
true
);
});
});
// Flag rules set a single flag on a path without changing cardinality.
// assertFlagRule flag args after path, in order: MS, SU, ?!, TU, N, D.
describe('#flagRule', () => {
it('should parse single-path single-value flag rules', () => {
const input = `
Resource: TestResource
* category MS
* value[x] ?!
* component SU
* interpretation TU
* note N
* bodySite D
`;
const result = importSingleText(input);
const resource = result.resources.get('TestResource');
expect(resource.rules).toHaveLength(6);
assertFlagRule(
resource.rules[0],
'category',
true,
undefined,
undefined,
undefined,
undefined,
undefined
);
assertFlagRule(
resource.rules[1],
'value[x]',
undefined,
undefined,
true,
undefined,
undefined,
undefined
);
assertFlagRule(
resource.rules[2],
'component',
undefined,
true,
undefined,
undefined,
undefined,
undefined
);
assertFlagRule(
resource.rules[3],
'interpretation',
undefined,
undefined,
undefined,
true,
undefined,
undefined
);
assertFlagRule(
resource.rules[4],
'note',
undefined,
undefined,
undefined,
undefined,
true,
undefined
);
assertFlagRule(
resource.rules[5],
'bodySite',
undefined,
undefined,
undefined,
undefined,
undefined,
true
);
});
});
// Binding rules bind a path to a value set with a binding strength.
describe('#BindingRule', () => {
it('should parse value set rules w/ names and strengths', () => {
const input = `
Resource: TestResource
* category from CategoryValueSet (required)
* code from CodeValueSet (extensible)
* valueCodeableConcept from ValueValueSet (preferred)
* component.code from ComponentCodeValueSet (example)
`;
const result = importSingleText(input);
const resource = result.resources.get('TestResource');
expect(resource.rules).toHaveLength(4);
assertBindingRule(resource.rules[0], 'category', 'CategoryValueSet', 'required');
assertBindingRule(resource.rules[1], 'code', 'CodeValueSet', 'extensible');
assertBindingRule(resource.rules[2], 'valueCodeableConcept', 'ValueValueSet', 'preferred');
assertBindingRule(resource.rules[3], 'component.code', 'ComponentCodeValueSet', 'example');
});
});
// Assignment rules fix a path to a value; "(exactly)" pins it exactly.
describe('#assignmentRule', () => {
it('should parse assigned value boolean rule', () => {
const input = `
Resource: TestResource
* valueBoolean = true
`;
const result = importSingleText(input);
const resource = result.resources.get('TestResource');
expect(resource.rules).toHaveLength(1);
assertAssignmentRule(resource.rules[0], 'valueBoolean', true);
});
it('should parse assigned value boolean rule with (exactly) modifier', () => {
const input = `
Resource: TestResource
* valueBoolean = true (exactly)
`;
const result = importSingleText(input);
const resource = result.resources.get('TestResource');
expect(resource.rules).toHaveLength(1);
// final "true" argument asserts the exactly flag was captured
assertAssignmentRule(resource.rules[0], 'valueBoolean', true, true);
});
});
// Only rules restrict a choice element to specific types.
describe('#onlyRule', () => {
it('should parse an only rule with one type', () => {
const input = `
Resource: TestResource
* value[x] only Quantity
`;
const result = importSingleText(input);
const resource = result.resources.get('TestResource');
expect(resource.rules).toHaveLength(1);
assertOnlyRule(resource.rules[0], 'value[x]', { type: 'Quantity' });
});
});
// Contains rules define slices; each item also emits an implied CardRule.
describe('#containsRule', () => {
it('should parse contains rule with one item', () => {
const input = `
Resource: TestResource
* component contains SystolicBP 1..1
`;
const result = importSingleText(input);
const resource = result.resources.get('TestResource');
expect(resource.rules).toHaveLength(2);
assertContainsRule(resource.rules[0], 'component', 'SystolicBP');
assertCardRule(resource.rules[1], 'component[SystolicBP]', 1, 1);
});
// Aliases used as slice types must be resolved to their full URL.
it('should parse contains rule with one item declaring an aliased type', () => {
const input = `
Alias: OffsetExtension = http://hl7.org/fhir/StructureDefinition/observation-timeOffset
Resource: TestResource
* component.extension contains OffsetExtension named offset 0..1
`;
const result = importSingleText(input);
const resource = result.resources.get('TestResource');
expect(resource.rules).toHaveLength(2);
assertContainsRule(resource.rules[0], 'component.extension', {
name: 'offset',
type: 'http://hl7.org/fhir/StructureDefinition/observation-timeOffset'
});
assertCardRule(resource.rules[1], 'component.extension[offset]', 0, 1);
});
});
// Caret value rules set ElementDefinition properties via the ^ syntax.
describe('#caretValueRule', () => {
it('should parse caret value rules with a path', () => {
const input = `
Resource: TestResource
* status ^short = "foo"
* status ^sliceIsConstraining = false
* status ^code[0] = foo#bar "baz"
`;
const result = importSingleText(input);
const resource = result.resources.get('TestResource');
assertCaretValueRule(resource.rules[0], 'status', 'short', 'foo', false);
assertCaretValueRule(resource.rules[1], 'status', 'sliceIsConstraining', false, false);
// code values carry their source location and file
assertCaretValueRule(
resource.rules[2],
'status',
'code[0]',
new FshCode('bar', 'foo', 'baz').withLocation([5, 29, 5, 41]).withFile(''),
false
);
});
// Triple-quoted strings are dedented and joined with newlines.
it('should parse caret value rules with triple-quoted string', () => {
const input = `
Resource: TestResource
* status ^short = "foo"
* status ^definition = """
This definition includes markdown for unordered lists:
* Level 1 list item 1
* Level 2 list item 1a
* Level 2 list item 1b
* Level 1 list item 2
"""
* status ^sliceIsConstraining = false
* status ^code[0] = foo#bar "baz"
`;
const result = importSingleText(input);
const resource = result.resources.get('TestResource');
assertCaretValueRule(resource.rules[0], 'status', 'short', 'foo', false);
const expectedDefinition =
'This definition includes markdown for unordered lists:\n* Level 1 list item 1\n * Level 2 list item 1a\n * Level 2 list item 1b\n* Level 1 list item 2';
assertCaretValueRule(resource.rules[1], 'status', 'definition', expectedDefinition, false);
assertCaretValueRule(resource.rules[2], 'status', 'sliceIsConstraining', false, false);
assertCaretValueRule(
resource.rules[3],
'status',
'code[0]',
new FshCode('bar', 'foo', 'baz').withLocation([12, 29, 12, 41]).withFile(''),
false
);
});
});
// Obeys rules attach invariants; "A and B" expands to one rule per invariant.
describe('#obeysRule', () => {
it('should parse an obeys rule with multiple invariants and a path', () => {
const input = `
Resource: TestResource
* category obeys SomeInvariant and ThisInvariant and ThatInvariant
`;
const result = importSingleText(input, 'Obeys.fsh');
const resource = result.resources.get('TestResource');
expect(resource.rules).toHaveLength(3);
assertObeysRule(resource.rules[0], 'category', 'SomeInvariant');
assertObeysRule(resource.rules[1], 'category', 'ThisInvariant');
assertObeysRule(resource.rules[2], 'category', 'ThatInvariant');
});
});
describe('#insertRule', () => {
it('should parse an insert rule with a single RuleSet', () => {
const input = `
Resource: TestResource
* insert MyRuleSet
`;
const result = importSingleText(input, 'Insert.fsh');
const resource = result.resources.get('TestResource');
expect(resource.rules).toHaveLength(1);
assertInsertRule(resource.rules[0], '', 'MyRuleSet');
});
});
    describe('#addElementRule', () => {
      // Add element rules: path, cardinality, optional flags, type(s),
      // short text, and optional definition text.
      it('should parse complete add element rules', () => {
        const input = `
        Resource: TestResource
        * isValid 1..1 MS boolean "is it valid?"
        * stuff 0..* string "just stuff" "a list of some stuff"
        * address 1..* Address "Just an address"
        `;
        const result = importSingleText(input);
        const resource = result.resources.get('TestResource');
        expect(resource.rules).toHaveLength(3);
        assertAddElementRule(resource.rules[0], 'isValid', {
          card: { min: 1, max: '1' },
          flags: { mustSupport: true },
          types: [{ type: 'boolean' }],
          // When only one string is given, it is used for both short and
          // definition; with two strings, the second becomes the definition.
          defs: { short: 'is it valid?', definition: 'is it valid?' }
        });
        assertAddElementRule(resource.rules[1], 'stuff', {
          card: { min: 0, max: '*' },
          types: [{ type: 'string' }],
          defs: { short: 'just stuff', definition: 'a list of some stuff' }
        });
        assertAddElementRule(resource.rules[2], 'address', {
          card: { min: 1, max: '*' },
          types: [{ type: 'Address' }],
          defs: { short: 'Just an address', definition: 'Just an address' }
        });
      });
      it('should parse complete add element rules with triple-quoted string for definition', () => {
        const input = `
        Resource: TestResource
        * isValid 1..1 MS boolean "is it valid?"
        * stuff 0..* string "just stuff" "a list of some stuff"
        * address 1..* Address "Just an address"
        """
        This definition for address includes markdown for unordered lists:
        * Level 1 list item 1
          * Level 2 list item 1a
          * Level 2 list item 1b
        * Level 1 list item 2
        """
        `;
        const result = importSingleText(input);
        const resource = result.resources.get('TestResource');
        expect(resource.rules).toHaveLength(3);
        assertAddElementRule(resource.rules[0], 'isValid', {
          card: { min: 1, max: '1' },
          flags: { mustSupport: true },
          types: [{ type: 'boolean' }],
          defs: { short: 'is it valid?', definition: 'is it valid?' }
        });
        assertAddElementRule(resource.rules[1], 'stuff', {
          card: { min: 0, max: '*' },
          types: [{ type: 'string' }],
          defs: { short: 'just stuff', definition: 'a list of some stuff' }
        });
        // Triple-quoted definitions are dedented: the common leading
        // whitespace is removed, leaving relative indentation intact.
        const expectedDefinition =
          'This definition for address includes markdown for unordered lists:\n* Level 1 list item 1\n  * Level 2 list item 1a\n  * Level 2 list item 1b\n* Level 1 list item 2';
        assertAddElementRule(resource.rules[2], 'address', {
          card: { min: 1, max: '*' },
          types: [{ type: 'Address' }],
          defs: { short: 'Just an address', definition: expectedDefinition }
        });
      });
    });
});
}); | the_stack |
import L = require("leaflet");
interface TimelineSliderControlOptions extends L.ControlOptions {
  /**
   * Minimum time, in ms, for the playback to take. Will almost certainly
   * actually take at least a bit longer; after each frame, the next one
   * displays in `duration/steps` ms, so each frame really takes frame
   * processing time PLUS step time.
   *
   * Default: 10000
   */
  duration?: number;
  /**
   * Allow playback to be controlled using the spacebar (play/pause) and
   * right/left arrow keys (next/previous).
   *
   * Default: false
   */
  enableKeyboardControls?: boolean;
  /**
   * Show playback controls (i.e. prev/play/pause/next).
   *
   * Default: true
   */
  enablePlayback?: boolean;
  /**
   * Show ticks on the timeline (if the browser supports it)
   *
   * See here for support:
   * https://developer.mozilla.org/en-US/docs/Web/HTML/Element/input/range#Browser_compatibility
   *
   * Default: true
   */
  showTicks?: boolean;
  /**
   * Wait until the user is finished changing the date to update the map. By
   * default, both the map and the date update for every change. With complex
   * data, this can slow things down, so set this to true to only update the
   * displayed date.
   *
   * Default: false
   */
  waitToUpdateMap?: boolean;
  /**
   * The start time of the timeline. If unset, this will be calculated
   * automatically based on the timelines registered to this control.
   */
  start?: number;
  /**
   * The end time of the timeline. If unset, this will be calculated
   * automatically based on the timelines registered to this control.
   */
  end?: number;
  /**
   * How many steps to break the timeline into. Each step will then be
   * `(end-start) / steps`. Only affects playback.
   *
   * Default: 1000
   */
  steps?: number;
  /**
   * Start playback of the timeline as soon as the page is loaded.
   *
   * Default: false
   */
  autoPlay?: boolean;
  /**
   * A function which takes the current time value (a Unix timestamp) and
   * outputs a string that is displayed beneath the control buttons.
   *
   * Default: the time coerced to a string ("" for falsey values).
   */
  formatOutput?(time: number): string;
}
/** @ignore */
type PlaybackControl = "play" | "pause" | "prev" | "next";
/** @ignore */
type TSC = L.TimelineSliderControl;

// Module augmentation describing the TimelineSliderControl class and the
// lowercase factory that this file attaches to the Leaflet namespace at
// runtime (see the `L.Control.extend` call and factory below).
declare module "leaflet" {
  export class TimelineSliderControl extends L.Control {
    /** Root DOM element of the control */
    container: HTMLElement;
    options: Required<TimelineSliderControlOptions>;
    /** Timeline layers registered via addTimelines */
    timelines: L.Timeline[];
    start: number;
    end: number;
    map: L.Map;
    /** Currently-displayed time */
    time: number;
    /** Other controls kept in lock-step via syncControl */
    syncedControl: TSC[];
    /** @ignore */
    _datalist?: HTMLDataListElement;
    /** @ignore */
    _output?: HTMLOutputElement;
    /** @ignore */
    _stepDuration: number;
    /** @ignore */
    _stepSize: number;
    /** @ignore */
    _timeSlider: HTMLInputElement;
    /** @ignore */
    _playing: boolean;
    /** @ignore */
    _timer: number;
    /** @ignore */
    _listener: (ev: KeyboardEvent) => any;
    /** @ignore */
    initialize(this: TSC, options: TimelineSliderControlOptions): void;
    /** @ignore */
    _getTimes(this: TSC): number[];
    /** @ignore */
    _nearestEventTime(this: TSC, findTime: number, mode?: 1 | -1): number;
    /** @ignore */
    _recalculate(this: TSC): void;
    /** @ignore */
    _createDOM(this: TSC): void;
    /** @ignore */
    _addKeyListeners(this: TSC): void;
    /** @ignore */
    _removeKeyListeners(this: TSC): void;
    /** @ignore */
    _buildDataList(this: TSC, container: HTMLElement): void;
    /** @ignore */
    _rebuildDataList(this: TSC): void;
    /** @ignore */
    _makeButton(this: TSC, container: HTMLElement, name: PlaybackControl): void;
    /** @ignore */
    _makeButtons(this: TSC, container: HTMLElement): void;
    /** @ignore */
    _makeOutput(this: TSC, container: HTMLElement): void;
    /** @ignore */
    _makeSlider(this: TSC, container: HTMLElement): void;
    /** @ignore */
    _onKeydown(this: TSC, ev: KeyboardEvent): void;
    /** @ignore */
    _sliderChanged(this: TSC, e: Event): void;
    /** @ignore */
    _setTime(this: TSC, time: number, type: string): void;
    /** @ignore */
    _disableMapDragging(this: TSC): void;
    /** @ignore */
    _enableMapDragging(this: TSC): void;
    /** @ignore */
    _resetIfTimelinesChanged(this: TSC, oldTimelineCount: number): void;
    /** @ignore */
    _autoPlay(this: TSC): void;
    play(this: TSC, fromSynced?: boolean): void;
    pause(this: TSC, fromSynced?: boolean): void;
    prev(this: TSC): void;
    next(this: TSC): void;
    toggle(this: TSC): void;
    setTime(this: TSC, time: number): void;
    addTimelines(this: TSC, ...timelines: L.Timeline[]): void;
    removeTimelines(this: TSC, ...timelines: L.Timeline[]): void;
    syncControl(this: TSC, controlToSync: TSC): void;
  }

  let timelineSliderControl: (options?: TimelineSliderControlOptions) => TSC;
}
// @ts-ignore
L.TimelineSliderControl = L.Control.extend({
  /**
   * Leaflet-style constructor. Applies defaults first, then overrides them
   * with any user-supplied options.
   */
  initialize(options = {}) {
    const defaultOptions: TimelineSliderControlOptions = {
      duration: 10000,
      enableKeyboardControls: false,
      enablePlayback: true,
      formatOutput: (output) => `${output || ""}`,
      showTicks: true,
      waitToUpdateMap: false,
      position: "bottomleft",
      steps: 1000,
      autoPlay: false,
    };
    this.timelines = [];
    // Order matters: defaults first, then user options override them.
    L.Util.setOptions(this, defaultOptions);
    L.Util.setOptions(this, options);
    // 0 is a placeholder; _recalculate derives real bounds from registered
    // timelines unless options.start/end were given explicitly.
    this.start = options.start || 0;
    this.end = options.end || 0;
  },
/* INTERNAL API *************************************************************/
/**
* @private
* @returns A flat, sorted list of all the times of all layers
*/
_getTimes() {
const times: number[] = [];
this.timelines.forEach((timeline) => {
const timesInRange = timeline.times.filter(
(time) => time >= this.start && time <= this.end
);
times.push(...timesInRange);
});
if (times.length) {
times.sort((a, b) => a - b);
const dedupedTimes = [times[0]];
times.reduce((a, b) => {
if (a !== b) {
dedupedTimes.push(b);
}
return b;
});
return dedupedTimes;
}
return times;
},
  /**
   * Adjusts start/end/step size. Should be called if any of those might
   * change (e.g. when adding a new layer).
   *
   * @private
   */
  _recalculate() {
    // Explicit options.start/end always win over computed bounds.
    const manualStart = typeof this.options.start !== "undefined";
    const manualEnd = typeof this.options.end !== "undefined";
    const duration = this.options.duration;
    let min = Infinity;
    let max = -Infinity;
    this.timelines.forEach((timeline) => {
      if (timeline.start < min) {
        min = timeline.start;
      }
      if (timeline.end > max) {
        max = timeline.end;
      }
    });
    if (!manualStart) {
      this.start = min;
      // With no timelines registered, min stays Infinity; clamp the slider
      // attribute to 0 so it remains usable.
      this._timeSlider.min = (min === Infinity ? 0 : min).toString();
      this._timeSlider.value = this._timeSlider.min;
    }
    if (!manualEnd) {
      this.end = max;
      this._timeSlider.max = (max === -Infinity ? 0 : max).toString();
    }
    // Floor both at 1 so playback always advances and timers always fire.
    this._stepSize = Math.max(1, (this.end - this.start) / this.options.steps);
    this._stepDuration = Math.max(1, duration / this.options.steps);
  },
  /**
   * @private
   * @param findTime The time to find events around
   * @param mode The operating mode.
   * If `mode` is 1, finds the event immediately after `findTime`.
   * If `mode` is -1, finds the event immediately before `findTime`.
   * @returns The time of the nearest event.
   */
  _nearestEventTime(findTime, mode = 1) {
    const times = this._getTimes();
    // Set when `findTime` is exactly an event time; the following
    // iteration's time is then the "immediately after" event.
    let retNext = false;
    let lastTime = times[0];
    for (let i = 1; i < times.length; i++) {
      const time = times[i];
      if (retNext) {
        return time;
      }
      if (time >= findTime) {
        if (mode === -1) {
          // lastTime is the latest event before findTime.
          return lastTime;
        }
        if (time === findTime) {
          retNext = true;
        } else {
          return time;
        }
      }
      lastTime = time;
    }
    // Ran off the end of the list: return the last event time seen.
    return lastTime;
  },
/* DOM CREATION & INTERACTION ***********************************************/
/**
* Create all of the DOM for the control.
*
* @private
*/
_createDOM() {
const classes = [
"leaflet-control-layers",
"leaflet-control-layers-expanded",
"leaflet-timeline-control",
];
const container = L.DomUtil.create("div", classes.join(" "));
this.container = container;
if (this.options.enablePlayback) {
const sliderCtrlC = L.DomUtil.create(
"div",
"sldr-ctrl-container",
container
);
const buttonContainer = L.DomUtil.create(
"div",
"button-container",
sliderCtrlC
);
this._makeButtons(buttonContainer);
if (this.options.enableKeyboardControls) {
this._addKeyListeners();
}
this._makeOutput(sliderCtrlC);
}
this._makeSlider(container);
if (this.options.showTicks) {
this._buildDataList(container);
}
if (this.options.autoPlay) {
this._autoPlay();
}
},
  /**
   * Add keyboard listeners for keyboard control
   *
   * @private
   */
  _addKeyListeners(): void {
    // Keep a reference to the bound handler so _removeKeyListeners can
    // unregister exactly the same function later.
    this._listener = (ev: KeyboardEvent) => this._onKeydown(ev);
    document.addEventListener("keydown", this._listener);
  },
  /**
   * Remove keyboard listeners
   *
   * @private
   */
  _removeKeyListeners(): void {
    // Removes the handler saved by _addKeyListeners.
    document.removeEventListener("keydown", this._listener);
  },
/**
* Constructs a <datalist>, for showing ticks on the range input.
*
* @private
* @param container The container to which to add the datalist
*/
_buildDataList(container): void {
this._datalist = L.DomUtil.create(
"datalist",
"",
container
) as HTMLDataListElement;
const idNum = Math.floor(Math.random() * 1000000);
this._datalist.id = `timeline-datalist-${idNum}`;
this._timeSlider.setAttribute("list", this._datalist.id);
this._rebuildDataList();
},
/**
* Reconstructs the <datalist>. Should be called when new data comes in.
*/
_rebuildDataList(): void {
const datalist = this._datalist;
if (!datalist) return;
while (datalist.firstChild) {
datalist.removeChild(datalist.firstChild);
}
const datalistSelect = L.DomUtil.create("select", "", this._datalist);
datalistSelect.setAttribute("aria-label", "List of times");
this._getTimes().forEach((time) => {
(L.DomUtil.create(
"option",
"",
datalistSelect
) as HTMLOptionElement).value = time.toString();
});
},
/**
* Makes a button with the passed name as a class, which calls the
* corresponding function when clicked. Attaches the button to container.
*
* @private
* @param container The container to which to add the button
* @param name The class to give the button and the function to call
*/
_makeButton(container, name) {
const button = L.DomUtil.create("button", name, container);
button.setAttribute("aria-label", name);
button.addEventListener("click", () => this[name]());
L.DomEvent.disableClickPropagation(button);
},
/**
* Makes the prev, play, pause, and next buttons
*
* @private
* @param container The container to which to add the buttons
*/
_makeButtons(container) {
this._makeButton(container, "prev");
this._makeButton(container, "play");
this._makeButton(container, "pause");
this._makeButton(container, "next");
},
  /**
   * DOM event handler to disable dragging on map
   *
   * Registered on the slider's "mouseenter" (see _makeSlider) so dragging
   * the slider handle does not pan the map.
   *
   * @private
   */
  _disableMapDragging() {
    this.map.dragging.disable();
  },
  /**
   * DOM event handler to enable dragging on map
   *
   * Registered on the slider's "mouseleave" (see _makeSlider); also called
   * from onRemove to restore the map's normal drag behavior.
   *
   * @private
   */
  _enableMapDragging() {
    this.map.dragging.enable();
  },
/**
* Creates the range input
*
* @private
* @param container The container to which to add the input
*/
_makeSlider(container) {
const slider = L.DomUtil.create(
"input",
"time-slider",
container
) as HTMLInputElement;
slider.setAttribute("aria-label", "Slider");
slider.type = "range";
slider.min = (this.start || 0).toString();
slider.max = (this.end || 0).toString();
slider.value = (this.start || 0).toString();
this._timeSlider = slider;
// register events using leaflet for easy removal
L.DomEvent.on(
this._timeSlider,
"mousedown mouseup click touchstart",
L.DomEvent.stopPropagation
);
L.DomEvent.on(this._timeSlider, "change input", this._sliderChanged, this);
L.DomEvent.on(
this._timeSlider,
"mouseenter",
this._disableMapDragging,
this
);
L.DomEvent.on(
this._timeSlider,
"mouseleave",
this._enableMapDragging,
this
);
},
_makeOutput(container) {
this._output = L.DomUtil.create(
"output",
"time-text",
container
) as HTMLOutputElement;
this._output.innerHTML = this.options.formatOutput(this.start);
},
_onKeydown(e) {
let target = (e.target || e.srcElement) as HTMLElement;
if (!/INPUT|TEXTAREA/.test(target.tagName)) {
switch (e.keyCode || e.which) {
case 37:
this.prev();
break;
case 39:
this.next();
break;
case 32:
this.toggle();
break;
default:
return;
}
e.preventDefault();
}
},
_sliderChanged(e) {
const { target } = e;
const time = parseFloat(
target instanceof HTMLInputElement ? target.value : "0"
);
this._setTime(time, e.type);
},
_setTime(time: number, type: string) {
this.time = time;
if (!this.options.waitToUpdateMap || type === "change") {
this.timelines.forEach((timeline) => timeline.setTime(time));
}
if (this._output) {
this._output.innerHTML = this.options.formatOutput(time);
}
},
_resetIfTimelinesChanged(oldTimelineCount) {
if (this.timelines.length !== oldTimelineCount) {
this._recalculate();
if (this.options.showTicks) {
this._rebuildDataList();
}
this.setTime(this.start);
}
},
_autoPlay() {
if (document.readyState === "loading") {
window.addEventListener("load", () => this._autoPlay());
} else {
this.play();
}
},
/* EXTERNAL API *************************************************************/
/**
* Register timeline layers with this control. This could change the start and
* end points of the timeline (unless manually set). It will also reset the
* playback.
*
* @param timelines The `L.Timeline`s to register
*/
addTimelines(...timelines) {
this.pause();
const timelineCount = this.timelines.length;
timelines.forEach((timeline) => {
if (this.timelines.indexOf(timeline) === -1) {
this.timelines.push(timeline);
}
});
this._resetIfTimelinesChanged(timelineCount);
},
/**
* Unregister timeline layers with this control. This could change the start
* and end points of the timeline unless manually set. It will also reset the
* playback.
*
* @param timelines The `L.Timeline`s to unregister
*/
removeTimelines(...timelines) {
this.pause();
const timelineCount = this.timelines.length;
timelines.forEach((timeline) => {
const index = this.timelines.indexOf(timeline);
if (index !== -1) {
this.timelines.splice(index, 1);
}
});
this._resetIfTimelinesChanged(timelineCount);
},
/**
* Toggles play/pause state.
*/
toggle() {
if (this._playing) {
this.pause();
} else {
this.play();
}
},
/**
* Pauses playback and goes to the previous event.
*/
prev() {
this.pause();
const prevTime = this._nearestEventTime(this.time, -1);
this._timeSlider.value = prevTime.toString();
this.setTime(prevTime);
},
/**
* Pauses playback.
*/
pause(fromSynced) {
window.clearTimeout(this._timer);
this._playing = false;
this.container?.classList.remove("playing");
if (this.syncedControl && !fromSynced) {
this.syncedControl.map(function (control) {
control.pause(true);
});
}
},
/**
* Starts playback.
*/
play(fromSynced) {
window.clearTimeout(this._timer);
if (parseFloat(this._timeSlider.value) === this.end) {
this._timeSlider.value = this.start.toString();
}
this._timeSlider.value = (
parseFloat(this._timeSlider.value) + this._stepSize
).toString();
this.setTime(+this._timeSlider.value);
if (parseFloat(this._timeSlider.value) === this.end) {
this._playing = false;
this.container?.classList.remove("playing");
} else {
this._playing = true;
this.container?.classList.add("playing");
this._timer = window.setTimeout(
() => this.play(true),
this._stepDuration
);
}
if (this.syncedControl && !fromSynced) {
this.syncedControl.map(function (control) {
control.play(true);
});
}
},
/**
* Pauses playback and goes to the next event.
*/
next() {
this.pause();
const nextTime = this._nearestEventTime(this.time, 1);
this._timeSlider.value = nextTime.toString();
this.setTime(nextTime);
},
/**
* Set the time displayed.
*
* @param time The time to set
*/
setTime(time: number) {
if (this._timeSlider) this._timeSlider.value = time.toString();
this._setTime(time, "change");
},
  /**
   * Leaflet lifecycle hook: called when the control is added to a map.
   * Builds the control's DOM and initializes the displayed time.
   *
   * @param map The map this control was added to
   * @returns The control's root container element
   */
  onAdd(map: L.Map): HTMLElement {
    this.map = map;
    this._createDOM();
    this.setTime(this.start);
    return this.container;
  },
onRemove() {
/* istanbul ignore else */
if (this.options.enableKeyboardControls) {
this._removeKeyListeners();
}
// cleanup events registered in _makeSlider
L.DomEvent.off(this._timeSlider, "change input", this._sliderChanged, this);
L.DomEvent.off(
this._timeSlider,
"pointerdown mousedown touchstart",
this._disableMapDragging,
this
);
L.DomEvent.off(
document.body,
"pointerup mouseup touchend",
this._enableMapDragging,
this
);
// make sure that dragging is restored to enabled state
this._enableMapDragging();
},
syncControl(controlToSync) {
if (!this.syncedControl) {
this.syncedControl = [];
}
this.syncedControl.push(controlToSync);
},
});
L.timelineSliderControl = (options?: TimelineSliderControlOptions) =>
new L.TimelineSliderControl(options); | the_stack |
import { ensureError, formatUserError, MultiError, notNull, sleep, toArray, UserError } from "@adpt/utils";
import AsyncLock from "async-lock";
import db from "debug";
import { alg, Graph } from "graphlib";
import { flatten, isError, isObject } from "lodash";
import PQueue from "p-queue";
import pTimeout from "p-timeout";
import { inspect } from "util";
import { makeElementStatus } from "../dom";
import { domForEach } from "../dom_utils";
import { ElementNotInDom, InternalError } from "../error";
import { Handle, isHandle } from "../handle";
import {
AdaptElement,
AdaptMountedElement,
// @ts-ignore - here to deal with issue #71
AnyProps,
ElementID,
FinalDomElement,
isElement,
isMountedElement,
isPrimitiveElement,
} from "../jsx";
import { Deployment } from "../server/deployment";
import { DeployOpID } from "../server/deployment_data";
import { Status } from "../status";
import {
Action,
ChangeType,
Dependency,
DeployHelpers,
DeployOpStatus,
DeployStatus,
DeployStatusExt,
EPPrimitiveDependencies,
ExecuteComplete,
ExecuteOptions,
ExecutionPlan,
ExecutionPlanOptions,
GoalStatus,
goalToInProgress,
isDependsOn,
isFinalStatus,
isInProgress,
isProxying,
isRelation,
Relation,
} from "./deploy_types";
import {
EPNode,
EPNodeId,
EPObject,
ExecutePassOptions,
isWaitInfo,
StatusTracker,
WaitInfo,
} from "./deploy_types_private";
import { DeployedWhenQueue } from "./deployed_when_queue";
import {
relatedHandles,
relationInverse,
relationIsReady,
relationIsReadyStatus,
relationToString,
waitStatusToString,
} from "./relation_utils";
import { And, Edge } from "./relations";
import { createStatusTracker } from "./status_tracker";
// Debug loggers: enable with DEBUG=adapt:deploy:execute (high level) or
// DEBUG=adapt:detail:deploy:execute (verbose per-node detail).
const debugExecute = db("adapt:deploy:execute");
const debugExecuteDetail = db("adapt:detail:deploy:execute");
/**
 * Builds an ExecutionPlan from a DOM diff plus the Actions that will effect
 * it. NOTE: the ordering of operations below is load-bearing — see the
 * comment on computePrimitiveDependencies.
 */
export function createExecutionPlan(options: ExecutionPlanOptions): ExecutionPlan {
    const plan = new ExecutionPlanImpl(options);

    // Adds StartStart dependencies from an Element's original children and
    // its build successor back to the Element itself.
    function addParents(e: AdaptMountedElement) {
        const kids: (AdaptMountedElement | null)[] = e.buildData.origChildren as any || [];
        kids.forEach((child) => isElement(child) && plan.setParent(child, e));
        const succ = e.buildData.successor;
        if (succ) plan.setParent(succ, e);
    }

    const { actions, diff, newDom } = options;

    // Added and surviving Elements are driven toward Deployed; deleted
    // Elements toward Destroyed.
    diff.added.forEach((e) => plan.addElem(e, DeployStatus.Deployed));
    diff.commonNew.forEach((e) => plan.addElem(e, DeployStatus.Deployed));
    diff.deleted.forEach((e) => plan.addElem(e, DeployStatus.Destroyed));
    diff.added.forEach(addParents);
    diff.commonNew.forEach(addParents);
    diff.deleted.forEach(addParents);

    debugExecuteDetail(`Plan before deps`, plan.print());
    debugExecuteDetail(`Saved deps`, options.dependencies);
    plan.addSavedDependencies(options.dependencies);
    // Force memoization of primitiveDependencies. MUST happen before actions
    // are added because adding actions removes primitive Elements from the
    // graph and collapses dependencies.
    plan.computePrimitiveDependencies(newDom);
    actions.forEach((a) => plan.addAction(a));
    return plan;
}
export function getWaitInfo(goalStatus: GoalStatus,
e: AdaptElement | Handle, helpers: DeployHelpers): WaitInfo {
const hand = isHandle(e) ? e : e.props.handle;
const elem = hand.mountedOrig;
if (elem === undefined) throw new InternalError("element has no mountedOrig!");
if (elem === null) throw new ElementNotInDom();
if (!elem.built()) {
return {
deployedWhen: () => true,
description: elem.componentName,
};
}
const dependsOn = elem.dependsOn(goalStatus, helpers);
if (dependsOn && !isDependsOn(dependsOn)) {
throw new UserError(`Component '${elem.componentName}' dependsOn ` +
`method returned a value that is not a DependsOn object. ` +
`[Element id: ${elem.id}] returned: ${inspect(dependsOn)}`);
}
const wi: WaitInfo = {
description: dependsOn ? dependsOn.description : elem.componentName,
deployedWhen: (gs: GoalStatus) => elem.deployedWhen(gs, helpers),
};
if (dependsOn) wi.dependsOn = dependsOn;
return wi;
}
// Prefix Element IDs to form graph node IDs.
const elIdToNodeId = (id: ElementID) => "E:" + id;
// Defined in terms of elIdToNodeId so the two can never disagree on format.
const elToNodeId = (e: AdaptMountedElement) => elIdToNodeId(e.id);
/**
* Dependency types:
*
 * - FinishStart: This is also referred to as a "soft" FinishStart
 *   dependency. It is currently the only type of dependency that users can
 *   manually specify, which is done via a `dependsOn` method or related
* mechanism. These dependencies are inserted into the plan graph,
* but are really checked/enforced by the Relations associated with the
* node, not solely by the graph. The reason for this is to support
* Relation types besides just a strict dependency, such as a
* Relation with a logical "OR". This enables creating a dependency like
* waiting for one instance of a failover group to be deployed, rather than
* waiting for all instances to be deployed.
* When executing a plan, a node that has only incomplete FinishStart
* (soft) dependencies remaining will get signaled to check its Relations
* every time one of the node's dependencies change to a finished state.
*
* - FinishStartHard: This dependency type is the more traditional strict
* dependency one might expect in a dependency graph. When N1 depends
* on N2 with FinishStartHard type, N1 will not start until N2 finishes.
* A node will never get signaled to do any work or check its Relations as long
* as it has any FinishStartHard dependencies remaining in the graph. This
* dependency type is currently only created via `plan.addSavedDependencies`,
* which is used for saved/reanimated DOMs. Note that a node's Relations will
* not necessarily have or enforce these dependencies, so the graph must do all
* the enforcing.
*
* - StartStart: When N1 depends on N2 with a StartStart type, N1 will not start
* until N2 has started. These dependencies are created automatically
* between an Element and its parent and/or predecessors in the DOM via
* `plan.setParent`.
*/
/** Edge labels for the plan graph. Full semantics in the comment above. */
export enum DependencyType {
    /** "Soft" dependency; checked via the node's Relations */
    FinishStart = "FinishStart",
    /** Strict dependency; enforced entirely by the graph */
    FinishStartHard = "FinishStartHard",
    /** Dependent node cannot start until the other node has started */
    StartStart = "StartStart",
}
/** An edge in the execution plan graph, labeled with its dependency type. */
export interface EPEdge {
    edgeType: DependencyType;
}
/** One dependency: the target node's ID plus the edge type. */
export interface EPDependency {
    id: EPNodeId;
    type: DependencyType;
}
/** Map of node ID to that node's description and outgoing dependencies. */
export interface EPDependencies {
    [epNodeId: string]: {
        elementId?: ElementID;
        detail: string;
        deps: EPDependency[];
    };
}
/** Constructor options for ExecutionPlanImpl. */
export interface ExecutionPlanImplOptions {
    deployment: Deployment;
    deployOpID: DeployOpID;
    goalStatus: GoalStatus;
}
export class ExecutionPlanImpl implements ExecutionPlan {
    readonly deployment: Deployment;
    readonly deployOpID: DeployOpID;
    readonly goalStatus: GoalStatus;
    readonly helpers: DeployHelpersFactory;
    // graphlib graph with compound (parent/child) support enabled.
    protected graph = new Graph({ compound: true });
    protected nextWaitId = 0;
    // Memoized by computePrimitiveDependencies.
    protected _primitiveDependencies?: EPPrimitiveDependencies;
    protected waitInfoIds = new WeakMap<WaitInfo, string>();
    /** Nodes that are complete or have been replaced in the graph by actions */
    protected nonGraph = new Map<EPNodeId, EPNode>();

    constructor(options: ExecutionPlanImplOptions) {
        this.goalStatus = options.goalStatus;
        this.deployment = options.deployment;
        this.deployOpID = options.deployOpID;
        this.helpers = new DeployHelpersFactory(this, this.deployment);
    }
    /*
     * Public interfaces
     */

    /**
     * Validates the plan graph: cycles made solely of StartStart edges are
     * broken (safe), any other cycle is reported as a UserError.
     */
    check() {
        // findCycles reports IDs in the opposite direction than we want to
        // work with.
        const allCycles = alg.findCycles(this.graph).map((c) => c.reverse());
        if (allCycles.length === 0) return;

        const toReport: string[][] = [];
        // Cycles that consist solely of StartStart dependencies can be
        // safely broken.
        for (const c of allCycles) {
            if (this.isStartStartCycle(c)) this.breakCycle(c);
            else toReport.push(c);
        }
        if (toReport.length > 0) {
            const cycles = toReport.map(this.printCycleGroups).join("\n");
            if (debugExecute.enabled) {
                debugExecute(`Execution plan dependencies:\n${this.print()}`);
            }
            throw new UserError(`There are circular dependencies present in this deployment:\n${cycles}`);
        }
    }
/*
* Semi-private interfaces (for use by this file)
*/
addElem(element: AdaptMountedElement, goalStatus: GoalStatus): void {
if (!element || this.hasNode(element)) return;
const helpers = this.helpers.create(element);
const waitInfo = getWaitInfo(goalStatus, element, helpers);
const children = new Set<EPNode>();
const node: EPNode = { children, element, goalStatus, waitInfo };
this.addNode(node);
this.addWaitInfo(node, goalStatus);
}
    /**
     * Adds an Action to the plan as its own node and links it (via
     * setActingFor) to each Element it changes. Returns the new node, or
     * undefined when the Action is a no-op (ChangeType.none).
     */
    addAction(action: Action) {
        if (action.type === ChangeType.none) return undefined;

        const node: EPNode = {
            children: new Set<EPNode>(),
            goalStatus: changeTypeToGoalStatus(action.type),
            waitInfo: {
                description: action.detail,
                action: action.act,
                actingFor: action.changes,
                activeAction: true,
                deployedWhen: () => true,
                logAction: true,
            }
        };
        this.addNode(node);
        action.changes.forEach((c) => {
            if (c.type === ChangeType.none) return;

            this.addElem(c.element, changeTypeToGoalStatus(c.type));
            const elNode = this.getNode(c.element);
            // Affected Elements adopt the action's description and are
            // flagged as having an active action.
            elNode.waitInfo.activeAction = true;
            elNode.waitInfo.description = action.detail;
            this.setActingFor(node, c.element);
        });
        return node;
    }
    /**
     * Attaches a WaitInfo's dependencies to the plan. Accepts either a bare
     * WaitInfo (a node is created for it) or an existing EPNode. For each
     * Handle referenced by the WaitInfo's dependsOn, the associated Element
     * is added to the plan (if needed) and an edge is created.
     */
    addWaitInfo(nodeOrWI: WaitInfo | EPNode, goalStatus: GoalStatus) {
        let node: EPNode;
        let waitInfo: WaitInfo;
        if (isWaitInfo(nodeOrWI)) {
            node = {
                children: new Set<EPNode>(),
                goalStatus,
                waitInfo: nodeOrWI,
            };
            waitInfo = nodeOrWI;
            this.addNode(node);
        } else {
            node = nodeOrWI;
            waitInfo = node.waitInfo;
        }
        if (waitInfo.dependsOn) {
            const hands = relatedHandles(waitInfo.dependsOn);
            hands.forEach((h) => {
                if (!h.associated) {
                    // TODO: Add info about the handle, like traceback for
                    // where it was created.
                    throw new UserError(
                        `A Component dependsOn method returned a DependsOn ` +
                        `object '${waitInfo.description}' that contains ` +
                        `a Handle that is not associated with any Element`);
                }
                const el = toBuiltElem(h);
                if (el) {
                    // If el has already been added, its goal
                    // status won't change.
                    this.addElem(el, goalStatus);
                    this.addEdge(node, el);
                }
            });
        }
        return node;
    }
addSavedDependencies(saved: EPPrimitiveDependencies) {
for (const [id, depList] of Object.entries(saved)) {
const node = this.getNode(elIdToNodeId(id));
for (const dep of depList) {
this.addEdge(node, this.getNode(elIdToNodeId(dep)),
DependencyType.FinishStartHard);
}
}
}
/**
* Now used only in unit test. Should eventually be removed.
*/
updateElemWaitInfo(refresh = false) {
this.nodes.forEach((n) => {
const el = n.element;
if (el == null) return;
if (n.waitInfo != null && !refresh) throw new InternalError(`Expected EPNode.waitInfo to be null`);
const helpers = this.helpers.create(el);
n.waitInfo = getWaitInfo(n.goalStatus, el, helpers);
this.addWaitInfo(n, n.goalStatus);
});
}
    /** Inserts a node into the graph unless it is already present. */
    addNode(node: EPNode) {
        if (this.hasNode(node)) return;
        this.graph.setNode(this.getId(node, true), node);
    }
    /** Removes the dependency edge from `obj` to `dependsOn`. */
    removeDep(obj: EPObject, dependsOn: EPNode) {
        this.removeEdgeInternal(obj, dependsOn);
    }
    /**
     * Removes a node from the active graph, remembering it in `nonGraph` so
     * it still shows up via nonGraphNodes/allNodes.
     */
    removeNode(node: EPNode) {
        const id = this.getId(node);
        this.graph.removeNode(id);
        this.nonGraph.set(id, node);
    }
    /** Graph predecessors of `n`, optionally filtered by edge type. */
    predecessors(n: EPNode, edgeType: DependencyType | "all" = "all"): EPNode[] {
        return this.neighbors(n, edgeType, "predecessors");
    }
    /** Graph successors of `n`, optionally filtered by edge type. */
    successors(n: EPNode, edgeType: DependencyType | "all" = "all"): EPNode[] {
        return this.neighbors(n, edgeType, "successors");
    }
/**
* Returns the set of nodes that `n` is acting on behalf of. For an
* Element node, this will always be an empty array and for an Action
* node, there should always be at least one node in the array.
*/
actingFor(n: EPObject): EPNode[] {
const node = this.getNode(n);
const fols = node.waitInfo.actingFor?.map((c) => c.element) || [];
return fols.map(this.getNode);
}
setParent(n: EPObject, parent: EPObject) {
n = this.getNode(n);
parent = this.getNode(parent);
parent.children.add(n);
this.addEdge(n, parent, DependencyType.StartStart);
}
    /** Returns nodes active in the graph */
    get nodes(): EPNode[] {
        return this.graph.nodes().map(this.getNode);
    }
    /** Returns only nodes NOT active in the graph */
    get nonGraphNodes(): EPNode[] {
        return [ ...this.nonGraph.values() ];
    }
    /** Returns both graph and non-graph nodes */
    get allNodes(): EPNode[] {
        return [ ...this.nodes, ...this.nonGraph.values() ];
    }
    /** Mounted Elements of all active nodes that have one */
    get elems(): AdaptMountedElement[] {
        return this.nodes
            .map((n) => n.element)
            .filter(notNull);
    }
    /** Active nodes with no outgoing edges (graph sinks) */
    get leaves(): EPNode[] {
        return this.graph.sinks().map(this.getNode);
    }
    /** Node ID for `obj`, optionally creating one; throws when not found. */
    getId = (obj: EPObject, create = false): EPNodeId => {
        const id = this.getIdInternal(obj, create);
        if (!id) throw new Error(`ID not found (${idOrObjInfo(obj)})`);
        return id;
    }
    /** Looks up a node by ID or object; throws when not found. */
    getNode = (idOrObj: EPNodeId | EPObject): EPNode => {
        const node = this.getNodeInternal(idOrObj);
        if (!node) throw new Error(`Node not found (${idOrObjInfo(idOrObj)})`);
        return node;
    }
    /** True when `idOrObj` resolves to a known node. */
    hasNode = (idOrObj: EPNodeId | EPObject): boolean => {
        return this.getNodeInternal(idOrObj) != null;
    }
    /**
     * Exports the graph's edges as an EPDependencies map keyed by node ID,
     * filtered to the given edge type ("all" keeps every edge).
     */
    toDependencies(type: DependencyType | "all"): EPDependencies {
        // Human-readable label: WaitInfo description, else Element ID.
        const detail = (n: EPNode) => {
            const w = n.waitInfo;
            if (w) return w.description;
            else if (n.element) return n.element.id;
            return "unknown";
        };
        const getDeps = (node: EPNode, id: EPNodeId) => {
            const succs = this.neighbors(node, type, "successors");
            const deps: EPDependency[] = succs.map((n) => {
                const nId = this.getId(n);
                const edge = this.getEdgeInternal(id, nId);
                if (!edge) throw new InternalError(`Consistency check failed: successor without edge ${id}->${nId}`);
                return {
                    id: nId,
                    type: edge.edgeType,
                };
            });
            const entry: EPDependencies[string] = { detail: detail(node), deps };
            if (node.element) entry.elementId = node.element.id;
            return entry;
        };

        const ret: EPDependencies = {};
        // Topological order when possible; arbitrary order for cyclic graphs.
        const ids = alg.isAcyclic(this.graph) ?
            alg.topsort(this.graph) : this.graph.nodes();
        // Insert starting with leaves for a more human-readable ordering
        for (let i = ids.length - 1; i >= 0; i--) {
            const id = ids[i];
            const node = this.getNode(id);
            ret[id] = getDeps(node, id);
        }
        return ret;
    }
/**
 * Renders the plan as human-readable text: node IDs grouped by goal
 * status, each followed by its outgoing dependencies.
 */
print() {
  const epDeps = this.toDependencies("all");
  const depIDs = Object.keys(epDeps);
  if (depIDs.length === 0) return "<empty>";
  // Formats one node's successor list (or "<none>").
  const succs = (id: string) => {
    const list = epDeps[id] && epDeps[id].deps;
    if (!list || list.length === 0) return " <none>";
    return list.map((s) => ` ${name(s.id)} [${s.type}]`).join("\n");
  };
  // Node ID plus its WaitInfo description, when it has one.
  const name = (id: string) => {
    const w = this.getNode(id).waitInfo;
    if (w) id += ` (${w.description})`;
    return id;
  };
  const printDeps = (ids: string[]) =>
    ids
      .map((id) => ` ${name(id)}\n${succs(id)}`);
  // Bucket node IDs by their goal status.
  const byGoal: { [ goal: string ]: string[] | undefined } = {};
  const insert = (id: string, goal: string) => {
    const l = byGoal[goal] || [];
    l.push(id);
    byGoal[goal] = l;
  };
  for (const id of depIDs) {
    insert(id, this.getNode(id).goalStatus);
  }
  const lines: string[] = [];
  for (const goal of Object.keys(byGoal).sort()) {
    let gName = goal;
    try {
      // Use the in-progress verb (e.g. "Deploying") as the heading.
      gName = goalToInProgress(goal as any);
    } catch (e) { /* */ }
    lines.push(`${gName}:`, ...printDeps(byGoal[goal]!));
  }
  return lines.join("\n");
}
/** Cached result of computePrimitiveDependencies; throws if not yet computed. */
get primitiveDependencies(): EPPrimitiveDependencies {
  const deps = this._primitiveDependencies;
  if (!deps) {
    throw new InternalError(`Must call computePrimitiveDependencies before accessing primitiveDependencies`);
  }
  return deps;
}
/**
 * Computes the Element-level (primitive) dependency map from the node
 * graph and caches it in _primitiveDependencies. Only Elements present
 * in `dom` are included on either side of a dependency.
 */
computePrimitiveDependencies(dom: FinalDomElement | null): void {
  const domEls = new Set<AdaptMountedElement>();
  domForEach(dom, (el) => domEls.add(el));
  const isInDom = (n: EPNode) => n.element && domEls.has(n.element);
  // Working map: from-Element-ID -> Set of to-Element-IDs (deduped).
  const work = new Map<string, Set<string>>();
  const workFrom = (fromId: string) => {
    const exists = work.get(fromId);
    if (exists) return exists;
    const f = new Set<string>();
    work.set(fromId, f);
    return f;
  };
  // Records one Element-to-Element dependency; both nodes must have
  // an element (callers filter with isInDom first).
  const addDep = (from: EPNode, to: EPNode) => {
    if (!from.element) throw new InternalError(`Node '${this.getId(from)}' has no element `);
    const fromId = from.element.id;
    if (!to.element) throw new InternalError(`Node '${this.getId(to)}' has no element `);
    const toId = to.element.id;
    workFrom(fromId).add(toId);
  };
  // Topological order when possible; arbitrary order if cycles exist.
  const ids = alg.isAcyclic(this.graph) ?
    alg.topsort(this.graph) : this.graph.nodes();
  // Insert starting with leaves for a more human-readable ordering
  for (let i = ids.length - 1; i >= 0; i--) {
    const id = ids[i];
    const node = this.getNode(id);
    // Because the start of a node is gated by automatic StartStart
    // dependencies between parents and their children, we only need the
    // pruned dependencies for the "from" set of nodes (prune=true).
    const fromPrims = this.primitiveDependencyTargets(node, true)
      .filter(isInDom);
    const succs = this.neighbors(node, DependencyType.FinishStart, "successors")
      .concat(this.neighbors(node, DependencyType.FinishStartHard, "successors"));
    // The finish of a node doesn't necessarily include its children
    // because nodes can have custom deployedWhen functions. So ensure
    // we get the complete (non-pruned) set of nodes for the "to"
    // nodes (prune=false).
    const toPrims = flatten(succs.map((s) => this.primitiveDependencyTargets(s, false)))
      .filter(isInDom);
    // Cross product: every from-primitive depends on every to-primitive.
    for (const from of fromPrims) {
      for (const to of toPrims) {
        addDep(from, to);
      }
    }
  }
  // Freeze the working sets into plain arrays for the cached result.
  const finalDeps: EPPrimitiveDependencies = {};
  for (const [from, toSet] of work.entries()) {
    finalDeps[from] = [...toSet];
  }
  this._primitiveDependencies = finalDeps;
}
/*
* Class-internal methods
*/
/**
 * Resolves an EPObject to its node ID, if known. Mounted Elements map
 * via elToNodeId; WaitInfo objects get a generated "W:<n>" ID, created
 * on demand only when `create` is true. Returns undefined when a
 * WaitInfo has no ID yet and create is false.
 */
protected getIdInternal = (obj: EPObject, create = false): EPNodeId | undefined => {
  const wiId = (w: WaitInfo) => {
    let id = this.waitInfoIds.get(w);
    if (!id) {
      if (!create) return undefined;
      id = "W:" + this.nextWaitId++;
      this.waitInfoIds.set(w, id);
    }
    return id;
  };
  if (isMountedElement(obj)) return elToNodeId(obj);
  if (isWaitInfo(obj)) return wiId(obj);
  // EPNode cases: prefer the node's Element, then its WaitInfo.
  if (isMountedElement(obj.element)) return elToNodeId(obj.element);
  if (isWaitInfo(obj.waitInfo)) return wiId(obj.waitInfo);
  throw new InternalError(`Invalid object in getId (${obj})`);
}
/** Looks up a node by ID or object, checking the graph first, then the non-graph set. */
protected getNodeInternal = (idOrObj: EPNodeId | EPObject): EPNode | undefined => {
  let id: EPNodeId | undefined;
  if (typeof idOrObj === "string") id = idOrObj;
  else id = this.getIdInternal(idOrObj);
  if (!id) return undefined;
  const inGraph = this.graph.node(id);
  return inGraph || this.nonGraph.get(id);
}
/** Returns the edge between the two nodes, or undefined if either node or the edge is missing. */
protected getEdgeInternal =
  (idOrObj1: EPNodeId | EPObject, idOrObj2: EPNodeId | EPObject): EPEdge | undefined => {
    const fromNode = this.getNodeInternal(idOrObj1);
    const toNode = this.getNodeInternal(idOrObj2);
    if (!fromNode || !toNode) return undefined;
    const fromId = this.getIdInternal(fromNode);
    const toId = this.getIdInternal(toNode);
    if (!fromId || !toId) return undefined;
    return this.graph.edge(fromId, toId);
  }
/**
* The direction of the dependency has to be reversed for Destroy
* so that things are destroyed in "reverse order" (actually by
* walking the graph in the opposite order). But a single graph
* contains some things that are being Deployed and some that are
* being Destroyed.
* The arguments to the function (obj, dependsOn) identify two EPNodes.
* Each of those two EPNodes could have goalStatus Deployed or Destroyed,
* so there are 4 possible combinations:
* A) Deployed, Deployed
* This is the simple case where `dependsOn` should be Deployed
* before `obj` is Deployed. The edge is `obj` -> `dependsOn`.
* B) Destroyed, Destroyed
* Also simple. If `dependsOn` must be Deployed before `obj`, then
* it's reversed for Destroyed and `obj` must be Destroyed before
* `dependsOn`. The edge is `dependsOn` -> `obj`.
* C) Destroyed, Deployed
* The valid way this can happen when used with an actual old DOM
* and new DOM is that `obj` is from the old DOM. The new DOM does
* not contain this node and therefore *cannot* have a dependency
* on it. The dependency here can be ignored safely. No edge.
* D) Deployed, Destroyed
* This doesn't make sense right now because there's not really a
* way for a "living" component in the new DOM to get a reference
* to something being deleted from the old DOM. This is currently
* an error.
*/
protected addEdge(obj: EPObject, dependsOn: EPObject, edgeType = DependencyType.FinishStart) {
  obj = this.getNode(obj);
  dependsOn = this.getNode(dependsOn);
  let a: EPNode; // edge source
  let b: EPNode; // edge target
  if (edgeType === DependencyType.StartStart) {
    // Parent/child StartStart edges are never reversed for Destroy.
    a = obj;
    b = dependsOn;
  } else {
    // See the goal-pair cases (A-D) in the comment above this method.
    const goals = `${obj.goalStatus},${dependsOn.goalStatus}`;
    switch (goals) {
      case "Deployed,Deployed": a = obj; b = dependsOn; break; // case A
      case "Destroyed,Destroyed": a = dependsOn; b = obj; break; // case B
      case "Destroyed,Deployed": return; // case C: Intentionally no edge
      case "Deployed,Destroyed": // case D: currently invalid
      default:
        throw new InternalError(`Unable to create dependency for ` +
          `invalid goal pair '${goals}'`);
    }
  }
  this.addEdgeInternal(a, b, edgeType);
}
/**
 * Adds (or upgrades) an edge obj -> dependsOn. Duplicate edges of the
 * same type are no-ops; a FinishStart edge may be upgraded to the more
 * restrictive FinishStartHard; mixing StartStart with FinishStart* on
 * the same pair is an error.
 */
protected addEdgeInternal(obj: EPObject, dependsOn: EPObject, edgeType: DependencyType) {
  const e: EPEdge = { edgeType };
  const objId = this.getId(obj);
  const depId = this.getId(dependsOn);
  const existing: EPEdge | undefined = this.graph.edge(objId, depId);
  if (existing) {
    // Dispatch on the (existing, requested) type pair.
    const pair = `${existing.edgeType},${edgeType}`;
    switch (pair) {
      case "FinishStart,FinishStart":
      case "FinishStartHard,FinishStartHard":
      case "StartStart,StartStart":
        return; // No change requested
      case "FinishStart,FinishStartHard":
        break; // Allowed to upgrade to more restrictive type
      case "FinishStartHard,FinishStart":
        return; // Leave it at the more restrictive type
      case "FinishStart,StartStart":
      case "FinishStartHard,StartStart":
      case "StartStart,FinishStart":
      case "StartStart,FinishStartHard":
        throw new Error(`Attempt to add multiple dependencies between the same Elements. ` +
          `DependencyTypes=${pair} ${objId}->${depId}`);
      default:
        throw new InternalError(`Unhandled dependency types: ${pair}`);
    }
  }
  this.graph.setEdge(objId, depId, e);
}
/** Removes the edge obj -> dependsOn from the graph. */
protected removeEdgeInternal(obj: EPObject, dependsOn: EPObject) {
  const fromId = this.getId(obj);
  const toId = this.getId(dependsOn);
  this.graph.removeEdge(fromId, toId);
}
/**
* Retrieve predecessors or successors from the graph, optionally
* filtered for a particular DependencyType.
*/
protected neighbors(n: EPNode, edgeType: DependencyType | "all",
  which: "predecessors" | "successors"): EPNode[] {
  const nId = this.getId(n);
  // Nodes that have been pulled out of the graph have no neighbors
  if (this.nonGraph.has(nId)) return [];
  let nbors = this.graph[which](nId);
  if (nbors == null) throw new InternalError(`Requested node that's not in graph id=${nId}`);
  if (edgeType !== "all") {
    // Edge direction depends on which side of the relationship n is on:
    // for predecessors the edge runs neighbor -> n; for successors, n -> neighbor.
    const getEdge = (nborId: string): EPEdge => {
      const from = which === "predecessors" ? nborId : nId;
      const to = which === "predecessors" ? nId : nborId;
      const e = this.graph.edge(from, to);
      if (!e) throw new InternalError(`No edge '${from}' -> '${to}' but in ${which}`);
      return e;
    };
    // Keep only neighbors connected by the requested DependencyType.
    nbors = nbors.filter((nbor) => getEdge(nbor).edgeType === edgeType);
  }
  return nbors.map(this.getNode);
}
/**
* Given an Element node N, returns the set of primitive Element
* nodes that are descendents of N (including N if it's a
* primitive Element node).
* If `prune` is true, stop recursively traversing descendents at the first
* primitive Element node found.
*/
protected primitiveDependencyTargets = (n: EPNode, prune: boolean): EPNode[] => {
  const found = new Set<EPNode>();
  // Depth-first walk collecting primitive Element nodes. When `prune`
  // is set, recursion stops at the first primitive found on a path.
  const visit = (cur: EPNode): void => {
    const el = cur.element;
    if (!el) return;
    if (isPrimitiveElement(el)) {
      found.add(cur);
      if (prune) return;
    }
    for (const child of cur.children) visit(child);
  };
  visit(n);
  return [...found];
}
/**
 * Formats one cycle of node IDs for an error report: a short arrow
 * chain showing the dependency types, plus a per-node detail section.
 */
protected printCycleGroups = (group: string[]) => {
  if (group.length < 1) throw new InternalError(`Cycle group with no members`);
  // One-line description of a node, optionally right-padded.
  const nodeDesc = (n: EPNode, pad = 0) => {
    const desc = n.element?.path || `Action: ${n.waitInfo.description}`;
    return desc.padEnd(pad);
  };
  // Repeat the first node at the end to show the cycle closing.
  const ids = [...group, group[0]];
  const nodes = ids.map(this.getNode);
  // Pad descriptions to the longest one, capped at 65 columns.
  let padLen = nodes.map((n) => nodeDesc(n).length).reduce((max, cur) => Math.max(max, cur));
  padLen = Math.min(padLen, 65);
  // The dependency type from (i-1) to (i).
  const depType = (i: number) => {
    if (i === 0) return "";
    const e = this.getEdgeInternal(ids[i - 1], ids[i]);
    return `[${e?.edgeType || "??"}]`;
  };
  const short = (n: EPNode, i: number) => {
    const dt = depType(i);
    // The final (repeated) entry shows index 0 again.
    const idx = i === ids.length - 1 ? 0 : i;
    return `${idx.toString().padStart(2)}: ${nodeDesc(n, padLen)} ${dt}`;
  };
  const detail = (n: EPNode, i: number) => {
    const info = [];
    // slice(2) strips the "W:" prefix from generated WaitInfo IDs.
    const niceId = n.element ? n.element.id : this.getId(n).slice(2);
    info.push(`${i.toString().padStart(2)}: ${nodeDesc(n)}`);
    if (n.element) info.push(` key: ${n.element.props.key}`);
    info.push(` id: ${niceId}`);
    return info.join("\n");
  };
  let output = `Dependencies:\n ${nodes.map(short).join("\n -> ")}\n`;
  // Don't repeat the duplicated first node in the details section.
  nodes.pop();
  output += `Details:\n${nodes.map(detail).join("\n")}`;
  return output;
}
/**
* Actions always act on behalf of one or more Elements. The Action node
* replaces the Element nodes in the graph. This means that all of the
* Element nodes' dependencies are moved over to the Action node and the
* Element nodes are then removed from the graph.
*/
protected setActingFor(actionNode: EPNode, elNode: EPObject) {
  elNode = this.getNode(elNode);
  // Move elNode's outgoing edges (its dependencies) onto the Action node.
  this.successors(elNode).forEach((succ) => {
    const e = this.getEdgeInternal(elNode, succ);
    if (!e) {
      throw new InternalError(`Internal consistency check failed. ` +
        `node has a successor, but no edge`);
    }
    this.removeEdgeInternal(elNode, succ);
    // Don't create a circular dependency to the actionNode. This can
    // happen when a single Action serves multiple Elements that have
    // dependencies between each other. Simply ignore those.
    if (actionNode === succ) return;
    this.addEdgeInternal(actionNode, succ, e.edgeType);
  });
  // Move elNode's incoming edges (its dependents) onto the Action node.
  this.predecessors(elNode).forEach((pred) => {
    const e = this.getEdgeInternal(pred, elNode);
    if (!e) {
      throw new InternalError(`Internal consistency check failed. ` +
        `node has a predecessor, but no edge`);
    }
    this.removeEdgeInternal(pred, elNode);
    // Don't create a circular dependency to the actionNode. This can
    // happen when a single Action serves multiple Elements that have
    // dependencies between each other. Simply ignore those.
    if (actionNode === pred) return;
    this.addEdgeInternal(pred, actionNode, e.edgeType);
  });
  // The Element node is fully replaced by the Action node.
  this.removeNode(elNode);
}
/** True when every edge in the cycle is a StartStart dependency. */
protected isStartStartCycle(cycle: string[]) {
  return cycle.every((id, i) => {
    const next = cycle[(i + 1) % cycle.length];
    const e = this.getEdgeInternal(id, next);
    return e?.edgeType === DependencyType.StartStart;
  });
}
/** Breaks a StartStart-only cycle by removing its first edge. */
protected breakCycle(cycle: string[]) {
  const first = cycle[0];
  const second = cycle[1];
  debugExecuteDetail(`Breaking StartStart Cycle by removing ${first} -> ${second}`);
  this.removeEdgeInternal(this.getNode(first), this.getNode(second));
}
}
/** Type guard: is `val` an ExecutionPlanImpl instance? */
export function isExecutionPlanImpl(val: any): val is ExecutionPlanImpl {
  if (!isObject(val)) return false;
  return val instanceof ExecutionPlanImpl;
}
/** Logs an execute-level debug message, prefixed with the padded node ID. */
function debugExecId(id: string, ...args: any[]) {
  // `id` is already typed string, so the previous `(id as any)` cast was
  // unnecessary; padEnd is available directly.
  debugExecute(`* ${id.padEnd(26)}`, ...args);
}
/** Logs a detail-level debug message, prefixed with the padded node ID. */
function debugExecDetailId(id: string, ...args: any[]) {
  // `id` is already typed string, so the previous `(id as any)` cast was
  // unnecessary; padEnd is available directly.
  debugExecuteDetail(`* ${id.padEnd(26)}`, ...args);
}
// Defaults applied to ExecuteOptions when the caller omits them.
const defaultExecuteOptions = {
  concurrency: Infinity, // run any number of ready nodes at once
  ignoreDeleteErrors: false,
  dryRun: false,
  pollDelayMs: 1000, // delay between execution passes
  timeoutMs: 0, // 0 = no overall deploy timeout
};
/**
 * Top-level plan execution: repeatedly runs executePass (one step per
 * poll interval) until the deployment reaches a final status, a state
 * change forces a rebuild, or the overall timeout expires.
 */
export async function execute(options: ExecuteOptions): Promise<ExecuteComplete> {
  const opts = { ...defaultExecuteOptions, ...options };
  const plan = opts.plan;
  // Absolute deadline for the whole operation; 0 means no timeout.
  const timeoutTime = opts.timeoutMs ? Date.now() + opts.timeoutMs : 0;
  if (!isExecutionPlanImpl(plan)) throw new InternalError(`plan is not an ExecutionPlanImpl`);
  const deployOpID = plan.deployOpID;
  const nodeStatus = await createStatusTracker({
    deployment: plan.deployment,
    deployOpID,
    dryRun: opts.dryRun,
    goalStatus: plan.goalStatus,
    nodes: plan.allNodes,
    taskObserver: opts.taskObserver,
  });
  plan.helpers.nodeStatus = nodeStatus;
  try {
    while (true) {
      // "DR" = dry run (no persistent step ID exists).
      const stepNum = nodeStatus.stepID ? nodeStatus.stepID.deployStepNum : "DR";
      const stepStr = `${deployOpID}.${stepNum}`;
      debugExecute(`\n\n-----------------------------\n\n` +
        `**** Starting execution step ${stepStr}`);
      debugExecute(`\nExecution plan:\n${plan.print()}`);
      await executePass({ ...opts, nodeStatus, timeoutTime });
      const { stateChanged } = await opts.processStateUpdates();
      const ret = await nodeStatus.complete(stateChanged);
      debugExecute(`**** execution step ${stepStr} status: ${ret.deploymentStatus}\nSummary:`,
        inspect(ret), "\n", nodeStatus.debug(plan.getId), "\n-----------------------------\n\n");
      // Keep polling until we're done or the state changes, which means
      // we should do a re-build.
      if (ret.deploymentStatus === DeployOpStatus.StateChanged ||
        isFinalStatus(ret.deploymentStatus)) {
        debugExecute(`**** Execution completed`);
        return ret;
      }
      await sleep(opts.pollDelayMs);
    }
  } catch (err) {
    err = ensureError(err);
    opts.logger.error(`Deploy operation failed: ${err.message}`);
    let stateChanged = false;
    try {
      // Still flush pending state updates so a rebuild can follow the failure.
      const upd = await opts.processStateUpdates();
      if (upd.stateChanged) stateChanged = true;
    } catch (err2) {
      err2 = ensureError(err2);
      opts.logger.error(`Error processing state updates during error handling: ${err2.message}`);
    }
    debugExecute(`**** Execution failed:`, inspect(err));
    if (err.name === "TimeoutError") {
      // On timeout, mark every non-final node Failed and report a normal
      // (failed) completion instead of throwing.
      await Promise.all(
        plan.allNodes
          .filter((n) => !nodeIsFinal(n, nodeStatus))
          .map((n) => nodeStatus.set(n, DeployStatus.Failed, err))
      );
      return nodeStatus.complete(stateChanged);
    } else {
      throw err;
    }
  }
}
/**
 * Runs a single execution pass over the plan: queues every ready node,
 * runs its action (unless dryRun), evaluates its deployedWhen, and
 * propagates status to dependents until the work queue drains or the
 * pass times out. Called repeatedly by execute().
 */
export async function executePass(opts: ExecutePassOptions) {
  const { dryRun, logger, nodeStatus, plan } = opts;
  if (!isExecutionPlanImpl(plan)) throw new InternalError(`plan is not an ExecutionPlanImpl`);
  // Per-node-ID locks so each node's state machine advances one step at a time.
  const locks = new AsyncLock();
  const queue = new PQueue({ concurrency: opts.concurrency });
  let stopExecuting = false;
  const dwQueue = new DeployedWhenQueue(debugExecDetailId);
  const fatalErrors: Error[] = [];
  // If an action is on behalf of some Elements, those nodes take on
  // the status of the action in certain cases.
  const signalActingFor = async (node: EPNode, stat: DeployStatusExt, err: Error | undefined) => {
    const w = node.waitInfo;
    if (!w || !w.actingFor) return;
    if (shouldNotifyActingFor(stat)) {
      await Promise.all(w.actingFor.map(async (c) => {
        const n = plan.getNode(c.element);
        if (!nodeStatus.isActive(n)) return;
        // Proxied Elements show Proxy* states while the Action runs.
        const s =
          err ? err :
          stat === DeployStatusExt.Deploying ? DeployStatusExt.ProxyDeploying :
          stat === DeployStatusExt.Destroying ? DeployStatusExt.ProxyDestroying :
          stat;
        await updateStatus(n, s, c.detail);
      }));
    }
    // Queue the actingFor Elements for successful final states so they
    // can run their deployedWhen and possibly also transition to a
    // final state.
    if (shouldQueueActingFor(stat)) {
      w.actingFor
        .map((c) => plan.getNode(c.element))
        .forEach(queueRun);
    }
  };
  // Notifies predecessors whose dependency on `n` may now be satisfied.
  const signalPreds = (n: EPNode, stat: DeployStatusExt) => {
    let toSignal: EPNode[];
    if (isFinalStatus(stat)) {
      // Signal predecessors that depend on our Finish
      toSignal = plan.predecessors(n, DependencyType.FinishStart)
        .concat(plan.predecessors(n, DependencyType.FinishStartHard));
    } else if (isInProgress(stat) || isProxying(stat)) {
      // Signal predecessors that depend on our Start
      toSignal = plan.predecessors(n, DependencyType.StartStart);
    } else {
      return;
    }
    // Each toSignal dependency has been satisfied. In successful cases,
    // remove all the dependencies onto `n`.
    // In the error case, leave dependencies in place so the
    // predecessors can use those relationships to realize they depend
    // on an errored node and signal their predecessors.
    if (stat !== DeployStatus.Failed) {
      toSignal.forEach((pred) => plan.removeDep(pred, n));
    }
    toSignal.forEach(queueRun);
  };
  // Any error that escapes the queue stops the whole pass.
  const fatalError = (err: any) => {
    stopExecuting = true;
    fatalErrors.push(ensureError(err));
  };
  const queueRun = (n: EPNode) => queue.add(() => run(n)).catch(fatalError);
  const run = async (n: EPNode) => {
    const id = plan.getId(n);
    await locks.acquire(id, () => runLocked(n, id));
  };
  // Core per-node state machine step. Runs with the node's lock held.
  const runLocked = async (n: EPNode, id: EPNodeId) => {
    let errorLogged = false;
    try {
      if (stopExecuting) return debugExecId(id, `TIMED OUT: Can't start task`);
      const stat = nodeStatus.get(n);
      if (isFinalStatus(stat)) return debugExecId(id, `Already complete`);
      if (!(isWaiting(stat) || isInProgress(stat))) {
        throw new InternalError(`Unexpected node status ${stat}: ${id}`);
      }
      if (!dependenciesMet(n, id)) return;
      debugExecId(id, ` Dependencies met`);
      const w = n.waitInfo;
      if (w) {
        let errorIgnored = false;
        // Only run the action on the first (Waiting -> in progress) transition.
        if (!(isInProgress(stat) || isProxying(stat))) {
          await updateStatus(n, goalToInProgress(n.goalStatus)); // now in progress
          if (w.action) {
            debugExecId(id, `ACTION: Doing ${w.description}`);
            if (w.logAction) logger.info(`Doing ${w.description}`);
            try {
              if (!dryRun) await w.action();
            } catch (err) {
              // Delete failures may be tolerated when ignoreDeleteErrors is set.
              if (n.goalStatus === GoalStatus.Destroyed && opts.ignoreDeleteErrors) {
                errorIgnored = true;
                logger.warning(`--Error (ignored) while ${w.description}\n${err}\n----------`);
              } else {
                logger.error(`--Error while ${w.description}\n${err}\n----------`);
                errorLogged = true;
                throw err;
              }
            }
          }
        }
        if (!errorIgnored) {
          // Not done until deployedWhen says so; otherwise record the wait
          // status and re-check later via the DeployedWhenQueue.
          const wStat = await w.deployedWhen(n.goalStatus);
          if (wStat !== true) {
            const statStr = waitStatusToString(wStat);
            debugExecId(id, `NOT COMPLETE: ${w.description}: ${statStr}`);
            nodeStatus.output(n, statStr);
            dwQueue.enqueue(n, id, wStat);
            return;
          }
        }
        debugExecId(id, `COMPLETE${errorIgnored ? "(error ignored)" : ""}: ${w.description}`);
      } else {
        debugExecId(id, ` No wait info`);
        // Go through normal state transition to
        // trigger correct downstream events to TaskObservers.
        await updateStatus(n, goalToInProgress(n.goalStatus));
      }
      await updateStatus(n, n.goalStatus);
      plan.removeNode(n);
    } catch (err) {
      err = ensureError(err);
      debugExecId(id, `FAILED: ${err}`);
      await updateStatus(n, err);
      if (!errorLogged) {
        logger.error(`Error while ${goalToInProgress(n.goalStatus).toLowerCase()} ` +
          `${nodeDescription(n)}: ${formatUserError(err)}`);
      }
      if (err.name === "InternalError") throw err;
    }
  };
  // Records a node's new status (or error) and fires the resulting
  // signals. Returns whether the status actually changed.
  const updateStatus = async (n: EPNode, stat: DeployStatusExt | Error,
    description?: string): Promise<boolean> => {
    if (stopExecuting) return false;
    const { err, deployStatus } = isError(stat) ?
      { err: stat, deployStatus: DeployStatus.Failed } :
      { err: undefined, deployStatus: stat };
    debugExecId(plan.getId(n), `STATUS: ${deployStatus}${err ? ": " + err : ""}`);
    const changed = await nodeStatus.set(n, deployStatus, err, description);
    if (changed) {
      await signalActingFor(n, deployStatus, err);
      signalPreds(n, deployStatus);
      // Wake any nodes whose deployedWhen was waiting on this Element.
      if (isFinalStatus(deployStatus) && n.element) {
        dwQueue.completed(n.element, queueRun);
      }
    }
    return changed;
  };
  const mkIdStr = (ids: EPNodeId[]) => ids.join(" > ");
  // Evaluates the "soft" (Relation-based) dependencies for a node.
  const softDepsReady = (n: EPNode, ids: EPNodeId[]) => {
    // If this node is being Deployed, just look at its own WaitInfo
    if (n.goalStatus === DeployStatus.Deployed) {
      return waitIsReady(n, false, ids);
    }
    // But if the node is being Destroyed, we instead evaluate all of our
    // successors' WaitInfos, each in the inverse direction.
    const succs = plan.successors(n, DependencyType.FinishStart)
      .concat(plan.successors(n, DependencyType.FinishStartHard));
    debugExecDetailId(mkIdStr(ids), ` Evaluating: ${succs.length} successors`);
    for (const s of succs) {
      // TODO: There probably needs to be a check here comparing
      // goalStatus for s and n, similar to addEdge.
      const sId = plan.getId(s);
      if (!waitIsReady(s, true, [...ids, sId])) return false;
      // Evaluate successor's actingFor the same way
      const actingFor = plan.actingFor(s);
      for (const a of actingFor) {
        const aId = plan.getId(a);
        if (!waitIsReady(a, true, [...ids, sId, aId])) return false;
      }
    }
    return true;
  };
  // Checks a single node's dependsOn Relation (inverted for Destroy).
  const waitIsReady = (n: EPNode, invert: boolean, ids: EPNodeId[]) => {
    const w = n.waitInfo;
    let dep = w && w.dependsOn;
    if (invert && dep) dep = relationInverse(dep);
    // Debug-enabled path produces the same result but logs details.
    if (debugExecute.enabled) {
      const idStr = mkIdStr(ids);
      const desc = !w ? "no soft dep" :
        dep ? `soft dep (${w.description}) - Relation${invert ? " (inverted)" : ""}: ${relationToString(dep)}` :
        `no soft dep (${w.description})`;
      debugExecDetailId(idStr, ` Evaluating: ${desc}`);
      if (!dep) return true;
      const relStatus = relationIsReadyStatus(dep);
      debugExecId(idStr, ` Relation status:`, relStatus === true ? "READY" : relStatus);
      return relStatus === true;
    }
    return dep ? relationIsReady(dep) : true;
  };
  // True when all hard and soft dependencies of `n` are satisfied.
  const dependenciesMet = (n: EPNode, id: EPNodeId): boolean => {
    const hardDeps = plan.successors(n, DependencyType.FinishStartHard);
    // Check for errors in our dependencies first. Throws on errored dep.
    hardDeps.forEach((d) => nodeIsDeployed(d, id, nodeStatus));
    if (hardDeps.length > 0) {
      debugExecId(id, `Dependencies not met: ${hardDeps.length} FinishStartHard dependencies remaining`);
      return false;
    }
    if (!softDepsReady(n, [id])) {
      debugExecId(id, `Dependencies not met: FinishStart (soft) dependencies remaining`);
      return false;
    }
    const actingFor = plan.actingFor(n);
    debugExecDetailId(id, ` Evaluating: ${actingFor.length} actingFor elements`);
    for (const a of actingFor) {
      const aStat = nodeStatus.get(a);
      const aId = plan.getId(a);
      if (!isWaiting(aStat)) {
        throw new InternalError(`Invalid status ${aStat} for actingFor element ${aId}`);
      }
      if (!softDepsReady(a, [id, aId])) {
        debugExecId(id, `Dependencies not met: actingFor elements have incomplete dependencies`);
        return false;
      }
    }
    return true;
  };
  /*
   * Main execute code path
   */
  try {
    // Queue any non-graph nodes that are already in progress so they
    // can check their completions.
    plan.nonGraphNodes.filter((n) => nodeIsActive(n, nodeStatus)).forEach(queueRun);
    // Queue the leaf nodes that have no dependencies
    plan.leaves.forEach(queueRun);
    // Then wait for all promises to resolve
    let pIdle = queue.onIdle();
    if (opts.timeoutMs && opts.timeoutTime) {
      const msg = `Deploy operation timed out after ${opts.timeoutMs / 1000} seconds`;
      const timeLeft = opts.timeoutTime - Date.now();
      if (timeLeft <= 0) throw new pTimeout.TimeoutError(msg);
      pIdle = pTimeout(pIdle, timeLeft, msg);
    }
    await pIdle;
  } catch (err) {
    fatalError(err);
  }
  if (fatalErrors.length > 1) throw new MultiError(fatalErrors);
  else if (fatalErrors.length === 1) throw fatalErrors[0];
}
/** Should the Elements an Action is acting for be notified of this status change? */
function shouldNotifyActingFor(status: DeployStatusExt) {
  return (
    status === DeployStatus.Deploying ||
    status === DeployStatus.Destroying ||
    //status === DeployStatus.Retrying ||
    status === DeployStatus.Failed
  );
}
/** Should actingFor Elements be queued to run after this (final, successful) status? */
function shouldQueueActingFor(status: DeployStatusExt) {
  return (
    status === DeployStatus.Deployed ||
    status === DeployStatus.Destroyed
  );
}
/** True for statuses that mean the node has not yet done its own work. */
function isWaiting(stat: DeployStatusExt) {
  switch (stat) {
    case DeployStatusExt.Waiting:
    case DeployStatusExt.ProxyDeploying:
    case DeployStatusExt.ProxyDestroying:
      return true;
    default:
      return false;
  }
}
/** Maps a computed ChangeType to the GoalStatus it implies. */
function changeTypeToGoalStatus(ct: ChangeType): GoalStatus {
  if (ct === ChangeType.delete) return DeployStatus.Destroyed;
  if (
    ct === ChangeType.none ||
    ct === ChangeType.create ||
    ct === ChangeType.modify ||
    ct === ChangeType.replace
  ) {
    return DeployStatus.Deployed;
  }
  throw new InternalError(`Bad ChangeType '${ct}'`);
}
/** Passes WaitInfo objects through unchanged; otherwise resolves to a built Element. */
function toBuiltElemOrWaitInfo(val: Handle | AdaptMountedElement | WaitInfo): AdaptMountedElement | WaitInfo | null {
  if (isWaitInfo(val)) return val;
  return toBuiltElem(val);
}
/**
 * Resolves a Handle or Element to its final built Element by following
 * the handle chain. Returns null when the handle built to null. Throws
 * on invalid input or when no built Element exists for the handle.
 */
function toBuiltElem(val: Handle | AdaptMountedElement): AdaptMountedElement | null {
  if (isMountedElement(val)) {
    if (val.built()) return val;
    // Not the final build output; chase the handle for what it built to.
    val = val.props.handle;
  }
  if (!isHandle(val)) {
    throw new Error(`Attempt to convert an invalid object to Element or WaitInfo: ${inspect(val)}`);
  }
  const elem = val.nextMounted((el) => isMountedElement(el) && el.built());
  // undefined = no built Element at all (internal error); note the
  // return type still permits null, which callers treat as a valid result.
  if (elem === undefined) throw new InternalError("Handle has no built Element!");
  return elem;
}
/**
 * Returns true when node `n` has reached its goal status, false while
 * it is still waiting or in progress. Throws UserError if the node
 * failed, InternalError on any other (unexpected) status.
 */
function nodeIsDeployed(n: EPNode, id: EPNodeId, tracker: StatusTracker): boolean {
  const sStat = tracker.get(n);
  if (sStat === n.goalStatus) return true; // Dependency met
  if (sStat === DeployStatusExt.Failed) {
    throw new UserError(`A dependency failed to deploy successfully`);
  }
  if (isWaiting(sStat) || isInProgress(sStat)) return false;
  throw new InternalError(`Invalid status ${sStat} for ${id}`);
}
/** Human-readable description of a node for log and error messages. */
function nodeDescription(n: EPNode): string {
  const w = n.waitInfo;
  if (w) return w.description;
  const el = n.element;
  if (el) return `${el.componentName} (id=${el.id})`;
  return "Unknown node";
}
/** True when the node is currently doing work or proxying for an Action. */
function nodeIsActive(n: EPNode, tracker: StatusTracker): boolean {
  const current = tracker.get(n);
  return isInProgress(current) || isProxying(current);
}
/** True when the node's tracked status is a final one. */
function nodeIsFinal(n: EPNode, tracker: StatusTracker): boolean {
  return isFinalStatus(tracker.get(n));
}
/** Best-effort short description of an ID or EPObject for error messages. */
function idOrObjInfo(idOrObj: EPNodeId | EPObject) {
  if (typeof idOrObj === "string") return idOrObj;
  if (isWaitInfo(idOrObj)) return idOrObj.description;
  if (isMountedElement(idOrObj)) return idOrObj.id;
  if (idOrObj.element) return idOrObj.element.id;
  if (idOrObj.waitInfo) return idOrObj.waitInfo.description;
  return "unknown";
}
/**
 * Builds the DeployHelpers objects (elementStatus / isDeployed /
 * dependsOn) handed to mounted Elements during plan execution, bound to
 * a specific ExecutionPlanImpl.
 */
class DeployHelpersFactory {
  elementStatus: <S extends Status = Status>(handle: Handle) => Promise<S | Status | undefined>;
  // Only valid while a plan is executing; set via the nodeStatus setter.
  protected nodeStatus_: StatusTracker | null = null;
  // NOTE(review): `deployment` is unused in this visible code — confirm
  // whether callers still need to pass it.
  constructor(protected plan: ExecutionPlanImpl, deployment: Deployment) {
    this.elementStatus = makeElementStatus();
  }
  get nodeStatus() {
    if (this.nodeStatus_ == null) {
      throw new Error(`Cannot get nodeStatus except during plan execution`);
    }
    return this.nodeStatus_;
  }
  set nodeStatus(t: StatusTracker) {
    this.nodeStatus_ = t;
  }
  // True when dependency `d` is satisfied. Relations are evaluated
  // directly; Handles/Elements are resolved to their plan node.
  isDeployed = (d: Dependency) => {
    if (isRelation(d)) return relationIsReady(d);
    const elOrWait = toBuiltElemOrWaitInfo(d);
    if (elOrWait === null) return true; // Handle built to null - null is deployed
    const n = this.plan.getNode(elOrWait);
    return nodeIsDeployed(n, this.plan.getId(n), this.nodeStatus);
  }
  // dependsOn helper for one component: AND of Edge relations from
  // `current` to each given handle, using isDeployed as the readiness test.
  makeDependsOn = (current: Handle) => (hands: Handle | Handle[]): Relation => {
    const toEdge = (h: Handle) => Edge(current, h, this.isDeployed);
    return And(...toArray(hands).map(toEdge));
  }
  // The DeployHelpers object handed to a single mounted Element.
  create = (elem: AdaptMountedElement): DeployHelpers => ({
    elementStatus: this.elementStatus,
    isDeployed: this.isDeployed,
    dependsOn: this.makeDependsOn(elem.props.handle),
  })
}
import { DTGComponentPropertySchema } from '../typing';
// Shared radio-control config for horizontal alignment (justify-content
// style values); spread into slot-item schemas that need an `align` field.
const AlignConfigs = {
  'ui:type': 'radio',
  'ui:props': {
    options: [
      { label: '左对齐', value: 'flex-start' },
      { label: '右对齐', value: 'flex-end' },
      { label: '居中对齐', value: 'center' },
      { label: '两端对齐', value: 'space-between' },
      { label: '等间对齐', value: 'space-around' },
    ],
  },
};
// Schema for a single header/footer slot item. The `type` field selects
// the slot kind; every other field is shown only for the matching kind
// (see each `visible` callback).
const SlotItemConfigs = [
  // Slot kind selector.
  {
    name: 'type',
    'ui:title': '类型',
    'ui:type': 'select',
    'ui:props': {
      options: [
        { label: '列展示筛选器', value: 'display-column-selector' },
        { label: '空白栏', value: 'spacer' },
        { label: '文本框', value: 'text' },
        { label: '自定义插槽', value: 'slot' },
        { label: '搜索框', value: 'search' },
        { label: '添加按钮', value: 'insert-button' },
      ],
    },
  },
  // display-column-selector: button style variant.
  {
    name: 'selectorButtonType',
    'ui:title': '列选择器按钮类型',
    'ui:type': 'radio',
    'ui:props': {
      options: [
        { label: '主按钮', value: 'primary' },
        { label: '虚线按钮', value: 'dashed' },
      ],
    },
    type: 'string',
    default: 'primary',
    visible: (value: string, formData?: Record<string, unknown>) => formData?.type === 'display-column-selector',
  },
  // display-column-selector: button label.
  {
    name: 'selectorButtonText',
    'ui:title': '列选择器按钮名称',
    'ui:type': 'input',
    'ui:props': {
      placeholder: '请输入按钮名称',
    },
    type: 'string',
    visible: (value: string, formData?: Record<string, unknown>) => formData?.type === 'display-column-selector',
  },
  // spacer: fixed width of the blank area.
  {
    name: 'style.width',
    'ui:title': '宽度',
    'ui:type': 'input',
    'ui:props': {
      placeholder: '请输入宽度',
    },
    type: 'string',
    visible: (value: string, formData?: Record<string, unknown>) => formData?.type === 'spacer',
  },
  // slot: name of the custom slot to render (options supplied at runtime).
  {
    name: 'slot',
    'ui:title': '插槽名称',
    'ui:type': 'auto-complete',
    'ui:props': {
      placeholder: '请选择插槽名称',
      optionsParam: '$$SLOT_NAME_OPTIONS$$',
    },
    type: 'string',
    visible: (value: string, formData?: Record<string, unknown>) => formData?.type === 'slot',
  },
  // text: the title text to display.
  {
    name: 'text',
    'ui:title': '标题名称',
    'ui:type': 'input',
    'ui:props': {
      placeholder: '请输入标题名称',
    },
    type: 'string',
    visible: (value: string, formData?: Record<string, unknown>) => formData?.type === 'text',
  },
  // text / search / insert-button: alignment (shared radio config).
  {
    name: 'align',
    'ui:title': '对齐方式',
    ...AlignConfigs,
    type: 'string',
    visible: (value: string, formData?: Record<string, unknown>) =>
      formData?.type === 'text' || formData?.type === 'search' || formData?.type === 'insert-button',
  },
  // text: grid span ("1-24" or "flex-auto").
  {
    name: 'span',
    'ui:title': '跨度',
    'ui:type': 'input',
    'ui:props': {
      placeholder: '请输入"1-24", 或者"flex-auto"',
    },
    type: 'string',
    visible: (value: string, formData?: Record<string, unknown>) => formData?.type === 'text',
  },
  // search: width of the search input wrapper.
  {
    name: 'wrapperStyle.width',
    'ui:title': '搜索框宽度',
    'ui:type': 'input',
    'ui:props': {
      placeholder: '请输入宽度',
    },
    type: 'string',
    visible: (value: string, formData?: Record<string, unknown>) => formData?.type === 'search',
  },
  // search: input placeholder text.
  {
    name: 'placeholder',
    'ui:title': '搜索框提示语',
    'ui:type': 'input',
    'ui:props': {
      placeholder: '请输入提示语',
    },
    type: 'string',
    visible: (value: string, formData?: Record<string, unknown>) => formData?.type === 'search',
  },
  // search: whether the input shows a clear button.
  {
    name: 'allowClear',
    'ui:title': '是否支持清空',
    'ui:type': 'switch',
    'ui:props': {},
    type: 'boolean',
    visible: (value: string, formData?: Record<string, unknown>) => formData?.type === 'search',
  },
  // search: label of the search button.
  {
    name: 'searchButtonText',
    'ui:title': '搜索按钮文本',
    'ui:type': 'input',
    'ui:props': {
      placeholder: '请输入',
    },
    type: 'string',
    visible: (value: string, formData?: Record<string, unknown>) => formData?.type === 'search',
  },
  // search: selectable search dimensions (label/value pairs).
  {
    name: 'searchKeys',
    'ui:title': '搜索分类过滤',
    'ui:type': 'array-list',
    'ui:props': {
      items: [
        {
          name: 'label',
          'ui:title': '文案',
          'ui:type': 'input',
          default: '',
        },
        {
          name: 'value',
          'ui:title': '值',
          'ui:type': 'input',
          default: '',
        },
      ],
    },
    type: 'array',
    visible: (value: string, formData?: Record<string, unknown>) => formData?.type === 'search',
  },
  // search: initially selected search dimension.
  {
    name: 'searchKeyDefaultValue',
    'ui:title': '默认搜索维度',
    'ui:type': 'input',
    'ui:props': {
      placeholder: '请输入搜索维度',
    },
    type: 'string',
    visible: (value: string, formData?: Record<string, unknown>) => formData?.type === 'search',
  },
  // insert-button: button label.
  {
    name: 'insertButtonText',
    'ui:title': '添加按钮文案',
    'ui:type': 'input',
    'ui:props': {
      placeholder: '请输入',
    },
    type: 'string',
    visible: (value: string, formData?: Record<string, unknown>) => formData?.type === 'insert-button',
  },
  // insert-button: whether to show an icon on the button.
  {
    name: 'showIcon',
    'ui:title': '是否展示图标',
    'ui:type': 'switch',
    'ui:props': {},
    type: 'boolean',
    visible: (value: string, formData?: Record<string, unknown>) => formData?.type === 'insert-button',
  },
];
// Schema for the table-header ("头部设置") attribute panel: a master switch plus
// the slot-item list that is only shown while the header is enabled.
const HeaderAttrConfigs: DTGComponentPropertySchema[] = [
  {
    name: 'header',
    group: '头部设置',
    'ui:title': '是否展示头部',
    'ui:type': 'switch',
    'ui:layout': {
      labelCol: 6,
      wrapperCol: 18,
    },
    'ui:props': {},
    type: 'boolean',
    default: false,
  },
  {
    name: 'header.items',
    group: '头部设置',
    'ui:title': '头部插槽配置',
    'ui:type': 'array-list',
    'ui:layout': {
      labelCol: 6,
      wrapperCol: 18,
    },
    'ui:props': {
      items: [...SlotItemConfigs],
    },
    // Fix: this field is an array-list, so its value type is 'array'.
    // It was previously declared `type: 'boolean'` with `default: false`,
    // inconsistent with the other array-list field (`searchKeys`), which
    // declares `type: 'array'` and no default.
    type: 'array',
    visible: (value: unknown, formData?: Record<string, unknown>) => !!formData?.header,
  },
];
// Schema for the table-footer ("底部设置") attribute panel: a master switch plus
// the slot-item list that is only shown while the footer is enabled.
const FooterAttrConfigs: DTGComponentPropertySchema[] = [
  {
    name: 'footer',
    group: '底部设置',
    'ui:title': '是否展示底部',
    'ui:type': 'switch',
    'ui:layout': {
      labelCol: 6,
      wrapperCol: 18,
    },
    'ui:props': {},
    type: 'boolean',
    default: false,
  },
  {
    name: 'footer.items',
    group: '底部设置',
    'ui:title': '底部插槽配置',
    'ui:type': 'array-list',
    'ui:layout': {
      labelCol: 6,
      wrapperCol: 18,
    },
    'ui:props': {
      items: [...SlotItemConfigs],
    },
    // Fix: this field is an array-list, so its value type is 'array'.
    // It was previously declared `type: 'boolean'` with `default: false`,
    // inconsistent with the other array-list field (`searchKeys`), which
    // declares `type: 'array'` and no default.
    type: 'array',
    visible: (value: unknown, formData?: Record<string, unknown>) => !!formData?.footer,
  },
];
// Schema for the pagination ("分页配置") attribute panel: a master switch plus
// pagination options that are only visible while `pagination` is true.
const PaginationAttrConfigs: DTGComponentPropertySchema[] = [
  // Master switch; every other field in this group keys off its value.
  {
    name: 'pagination',
    group: '分页配置',
    'ui:title': '是否展示分页',
    'ui:type': 'switch',
    'ui:props': {},
    type: 'boolean',
    default: false,
  },
  // Where the pager is rendered (left / center / right, below the table).
  {
    name: 'pagination.position',
    group: '分页配置',
    'ui:title': '分页位置',
    'ui:type': 'radio',
    'ui:props': {
      options: [
        { label: '左下角', value: 'bottomLeft' },
        { label: '正下方', value: 'bottomCenter' },
        { label: '右下角', value: 'bottomRight' },
      ],
    },
    type: 'string',
    visible: (value: unknown, formData?: Record<string, unknown>) => formData?.pagination === true,
  },
  // Pager size variant.
  {
    name: 'pagination.size',
    group: '分页配置',
    'ui:title': '分页器尺寸',
    'ui:type': 'radio',
    'ui:props': {
      options: [
        { label: '默认', value: 'default' },
        { label: '小号', value: 'small' },
      ],
    },
    type: 'string',
    default: 'default',
    visible: (value: unknown, formData?: Record<string, unknown>) => formData?.pagination === true,
  },
  // Rows per page; must be at least 1.
  {
    name: 'pagination.pageSize',
    group: '分页配置',
    'ui:title': '每页行数',
    'ui:type': 'number',
    'ui:props': {
      placeholder: '请输入',
    },
    type: 'number',
    minimum: 1,
    visible: (value: unknown, formData?: Record<string, unknown>) => formData?.pagination === true,
  },
  {
    name: 'pagination.showQuickJumper',
    group: '分页配置',
    'ui:title': '是否展示快速跳转',
    // NOTE(review): this description uses type 'icon' while the next field
    // uses type 'text' — confirm which rendering is intended.
    'ui:description': {
      type: 'icon',
      trigger: 'hover',
      title: '是否可以快速跳转至某页',
    },
    'ui:props': {},
    type: 'boolean',
    default: false,
    visible: (value: unknown, formData?: Record<string, unknown>) => formData?.pagination === true,
  },
  {
    name: 'pagination.showSizeChanger',
    group: '分页配置',
    'ui:title': '是否展示切换器',
    'ui:description': {
      type: 'text',
      trigger: 'hover',
      title: '是否展示 pageSize 切换器,当 total 大于 50 时默认为 true',
    },
    'ui:type': 'switch',
    'ui:props': {},
    type: 'boolean',
    default: false,
    visible: (value: unknown, formData?: Record<string, unknown>) => formData?.pagination === true,
  },
];
/**
 * Full attribute-form schema for the table's global settings panel:
 * table-wide options ("全局属性") followed by the header, pagination and
 * footer option groups.
 */
export const GlobalAttrFormConfigs: DTGComponentPropertySchema[] = [
  // Overall table density.
  {
    name: 'size',
    group: '全局属性',
    'ui:title': '表格尺寸',
    'ui:type': 'radio',
    'ui:props': {
      options: [
        { label: '大号', value: 'large' },
        { label: '中等', value: 'middle' },
        { label: '小号', value: 'small' },
      ],
    },
    type: 'string',
    default: 'middle',
  },
  {
    name: 'bordered',
    group: '全局属性',
    'ui:title': '是否展示边框',
    'ui:type': 'switch',
    'ui:props': {},
    type: 'boolean',
    default: false,
  },
  // Sticky table header while scrolling.
  {
    name: 'sticky',
    group: '全局属性',
    'ui:title': '是否粘性头部',
    'ui:type': 'switch',
    'ui:props': {},
    type: 'boolean',
    default: false,
  },
  {
    name: 'rowSelection',
    group: '全局属性',
    'ui:title': '行是否可选择',
    'ui:type': 'switch',
    'ui:props': {},
    type: 'boolean',
    default: false,
  },
  // Virtual scrolling for large data sets; `scrollY` below is only shown
  // when this is on.
  {
    name: 'virtual',
    group: '全局属性',
    'ui:title': '是否进入虚拟列表',
    'ui:type': 'switch',
    'ui:props': {},
    type: 'boolean',
    default: false,
  },
  {
    name: 'scrollX',
    group: '全局属性',
    'ui:title': '水平滚动宽度限制',
    'ui:type': 'number',
    'ui:props': {
      min: 1,
    },
    type: 'number',
  },
  {
    name: 'scrollY',
    group: '全局属性',
    'ui:title': '虚拟滚动高度',
    'ui:type': 'number',
    'ui:props': {
      min: 1,
    },
    type: 'number',
    visible: (value: unknown, formData?: Record<string, unknown>) => formData?.virtual === true,
  },
  ...HeaderAttrConfigs,
  ...PaginationAttrConfigs,
  ...FooterAttrConfigs,
];
/**
 * Fallback 48x48 SVG icon (a wireframe cube) shown for components that do not
 * supply their own icon. Stored as raw markup so it can be injected directly.
 */
export const defaultComponentIcon = `
<svg t="1627123986728" class="icon" viewBox="0 0 1024 1024" version="1.1" xmlns="http://www.w3.org/2000/svg" p-id="3506" width="48" height="48"><path d="M917.376 243.456L500.8 4.8a34.144 34.144 0 0 0-34.336 0.096l0.16-0.096L50.112 243.52a33.952 33.952 0 0 0-17.088 29.376v477.44c0 12.16 6.528 23.296 17.088 29.44l416.512 238.72a34.88 34.88 0 0 0 34.336-0.064l-0.16 0.096 416.576-238.72c10.272-5.952 17.088-16.896 17.088-29.44V272.928c0-12.544-6.816-23.488-16.928-29.344l-0.16-0.096z m-51.264 487.36l-382.4 219.136-382.336-219.136V292.48l382.336-219.136 382.4 219.136v438.272zM198.784 360.512a33.76 33.76 0 0 0 12.384 46.304l0.16 0.096 237.824 136.32V812.8c0 18.816 15.232 33.92 34.176 33.92h0.256a33.92 33.92 0 0 0 33.92-33.92v-269.184l238.656-136.832a33.92 33.92 0 0 0 12.48-46.528l0.096 0.16a34.4 34.4 0 0 0-46.88-12.32l0.16-0.096-238.272 136.512-238.208-136.512a34.464 34.464 0 0 0-46.624 12.384l-0.096 0.16z" p-id="3507"></path></svg>
`;
import { Utils } from '../utils';
import { remoteCamera } from '../../src/private/remoteCamera';
import { _initialize, _uninitialize } from '../../src/public/publicAPIs';
import { SdkError } from '../../src/public/interfaces';
// Tests for the remoteCamera capability: fixtures shared by every sub-suite,
// plus per-test SDK initialization/teardown against the mock window.
describe('remoteCamera', () => {
  const utils = new Utils();
  // Participants whose cameras can be remotely controlled, as the host would
  // report them.
  const capableParticipantsMock: remoteCamera.Participant[] = [
    {
      id: '1',
      displayName: 'Nicole',
    },
    {
      id: '2',
      displayName: 'Mrudula',
    },
  ];
  // Minimal participant used as the target of control requests.
  const participantMock: remoteCamera.Participant = { id: '1' };
  const controlCommandMock: remoteCamera.ControlCommand = remoteCamera.ControlCommand.PanRight;
  const errorReasonMock: remoteCamera.ErrorReason = remoteCamera.ErrorReason.CommandPanRightError;
  // Device state with every capability flag cleared.
  const deviceStateChangeMock: remoteCamera.DeviceState = {
    available: false,
    error: false,
    reset: false,
    zoomIn: false,
    zoomOut: false,
    panLeft: false,
    panRight: false,
    tiltUp: false,
    tiltDown: false,
  };
  const sessionStatusChangeMock: remoteCamera.SessionStatus = {
    inControl: true,
  };
  beforeEach(() => {
    // Drop any messages recorded by a previous test and re-initialize the SDK
    // against the mock window.
    utils.messages = [];
    _initialize(utils.mockWindow);
  });
  afterEach(() => {
    // Reset the SDK so tests remain independent of each other.
    if (_uninitialize) {
      _uninitialize();
    }
  });
// getCapableParticipants: guard clauses, then the success and failure paths
// of the host round-trip.
describe('getCapableParticipants', () => {
  it('should not allow calls before initialization', () => {
    expect(() => remoteCamera.getCapableParticipants(() => {})).toThrowError(
      'The library has not yet been initialized',
    );
  });
  it('should throw an error if the callback function is null', () => {
    expect(() => remoteCamera.getCapableParticipants(null)).toThrowError(
      '[remoteCamera.getCapableParticipants] Callback cannot be null',
    );
  });
  it('should successfully get list of participants with controllable cameras', () => {
    utils.initializeWithContext('sidePanel');
    let receivedParticipants: remoteCamera.Participant[] | null = null;
    let receivedError: SdkError | null = null;
    remoteCamera.getCapableParticipants((sdkError, participants) => {
      receivedError = sdkError;
      receivedParticipants = participants;
    });
    const msg = utils.findMessageByFunc('remoteCamera.getCapableParticipants');
    expect(msg).not.toBeUndefined();
    // Host responds with the participant list and no error.
    utils.respondToMessage(msg, null, capableParticipantsMock);
    expect(receivedParticipants).toEqual(capableParticipantsMock);
    expect(receivedError).toBeNull();
  });
  it('should return an error object if response has error', () => {
    utils.initializeWithContext('sidePanel');
    const sdkErrorMock: SdkError = {
      errorCode: 500,
      message: 'Test error message.',
    };
    let receivedParticipants: remoteCamera.Participant[] | null = null;
    let receivedError: SdkError | null = null;
    remoteCamera.getCapableParticipants((sdkError, participants) => {
      receivedError = sdkError;
      receivedParticipants = participants;
    });
    const msg = utils.findMessageByFunc('remoteCamera.getCapableParticipants');
    expect(msg).not.toBeUndefined();
    // Host responds with an error and no participant list.
    utils.respondToMessage(msg, sdkErrorMock, null);
    expect(receivedParticipants).toBeNull();
    expect(receivedError).toEqual(sdkErrorMock);
  });
});
// requestControl: argument validation, then the granted / error responses
// from the host.
describe('requestControl', () => {
  it('should not allow calls before initialization', () => {
    expect(() => remoteCamera.requestControl(participantMock, () => {})).toThrowError(
      'The library has not yet been initialized',
    );
  });
  it('should throw an error if the participant is null', () => {
    expect(() => remoteCamera.requestControl(null, () => {})).toThrowError(
      '[remoteCamera.requestControl] Participant cannot be null',
    );
  });
  it('should throw an error if the callback function is null', () => {
    expect(() => remoteCamera.requestControl(participantMock, null)).toThrowError(
      '[remoteCamera.requestControl] Callback cannot be null',
    );
  });
  it('should request control of remote camera', () => {
    utils.initializeWithContext('sidePanel');
    let returnedRequestResponse: boolean | null = null;
    let returnedSdkError: SdkError | null = null;
    const callbackMock = (sdkError: SdkError | null, requestResult: boolean | null): void => {
      returnedRequestResponse = requestResult;
      returnedSdkError = sdkError;
    };
    remoteCamera.requestControl(participantMock, callbackMock);
    let message = utils.findMessageByFunc('remoteCamera.requestControl');
    expect(message).not.toBeUndefined();
    // The target participant must be forwarded to the host.
    expect(message.args).toContain(participantMock);
    // simulate response
    const data = {
      error: null,
      requestResult: true,
    };
    utils.respondToMessage(message, data.error, data.requestResult);
    // check data is returned properly
    expect(returnedRequestResponse).toEqual(true);
    expect(returnedSdkError).toBeNull();
  });
  it('should return an error object if response has error', () => {
    utils.initializeWithContext('sidePanel');
    let returnedRequestResponse: boolean | null = null;
    let returnedSdkError: SdkError | null = null;
    const sdkErrorMock: SdkError = {
      errorCode: 500,
      message: 'Test error message.',
    };
    const callbackMock = (sdkError: SdkError | null, requestResult: boolean | null): void => {
      returnedRequestResponse = requestResult;
      returnedSdkError = sdkError;
    };
    remoteCamera.requestControl(participantMock, callbackMock);
    let message = utils.findMessageByFunc('remoteCamera.requestControl');
    expect(message).not.toBeUndefined();
    // simulate response
    const data = {
      error: sdkErrorMock,
      requestResult: null,
    };
    utils.respondToMessage(message, data.error, data.requestResult);
    // check data is returned properly
    expect(returnedRequestResponse).toBeNull();
    expect(returnedSdkError).toEqual(sdkErrorMock);
  });
});
// sendControlCommand: argument validation, then success / error responses
// (the callback here receives only an optional SdkError).
describe('sendControlCommand', () => {
  it('should not allow calls before initialization', () => {
    expect(() => remoteCamera.sendControlCommand(controlCommandMock, () => {})).toThrowError(
      'The library has not yet been initialized',
    );
  });
  it('should throw an error if the ControlCommand is null', () => {
    expect(() => remoteCamera.sendControlCommand(null, () => {})).toThrowError(
      '[remoteCamera.sendControlCommand] ControlCommand cannot be null',
    );
  });
  it('should throw an error if the callback function is null', () => {
    expect(() => remoteCamera.sendControlCommand(controlCommandMock, null)).toThrowError(
      '[remoteCamera.sendControlCommand] Callback cannot be null',
    );
  });
  it('should send control command to the remote camera', () => {
    utils.initializeWithContext('sidePanel');
    let returnedSdkError: SdkError | null;
    const callbackMock = (sdkError: SdkError | null): void => {
      returnedSdkError = sdkError;
    };
    remoteCamera.sendControlCommand(controlCommandMock, callbackMock);
    let message = utils.findMessageByFunc('remoteCamera.sendControlCommand');
    expect(message).not.toBeUndefined();
    // The command must be forwarded to the host.
    expect(message.args).toContain(controlCommandMock);
    // simulate response
    const data = {
      error: null,
    };
    utils.respondToMessage(message, data.error);
    // check data is returned properly
    expect(returnedSdkError).toBeNull();
  });
  it('should return an error object if response has error', () => {
    utils.initializeWithContext('sidePanel');
    let returnedSdkError: SdkError | null;
    const sdkErrorMock: SdkError = {
      errorCode: 500,
      message: 'Test error message.',
    };
    const callbackMock = (sdkError: SdkError | null): void => {
      returnedSdkError = sdkError;
    };
    remoteCamera.sendControlCommand(controlCommandMock, callbackMock);
    let message = utils.findMessageByFunc('remoteCamera.sendControlCommand');
    expect(message).not.toBeUndefined();
    // simulate response
    const data = {
      error: sdkErrorMock,
    };
    utils.respondToMessage(message, data.error);
    // check data is returned properly
    expect(returnedSdkError).toEqual(sdkErrorMock);
  });
});
// terminateSession: guard clauses, then the acknowledged / failed responses
// from the host.
describe('terminateSession', () => {
  it('should not allow calls before initialization', () => {
    expect(() => remoteCamera.terminateSession(() => {})).toThrowError('The library has not yet been initialized');
  });
  it('should throw an error if the callback function is null', () => {
    expect(() => remoteCamera.terminateSession(null)).toThrowError(
      '[remoteCamera.terminateSession] Callback cannot be null',
    );
  });
  it('should terminate remote camera control session', () => {
    utils.initializeWithContext('sidePanel');
    let receivedError: SdkError | null;
    remoteCamera.terminateSession(sdkError => {
      receivedError = sdkError;
    });
    const msg = utils.findMessageByFunc('remoteCamera.terminateSession');
    expect(msg).not.toBeUndefined();
    // Host acknowledges termination without an error.
    utils.respondToMessage(msg, null);
    expect(receivedError).toBeNull();
  });
  it('should return an error object if response has error', () => {
    utils.initializeWithContext('sidePanel');
    const sdkErrorMock: SdkError = {
      errorCode: 500,
      message: 'Test error message.',
    };
    let receivedError: SdkError | null;
    remoteCamera.terminateSession(sdkError => {
      receivedError = sdkError;
    });
    const msg = utils.findMessageByFunc('remoteCamera.terminateSession');
    expect(msg).not.toBeUndefined();
    // Host reports a failure.
    utils.respondToMessage(msg, sdkErrorMock);
    expect(receivedError).toEqual(sdkErrorMock);
  });
});
// Handler-registration suites: each follows the same pattern — reject before
// initialization, reject a null handler, then verify the handler fires when
// the matching host message arrives.
describe('registerOnCapableParticipantsChangeHandler', () => {
  it('should not allow calls before initialization ', () => {
    expect(() => remoteCamera.registerOnCapableParticipantsChangeHandler(() => {})).toThrowError(
      'The library has not yet been initialized',
    );
  });
  it('should not allow calls with null handler ', () => {
    utils.initializeWithContext('sidePanel');
    expect(() => remoteCamera.registerOnCapableParticipantsChangeHandler(null)).toThrowError(
      '[remoteCamera.registerOnCapableParticipantsChangeHandler] Handler cannot be null',
    );
  });
  it('should successfully register a handler for when the capable participants change', () => {
    utils.initializeWithContext('sidePanel');
    let handlerInvoked = false;
    let CapableParticipants: remoteCamera.Participant[];
    const handlerMock = (participantChange: remoteCamera.Participant[]): void => {
      handlerInvoked = true;
      CapableParticipants = participantChange;
    };
    remoteCamera.registerOnCapableParticipantsChangeHandler(handlerMock);
    // Simulate the host pushing a participants-changed event.
    utils.sendMessage('remoteCamera.capableParticipantsChange', capableParticipantsMock);
    expect(handlerInvoked).toEqual(true);
    expect(CapableParticipants).toEqual(capableParticipantsMock);
  });
});
describe('registerOnErrorHandler', () => {
  it('should not allow calls before initialization ', () => {
    expect(() => remoteCamera.registerOnErrorHandler(() => {})).toThrowError(
      'The library has not yet been initialized',
    );
  });
  it('should not allow calls with null handler ', () => {
    utils.initializeWithContext('sidePanel');
    expect(() => remoteCamera.registerOnErrorHandler(null)).toThrowError(
      '[remoteCamera.registerOnErrorHandler] Handler cannot be null',
    );
  });
  it('should successfully register a handler for when the handler encounters an error', () => {
    utils.initializeWithContext('sidePanel');
    let handlerInvoked = false;
    let handlerError: remoteCamera.ErrorReason;
    const handlerMock = (error: remoteCamera.ErrorReason): void => {
      handlerInvoked = true;
      handlerError = error;
    };
    remoteCamera.registerOnErrorHandler(handlerMock);
    // Simulate the host reporting a control error.
    utils.sendMessage('remoteCamera.handlerError', errorReasonMock);
    expect(handlerInvoked).toEqual(true);
    expect(handlerError).toEqual(errorReasonMock);
  });
});
describe('registerOnDeviceStateChangeHandler', () => {
  it('should not allow calls before initialization ', () => {
    expect(() => remoteCamera.registerOnDeviceStateChangeHandler(() => {})).toThrowError(
      'The library has not yet been initialized',
    );
  });
  it('should not allow calls with null handler ', () => {
    utils.initializeWithContext('sidePanel');
    expect(() => remoteCamera.registerOnDeviceStateChangeHandler(null)).toThrowError(
      '[remoteCamera.registerOnDeviceStateChangeHandler] Handler cannot be null',
    );
  });
  it('should successfully register a handler for when the device state changes', () => {
    utils.initializeWithContext('sidePanel');
    let handlerInvoked = false;
    let deviceState: remoteCamera.DeviceState;
    const handlerMock = (deviceStateChange: remoteCamera.DeviceState): void => {
      handlerInvoked = true;
      deviceState = deviceStateChange;
    };
    remoteCamera.registerOnDeviceStateChangeHandler(handlerMock);
    // Simulate the host pushing a device-state update.
    utils.sendMessage('remoteCamera.deviceStateChange', deviceStateChangeMock);
    expect(handlerInvoked).toEqual(true);
    expect(deviceState).toEqual(deviceStateChangeMock);
  });
});
describe('registerOnSessionStatusChangeHandler', () => {
  it('should not allow calls before initialization ', () => {
    expect(() => remoteCamera.registerOnSessionStatusChangeHandler(() => {})).toThrowError(
      'The library has not yet been initialized',
    );
  });
  it('should not allow calls with null handler ', () => {
    utils.initializeWithContext('sidePanel');
    expect(() => remoteCamera.registerOnSessionStatusChangeHandler(null)).toThrowError(
      '[remoteCamera.registerOnSessionStatusChangeHandler] Handler cannot be null',
    );
  });
  it('should successfully register a handler for when the session status changes', () => {
    utils.initializeWithContext('sidePanel');
    let handlerInvoked = false;
    let sessionStatus: remoteCamera.SessionStatus;
    const handlerMock = (sessionStatusChange: remoteCamera.SessionStatus): void => {
      handlerInvoked = true;
      sessionStatus = sessionStatusChange;
    };
    remoteCamera.registerOnSessionStatusChangeHandler(handlerMock);
    // Simulate the host pushing a session-status update.
    utils.sendMessage('remoteCamera.sessionStatusChange', sessionStatusChangeMock);
    expect(handlerInvoked).toEqual(true);
    expect(sessionStatus).toEqual(sessionStatusChangeMock);
  });
});
});
import path from "path";
import { isInteractive as globaIsInteractive } from "@best/utils";
import chalk from "chalk";
import trimPath from "./utils/trim-path";
import countEOL from "./utils/count-eod";
import { ProxiedStream, proxyStream } from "./utils/proxy-stream";
import {
BenchmarkRuntimeConfig,
RunnerStream,
BenchmarksBundle,
BenchmarkUpdateState,
} from "@best/types";
// Lifecycle states a benchmark moves through while the runner executes it.
// ERROR is terminal (see updateRunnerState below, which never overwrites it).
enum State {
    QUEUED = 'QUEUED',
    RUNNING = 'RUNNING',
    DONE = 'DONE',
    ERROR = 'ERROR',
}
// Per-benchmark display state tracked by the output stream.
interface BenchmarkStatus { state: State; displayPath: string; projectName: string, progress?: BenchmarkProgress }
// Map from benchmark signature to its display state.
// NOTE(review): 'Benchark' is a typo for 'Benchmark'; renaming would touch
// every usage in this file, so it is only flagged here.
type AllBencharkRunnerState = Map<string, BenchmarkStatus>
// Progress snapshot derived from runner updates. `runtime` and `estimated`
// are whole seconds (see calculateBenchmarkProgress); `avgIteration` is
// executedTime / executedIterations — presumably milliseconds, confirm
// against BenchmarkUpdateState.
interface BenchmarkProgress {
    executedIterations: number,
    estimated: number,
    runtime: number,
    avgIteration: number,
}
// Inverse-video colored badge for each benchmark state.
const STATE_ANSI = {
    RUNNING: chalk.reset.inverse.yellow.bold(` ${State.RUNNING} `),
    QUEUED: chalk.reset.inverse.gray.bold(` ${State.QUEUED} `),
    ERROR: chalk.reset.inverse.redBright.bold(` ${State.ERROR} `),
    DONE: chalk.reset.inverse.green.bold(` ${State.DONE} `),
};
// Banner printed once at the top of the status block.
const INIT_MSG = '\n Running benchmarks... \n\n';
const PROGRESS_TEXT = chalk.dim('Progress running: ');
// Maximum rendered width (in characters) of the progress bar.
const PROGRESS_BAR_WIDTH = 40;
// Default debounce delay, in milliseconds, for scheduled stream updates
// (used by scheduleUpdate when no explicit delay is given).
const DEFAULT_TIMEOUT = 60;
// Map a benchmark state to its colored ANSI badge.
function printState(state: State) {
    return STATE_ANSI[state];
}
// Render a benchmark path as a dimmed directory prefix plus a bold basename.
// A non-zero `overflow` is the number of characters the directory part must
// be shortened by to fit the terminal width.
function printDisplayName(displayPath: string, overflow: number) {
    const dir = path.dirname(displayPath);
    const base = path.basename(displayPath);
    const shownDir = overflow ? trimPath(dir, overflow) : dir;
    return chalk.dim(shownDir + path.sep) + chalk.bold(base);
}
// Render the project name as a dimmed cyan, parenthesized label with a
// leading space.
function printProjectName(projectName: string) {
    const label = `(${projectName})`;
    return ' ' + chalk.reset.cyan.dim(label);
}
/**
 * Derive a displayable progress snapshot from a raw runner update.
 *
 * @param progress runner-reported totals (executedTime presumably in ms — confirm)
 * @param runtimeOpts the benchmark runtime configuration
 * @returns executed iteration count, elapsed and estimated total time (both
 *          in whole seconds), and the average time per iteration.
 */
function calculateBenchmarkProgress(progress: BenchmarkUpdateState, { iterations, maxDuration, minSampleCount }: BenchmarkRuntimeConfig): BenchmarkProgress {
    const { executedIterations, executedTime } = progress;
    // NaN when executedIterations is 0 — presumably only called after at
    // least one iteration has been reported; confirm at call sites.
    const avgIteration = executedTime / executedIterations;
    // Whole elapsed seconds. Math.trunc replaces the former
    // `parseInt((executedTime / 1000) + '', 10)`, which mis-parses values
    // that stringify in scientific notation (parseInt('5e-7', 10) === 5).
    const runtime = Math.trunc(executedTime / 1000);
    let estimated: number;
    if (iterations) {
        // Fixed iteration count: extrapolate from the average iteration time.
        estimated = Math.round(iterations * avgIteration / 1000) + 1;
    } else if (avgIteration * minSampleCount > maxDuration) {
        // The minimum sample count alone will take longer than maxDuration.
        estimated = Math.round(minSampleCount * avgIteration / 1000) + 1;
    } else {
        estimated = maxDuration / 1000;
    }
    return {
        executedIterations,
        estimated,
        runtime,
        avgIteration,
    };
}
// Render the "Time: Xs, estimated Ys" line, plus a filled/unfilled bar when
// the run is long enough for a bar to be useful.
function printProgressBar(runTime: number, estimatedTime: number, width: number) {
    // Highlight the elapsed time once it is at least a second over estimate.
    const overBudget = estimatedTime && runTime >= estimatedTime + 1;
    const renderedTime = overBudget ? chalk.bold.yellow(runTime + 's') : runTime + 's';
    let time = chalk.bold(`Time:`) + ` ${renderedTime}`;
    if (runTime < estimatedTime) {
        time += `, estimated ${estimatedTime}s`;
    }
    // Only draw a bar for runs expected to take a noticeable amount of time.
    if (estimatedTime > 2 && runTime < estimatedTime && width) {
        const barWidth = Math.min(PROGRESS_BAR_WIDTH, width);
        const filled = Math.min(Math.floor(runTime / estimatedTime * barWidth), barWidth);
        if (barWidth >= 2) {
            time += '\n' + chalk.green('█').repeat(filled) + chalk.white('█').repeat(barWidth - filled);
        }
    }
    return time;
}
export default class RunnerOutputStream implements RunnerStream {
stdoutColumns: number;
stdoutWrite: Function;
isInteractive: boolean;
_streamBuffer: string = '';
_state: AllBencharkRunnerState;
_innerLog: string = '';
_scheduled: NodeJS.Timeout | null = null;
_proxyStream: ProxiedStream;
constructor(buildConfig: BenchmarksBundle[], stream: NodeJS.WriteStream, isInteractive?: boolean) {
this.stdoutColumns = stream.columns || 80;
this.stdoutWrite = stream.write.bind(stream);
this.isInteractive = isInteractive !== undefined ? isInteractive : globaIsInteractive;
this._state = this.initState(buildConfig);
this._proxyStream = proxyStream(stream, this.isInteractive);
}
initState(buildConfigs: BenchmarksBundle[]): AllBencharkRunnerState {
return buildConfigs.reduce((state: AllBencharkRunnerState, benchmarkBundle: BenchmarksBundle): AllBencharkRunnerState => {
benchmarkBundle.benchmarkBuilds.forEach(({ benchmarkEntry, benchmarkSignature, projectConfig: { projectName }}) => {
state.set(benchmarkSignature, {
projectName,
state: State.QUEUED,
displayPath: benchmarkEntry,
});
});
return state;
}, new Map());
}
clearBufferStream() {
let buffer = this._streamBuffer;
const lines = countEOL(buffer);
if (lines) {
buffer = '\r\x1B[K\r\x1B[1A'.repeat(lines);
}
if (this.isInteractive) {
// clear last line
this.stdoutWrite('\x1b[999D\x1b[K');
}
this.stdoutWrite(buffer);
this._streamBuffer = '';
}
writeBufferStream(str: string) {
this._streamBuffer += str;
this.stdoutWrite(str);
}
updateRunnerState(benchmarkSignature: string, state: State) {
const stateConfig = this._state.get(benchmarkSignature);
if (!stateConfig) {
throw new Error(`Unknown benchmark build started (${benchmarkSignature})`);
}
if (stateConfig.state !== State.ERROR) {
stateConfig.state = state;
}
}
scheduleUpdate(time?: number, fn?: Function) {
if (!this._scheduled) {
this._scheduled = setTimeout(() => {
fn ? fn() : this.updateStream();
this._scheduled = null;
}, time || DEFAULT_TIMEOUT);
}
}
printBenchmarkState({ state, projectName, displayPath }: { state: State, projectName: string, displayPath: string }) {
const columns = this.stdoutColumns;
const overflow = columns - (state.length + projectName.length + displayPath.length + /* for padding */ 14);
const hasOverflow = overflow < 0;
const ansiState = printState(state);
const ansiProjectName = printProjectName(projectName);
const ansiDisplayname = printDisplayName(displayPath, hasOverflow ? Math.abs(overflow): 0);
return `${ansiState} ${ansiProjectName} ${ansiDisplayname}\n`;
}
printProgress(progress: BenchmarkProgress, { displayPath }: BenchmarkStatus): string {
const benchmarkName = chalk.bold.black(path.basename(displayPath));
return [
`\n${PROGRESS_TEXT} ${benchmarkName}`,
chalk.bold.black('Avg iteration: ') + progress.avgIteration.toFixed(2) + 'ms',
chalk.bold.black('Completed iterations: ') + progress.executedIterations,
printProgressBar(progress.runtime, progress.estimated, 40)
].join('\n') + '\n\n';
}
updateStream() {
let buffer = INIT_MSG;
let progressStr: string = '';
for (const benchmarkState of this._state.values()) {
const { state, displayPath, projectName, progress } = benchmarkState;
buffer += this.printBenchmarkState({ state, displayPath, projectName });
if (state === State.RUNNING && progress) {
progressStr += this.printProgress(progress, benchmarkState);
}
}
const streamProxyBuffer = this._proxyStream.readBuffer();
streamProxyBuffer ? `Buffered console logs:\n ${streamProxyBuffer}` : '';
this.clearBufferStream();
this.writeBufferStream(buffer + progressStr + streamProxyBuffer);
}
log(message: string) {
this._innerLog = message;
if (this.isInteractive) {
this.scheduleUpdate();
} else {
this.stdoutWrite(` :: ${message}\n`);
}
}
_clearTimeout() {
if (this._scheduled) {
clearTimeout(this._scheduled);
this._scheduled = null;
}
}
// -- Lifecycle
onBenchmarkStart(benchmarkSignature: string) {
this.updateRunnerState(benchmarkSignature, State.RUNNING);
if (this.isInteractive) {
this.scheduleUpdate();
} else {
const benchmarkState = this._state.get(benchmarkSignature);
if (benchmarkState) {
this.stdoutWrite(this.printBenchmarkState(benchmarkState));
}
}
}
onBenchmarkEnd(benchmarkSignature: string) {
this.updateRunnerState(benchmarkSignature, State.DONE);
this._innerLog = '';
const benchmarkState = this._state.get(benchmarkSignature);
if (benchmarkState) {
if (this.isInteractive) {
if (benchmarkState.state === State.ERROR) {
this.updateStream();
this.stdoutWrite('\n');
} else {
this.scheduleUpdate();
}
} else {
this._clearTimeout();
this.stdoutWrite(this.printBenchmarkState(benchmarkState) + '\n');
}
}
}
onBenchmarkError(benchmarkSignature: string) {
this.updateRunnerState(benchmarkSignature, State.ERROR);
}
updateBenchmarkProgress(benchmarkSignature: string, updatedBenchmarkState: BenchmarkUpdateState, runtimeOpts: BenchmarkRuntimeConfig) {
const progress = calculateBenchmarkProgress(updatedBenchmarkState, runtimeOpts);
const benchmarkState = this._state.get(benchmarkSignature);
benchmarkState!.progress = progress;
const { executedIterations, avgIteration, estimated, runtime } = progress;
const runIter = executedIterations.toString().padEnd(5, " ");
const avgIter = `${avgIteration.toFixed(2)}ms`.padEnd(10, " ");
const remaining = estimated - runtime;
if (this.isInteractive) {
this.scheduleUpdate();
} else {
this.scheduleUpdate(2500, () => {
this.stdoutWrite(
` :: ${benchmarkState!.displayPath} > ran: ${runIter} | avg: ${avgIter} | remainingTime: ${remaining}s \n`
);
});
}
}
init() {
if (this.isInteractive) {
this.updateStream();
} else {
this.stdoutWrite(INIT_MSG);
}
}
finish() {
this._clearTimeout();
this._proxyStream.unproxyStream();
if (this.isInteractive) {
this.updateStream();
} else {
this.stdoutWrite('\n');
}
}
} | the_stack |
import * as path from "path";
import * as shelljs from "shelljs";
import * as semver from "semver";
import * as constants from "../constants";
import {
IPluginsService,
IPreparePluginNativeCodeData,
IPluginData,
IPackageJsonDepedenciesResult,
IBasePluginData,
INodeModuleData,
} from "../definitions/plugins";
import {
IPlatformsDataService,
INodeModulesDependenciesBuilder,
IPlatformData,
} from "../definitions/platform";
import { IProjectDataService, IProjectData } from "../definitions/project";
import {
INodePackageManagerInstallOptions,
INodePackageManager,
IOptions,
IDependencyData,
} from "../declarations";
import {
IFileSystem,
IErrors,
IDictionary,
IStringDictionary,
} from "../common/declarations";
import { IFilesHashService } from "../definitions/files-hash-service";
import * as _ from "lodash";
import { IInjector } from "../common/definitions/yok";
import { injector } from "../common/yok";
import {
resolvePackagePath,
resolvePackageJSONPath,
} from "../helpers/package-path-helper";
export class PluginsService implements IPluginsService {
private static INSTALL_COMMAND_NAME = "install";
private static UNINSTALL_COMMAND_NAME = "uninstall";
private static NPM_CONFIG = {
save: true,
};
private static LOCK_FILES = [
"package-lock.json",
"npm-shrinkwrap.json",
"yarn.lock",
"pnpm-lock.yaml",
];
private get $platformsDataService(): IPlatformsDataService {
return this.$injector.resolve("platformsDataService");
}
private get $projectDataService(): IProjectDataService {
return this.$injector.resolve("projectDataService");
}
private get npmInstallOptions(): INodePackageManagerInstallOptions {
return _.merge(
{
disableNpmInstall: this.$options.disableNpmInstall,
frameworkPath: this.$options.frameworkPath,
ignoreScripts: this.$options.ignoreScripts,
path: this.$options.path,
},
PluginsService.NPM_CONFIG
);
}
constructor(
private $packageManager: INodePackageManager,
private $fs: IFileSystem,
private $options: IOptions,
private $logger: ILogger,
private $errors: IErrors,
private $filesHashService: IFilesHashService,
private $injector: IInjector,
private $mobileHelper: Mobile.IMobileHelper,
private $nodeModulesDependenciesBuilder: INodeModulesDependenciesBuilder
) {}
public async add(plugin: string, projectData: IProjectData): Promise<void> {
await this.ensure(projectData);
const possiblePackageName = path.resolve(plugin);
if (
possiblePackageName.indexOf(".tgz") !== -1 &&
this.$fs.exists(possiblePackageName)
) {
plugin = possiblePackageName;
}
const name = (
await this.$packageManager.install(
plugin,
projectData.projectDir,
this.npmInstallOptions
)
).name;
const pathToRealNpmPackageJson = this.getPackageJsonFilePathForModule(
name,
projectData.projectDir
);
const realNpmPackageJson = this.$fs.readJson(pathToRealNpmPackageJson);
if (realNpmPackageJson.nativescript) {
const pluginData = this.convertToPluginData(
realNpmPackageJson,
projectData.projectDir
);
// Validate
const action = async (
pluginDestinationPath: string,
platform: constants.PlatformTypes,
platformData: IPlatformData
): Promise<void> => {
this.isPluginDataValidForPlatform(pluginData, platform, projectData);
};
await this.executeForAllInstalledPlatforms(action, projectData);
this.$logger.info(
`Successfully installed plugin ${realNpmPackageJson.name}.`
);
} else {
await this.$packageManager.uninstall(
realNpmPackageJson.name,
{ save: true },
projectData.projectDir
);
this.$errors.fail(
`${plugin} is not a valid NativeScript plugin. Verify that the plugin package.json file contains a nativescript key and try again.`
);
}
}
/**
 * Uninstalls a plugin: strips its native code from every installed platform,
 * removes the npm package, and deletes the plugin from each platform's
 * prepared tns_modules directory.
 * @param pluginName Name of the plugin to remove.
 * @param projectData The project to remove the plugin from.
 */
public async remove(
pluginName: string,
projectData: IProjectData
): Promise<void> {
// First let each platform's project service remove the plugin's native code.
const removePluginNativeCodeAction = async (
modulesDestinationPath: string,
platform: string,
platformData: IPlatformData
): Promise<void> => {
const pluginData = this.convertToPluginData(
this.getNodeModuleData(pluginName, projectData.projectDir),
projectData.projectDir
);
await platformData.platformProjectService.removePluginNativeCode(
pluginData,
projectData
);
};
await this.executeForAllInstalledPlatforms(
removePluginNativeCodeAction,
projectData
);
// Then uninstall the package itself.
await this.executeNpmCommand(
PluginsService.UNINSTALL_COMMAND_NAME,
pluginName,
projectData
);
// Finally clean the plugin out of the prepared platform folders; fall back
// to a generic success message when no platform is installed.
let showMessage = true;
const action = async (
modulesDestinationPath: string,
platform: string,
platformData: IPlatformData
): Promise<void> => {
shelljs.rm("-rf", path.join(modulesDestinationPath, pluginName));
this.$logger.info(
`Successfully removed plugin ${pluginName} for ${platform}.`
);
showMessage = false;
};
await this.executeForAllInstalledPlatforms(action, projectData);
if (showMessage) {
this.$logger.info(`Successfully removed plugin ${pluginName}`);
}
}
/**
 * Adds (or moves) a dependency entry in the project's package.json.
 * If the plugin already exists in the opposite collection
 * (dependencies vs devDependencies) it is removed from there first, so the
 * plugin ends up in exactly one collection.
 * @param plugin Package name.
 * @param version Version (or range) to record.
 * @param isDev When true the entry goes into devDependencies.
 * @param projectDir Root directory of the project.
 */
public addToPackageJson(
plugin: string,
version: string,
isDev: boolean,
projectDir: string
) {
const packageJsonPath = this.getPackageJsonFilePath(projectDir);
let packageJsonContent = this.$fs.readJson(packageJsonPath);
const collectionKey = isDev ? "devDependencies" : "dependencies";
const oppositeCollectionKey = isDev ? "dependencies" : "devDependencies";
// Drop the plugin from the other collection first, if present.
if (
packageJsonContent[oppositeCollectionKey] &&
packageJsonContent[oppositeCollectionKey][plugin]
) {
const result = this.removeDependencyFromPackageJsonContent(
plugin,
packageJsonContent
);
packageJsonContent = result.packageJsonContent;
}
packageJsonContent[collectionKey] = packageJsonContent[collectionKey] || {};
packageJsonContent[collectionKey][plugin] = version;
this.$fs.writeJson(packageJsonPath, packageJsonContent);
}
/**
 * Deletes the given plugin from dependencies/devDependencies in the
 * project's package.json; the file is rewritten only when it changed.
 * @param plugin Package name to delete.
 * @param projectDir Root directory of the project.
 */
public removeFromPackageJson(plugin: string, projectDir: string) {
    const pathToPackageJson = this.getPackageJsonFilePath(projectDir);
    const { hasModifiedPackageJson, packageJsonContent } =
        this.removeDependencyFromPackageJsonContent(
            plugin,
            this.$fs.readJson(pathToPackageJson)
        );
    if (hasModifiedPackageJson) {
        this.$fs.writeJson(pathToPackageJson, packageJsonContent);
    }
}
/**
 * Runs the platform-specific native preparation for a plugin, but only when
 * the shasums of its platforms-folder files differ from those recorded
 * during the previous build (or when no record exists yet).
 */
public async preparePluginNativeCode({
pluginData,
platform,
projectData,
}: IPreparePluginNativeCodeData): Promise<void> {
const platformData = this.$platformsDataService.getPlatformData(
platform,
projectData
);
const pluginPlatformsFolderPath = pluginData.pluginPlatformsFolderPath(
platform
);
if (this.$fs.exists(pluginPlatformsFolderPath)) {
const pathToPluginsBuildFile = path.join(
platformData.projectRoot,
constants.PLUGINS_BUILD_DATA_FILENAME
);
const allPluginsNativeHashes = this.getAllPluginsNativeHashes(
pathToPluginsBuildFile
);
const oldPluginNativeHashes = allPluginsNativeHashes[pluginData.name];
const currentPluginNativeHashes = await this.getPluginNativeHashes(
pluginPlatformsFolderPath
);
// Re-prepare only when the plugin's native files changed since last build.
if (
!oldPluginNativeHashes ||
this.$filesHashService.hasChangesInShasums(
oldPluginNativeHashes,
currentPluginNativeHashes
)
) {
await platformData.platformProjectService.preparePluginNativeCode(
pluginData,
projectData
);
// Persist the new shasums so the next build can skip unchanged plugins.
this.setPluginNativeHashes({
pathToPluginsBuildFile,
pluginData,
currentPluginNativeHashes,
allPluginsNativeHashes,
});
}
}
}
/**
 * Verifies that every dependency/devDependency declared in package.json can
 * be resolved from the project directory, and runs a package-manager install
 * when some are missing (or unconditionally when `--force` is passed).
 */
public async ensureAllDependenciesAreInstalled(
projectData: IProjectData
): Promise<void> {
const packageJsonContent = this.$fs.readJson(
this.getPackageJsonFilePath(projectData.projectDir)
);
const allDependencies = _.keys(packageJsonContent.dependencies).concat(
_.keys(packageJsonContent.devDependencies)
);
const notInstalledDependencies = allDependencies
.map((dep) => {
this.$logger.trace(`Checking if ${dep} is installed...`);
const pathToPackage = resolvePackagePath(dep, {
paths: [projectData.projectDir],
});
if (pathToPackage) {
// return false if the dependency is installed - we'll filter out boolean values
// and end up with an array of dep names that are not installed if we end up
// inside the catch block.
return false;
}
this.$logger.trace(`${dep} is not installed, or couldn't be found`);
return dep;
})
.filter(Boolean);
if (this.$options.force || notInstalledDependencies.length) {
this.$logger.trace(
"Npm install will be called from CLI. Force option is: ",
this.$options.force,
" Not installed dependencies are: ",
notInstalledDependencies
);
await this.$packageManager.install(
projectData.projectDir,
projectData.projectDir,
{
disableNpmInstall: this.$options.disableNpmInstall,
frameworkPath: this.$options.frameworkPath,
ignoreScripts: this.$options.ignoreScripts,
path: this.$options.path,
}
);
}
}
/**
 * Returns plugin data for every installed module that is a NativeScript
 * plugin (i.e. whose package.json contains a `nativescript` key).
 */
public async getAllInstalledPlugins(
    projectData: IProjectData
): Promise<IPluginData[]> {
    const installedModules = await this.getAllInstalledModules(projectData);
    const pluginDataItems = installedModules.map((moduleData) =>
        this.convertToPluginData(moduleData, projectData.projectDir)
    );
    return pluginDataItems.filter(
        (pluginData) => pluginData && pluginData.isPlugin
    );
}
/**
 * Returns plugin data for all production (runtime) dependencies that are
 * NativeScript plugins, validated for the given platform and de-duplicated
 * by name.
 * @param dependencies Optional pre-computed production dependency graph;
 * computed from the project when omitted.
 */
public getAllProductionPlugins(
projectData: IProjectData,
platform: string,
dependencies?: IDependencyData[]
): IPluginData[] {
dependencies =
dependencies ||
this.$nodeModulesDependenciesBuilder.getProductionDependencies(
projectData.projectDir,
projectData.ignoredDependencies
);
if (_.isEmpty(dependencies)) {
return [];
}
// Only dependencies whose package.json has a `nativescript` key are plugins.
let productionPlugins: IDependencyData[] = dependencies.filter(
(d) => !!d.nativescript
);
productionPlugins = this.ensureValidProductionPlugins(
productionPlugins,
projectData.projectDir,
platform
);
return productionPlugins
.map((plugin) => this.convertToPluginData(plugin, projectData.projectDir))
.filter((item, idx, self) => {
// Filter out duplicates to speed up build times by not building the same dependency
// multiple times. One possible downside is that if there are different versions
// of the same native dependency only the first one in the array will be built
return self.findIndex((p) => p.name === item.name) === idx;
});
}
/**
 * Reads the project's package.json and returns basic { name, version }
 * records for its dependencies and devDependencies.
 */
public getDependenciesFromPackageJson(
    projectDir: string
): IPackageJsonDepedenciesResult {
    const packageJsonContent = this.$fs.readJson(
        this.getPackageJsonFilePath(projectDir)
    );
    return {
        dependencies: this.getBasicPluginInformation(
            packageJsonContent.dependencies
        ),
        devDependencies: this.getBasicPluginInformation(
            packageJsonContent.devDependencies
        ),
    };
}
/**
 * Checks whether the package.json at the given path describes a
 * NativeScript plugin (i.e. it contains a truthy `nativescript` key).
 * @param pluginPackageJsonPath Absolute path to the plugin's package.json.
 * @returns true when the file content has a truthy `nativescript` key.
 */
public isNativeScriptPlugin(pluginPackageJsonPath: string): boolean {
    const pluginPackageJsonContent = this.$fs.readJson(pluginPackageJsonPath);
    // Coerce to a real boolean: the declared return type is boolean, but the
    // previous code leaked the raw `nativescript` object (or undefined).
    return !!(
        pluginPackageJsonContent && pluginPackageJsonContent.nativescript
    );
}
// Memoized wrapper around _ensureValidProductionPlugins. The resolver builds
// the cache key from the serialized dependency list (sorted by directory for
// stability) concatenated with the project dir and platform, so repeated
// calls with the same inputs skip the validation work.
private ensureValidProductionPlugins = _.memoize<
(
productionDependencies: IDependencyData[],
projectDir: string,
platform: string
) => IDependencyData[]
>(
this._ensureValidProductionPlugins,
(
productionDependencies: IDependencyData[],
projectDir: string,
platform: string
) => {
let key = _.sortBy(productionDependencies, (p) => p.directory)
.map((d) => JSON.stringify(d, null, 2))
.join("\n");
key += projectDir + platform;
return key;
}
);
/**
 * Platform-specific validation of the production dependency list. Works on a
 * deep clone so the memoized caller's input is never mutated; for iOS the
 * clone may be filtered, for Android problems are only logged.
 */
private _ensureValidProductionPlugins(
productionDependencies: IDependencyData[],
projectDir: string,
platform: string
): IDependencyData[] {
let clonedProductionDependencies = _.cloneDeep(productionDependencies);
platform = platform.toLowerCase();
if (this.$mobileHelper.isAndroidPlatform(platform)) {
this.ensureValidProductionPluginsForAndroid(clonedProductionDependencies);
} else if (this.$mobileHelper.isiOSPlatform(platform)) {
clonedProductionDependencies = this.ensureValidProductionPluginsForIOS(
clonedProductionDependencies,
projectDir,
platform
);
}
return clonedProductionDependencies;
}
/**
 * Logs diagnostics when the same dependency is installed multiple times in
 * node_modules; differing versions are flagged as a potential cause of
 * Android build failures. Nothing is removed here - diagnostics only.
 */
private ensureValidProductionPluginsForAndroid(
productionDependencies: IDependencyData[]
): void {
const dependenciesGroupedByName = _.groupBy(
productionDependencies,
(p) => p.name
);
_.each(
dependenciesGroupedByName,
(dependencyOccurrences, dependencyName) => {
if (dependencyOccurrences.length > 1) {
// the dependency exists multiple times in node_modules
const dependencyOccurrencesGroupedByVersion = _.groupBy(
dependencyOccurrences,
(g) => g.version
);
const versions = _.keys(dependencyOccurrencesGroupedByVersion);
if (versions.length === 1) {
// all dependencies with this name have the same version
this.$logger.debug(
`Detected same versions (${_.first(
versions
)}) of the ${dependencyName} installed at locations: ${_.map(
dependencyOccurrences,
(d) => d.directory
).join(", ")}`
);
} else {
// Conflicting versions - only a debug message, the build proceeds.
this.$logger.debug(
`Detected different versions of the ${dependencyName} installed at locations: ${_.map(
dependencyOccurrences,
(d) => d.directory
).join(", ")}\nThis can cause build failures.`
);
}
}
}
);
}
/**
 * Detects .framework bundles shipped by more than one installed package:
 * - same framework from different plugins: hard failure;
 * - same plugin and version installed in several locations: keeps only the
 *   shallowest copy and removes the others from the returned list;
 * - same plugin with different versions: hard failure.
 * @returns The (possibly reduced) production dependency list.
 */
private ensureValidProductionPluginsForIOS(
productionDependencies: IDependencyData[],
projectDir: string,
platform: string
): IDependencyData[] {
// Collect every dependency that ships a .framework for this platform.
const dependenciesWithFrameworks: any[] = [];
_.each(productionDependencies, (d) => {
const pathToPlatforms = path.join(d.directory, "platforms", platform);
if (this.$fs.exists(pathToPlatforms)) {
const contents = this.$fs.readDirectory(pathToPlatforms);
_.each(contents, (file) => {
if (path.extname(file) === ".framework") {
dependenciesWithFrameworks.push({
...d,
frameworkName: path.basename(file),
frameworkLocation: path.join(pathToPlatforms, file),
});
}
});
}
});
if (dependenciesWithFrameworks.length > 0) {
const dependenciesGroupedByFrameworkName = _.groupBy(
dependenciesWithFrameworks,
(d) => d.frameworkName
);
_.each(
dependenciesGroupedByFrameworkName,
(dependencyOccurrences, frameworkName) => {
if (dependencyOccurrences.length > 1) {
// A framework exists multiple times in node_modules
const groupedByName = _.groupBy(
dependencyOccurrences,
(d) => d.name
);
const pluginsNames = _.keys(groupedByName);
if (pluginsNames.length > 1) {
// fail - the same framework is installed by different dependencies.
const locations = dependencyOccurrences.map(
(d) => d.frameworkLocation
);
let msg = `Detected the framework ${frameworkName} is installed from multiple plugins at locations:\n${locations.join(
"\n"
)}\n`;
msg += this.getHelpMessage(projectDir);
this.$errors.fail(msg);
}
const dependencyName = _.first(pluginsNames);
const dependencyOccurrencesGroupedByVersion = _.groupBy(
dependencyOccurrences,
(g) => g.version
);
const versions = _.keys(dependencyOccurrencesGroupedByVersion);
if (versions.length === 1) {
// all dependencies with this name have the same version
this.$logger.warn(
`Detected the framework ${frameworkName} is installed multiple times from the same versions of plugin (${_.first(
versions
)}) at locations: ${_.map(
dependencyOccurrences,
(d) => d.directory
).join(", ")}`
);
// Keep the copy closest to the project root (smallest depth).
const selectedPackage = _.minBy(
dependencyOccurrences,
(d) => d.depth
);
this.$logger.info(
`CLI will use only the native code from '${selectedPackage.directory}'.`
.green
);
// Remove every other copy from the production list.
_.each(dependencyOccurrences, (dependency) => {
if (dependency !== selectedPackage) {
productionDependencies.splice(
productionDependencies.indexOf(dependency),
1
);
}
});
} else {
const message = this.getFailureMessageForDifferentDependencyVersions(
dependencyName,
frameworkName,
dependencyOccurrencesGroupedByVersion,
projectDir
);
this.$errors.fail(message);
}
}
}
);
}
return productionDependencies;
}
/**
 * Builds the error message shown when the same framework is provided by
 * multiple installed versions of one plugin.
 * @param dependencyName Name of the plugin that ships the framework.
 * @param frameworkName File name of the duplicated .framework bundle.
 * @param dependencyOccurrencesGroupedByVersion Occurrences keyed by version.
 * @param projectDir Project root, used to build the trailing help message.
 * @returns The full, human readable failure message.
 */
private getFailureMessageForDifferentDependencyVersions(
    dependencyName: string,
    frameworkName: string,
    dependencyOccurrencesGroupedByVersion: IDictionary<IDependencyData[]>,
    projectDir: string
): string {
    let message = `Cannot use the same framework ${frameworkName} multiple times in your application.
This framework comes from ${dependencyName} plugin, which is installed multiple times in node_modules:\n`;
    _.each(dependencyOccurrencesGroupedByVersion, (dependencies, version) => {
        // Join explicitly: appending the array itself coerces it to a string
        // with `,` separators inserted between the entries.
        message += dependencies
            .map((d) => `* Path: ${d.directory}, version: ${d.version}\n`)
            .join("");
    });
    message += this.getHelpMessage(projectDir);
    return message;
}
/**
 * Builds the trailing help text advising the user to reinstall their
 * dependencies, mentioning whichever lock files exist in the project.
 */
private getHelpMessage(projectDir: string): string {
    const existingLockFiles = PluginsService.LOCK_FILES.filter((lockFile) =>
        this.$fs.exists(path.join(projectDir, lockFile))
    );
    const msgForLockFiles: string = existingLockFiles.length
        ? ` and ${existingLockFiles.join(", ")}`
        : "";
    return `\nProbably you need to update your dependencies, remove node_modules${msgForLockFiles} and try again.`;
}
/**
 * Normalizes dependency/node-module metadata into the IPluginData shape used
 * by the rest of the service. `isPlugin` mirrors the presence of the
 * `nativescript` key in the module's package.json.
 */
private convertToPluginData(
cacheData: IDependencyData | INodeModuleData,
projectDir: string
): IPluginData {
const pluginData: any = {};
pluginData.name = cacheData.name;
pluginData.version = cacheData.version;
// IDependencyData already knows its directory; otherwise resolve the
// module's package.json and take its parent directory.
pluginData.fullPath =
(<IDependencyData>cacheData).directory ||
path.dirname(
this.getPackageJsonFilePathForModule(cacheData.name, projectDir)
);
pluginData.isPlugin = !!cacheData.nativescript;
pluginData.pluginPlatformsFolderPath = (platform: string) =>
path.join(pluginData.fullPath, "platforms", platform.toLowerCase());
const data = cacheData.nativescript;
if (pluginData.isPlugin) {
pluginData.platformsData = data.platforms;
pluginData.pluginVariables = data.variables;
}
return pluginData;
}
/**
 * Removes a dependency from both the devDependencies and dependencies
 * sections of a parsed package.json object (mutating it in place).
 * @returns The (possibly modified) content plus a flag telling whether
 * anything was actually deleted.
 */
private removeDependencyFromPackageJsonContent(
    dependency: string,
    packageJsonContent: any
): { hasModifiedPackageJson: boolean; packageJsonContent: any } {
    let hasModifiedPackageJson = false;
    for (const collectionKey of ["devDependencies", "dependencies"]) {
        const collection = packageJsonContent[collectionKey];
        if (collection && collection[dependency]) {
            delete collection[dependency];
            hasModifiedPackageJson = true;
        }
    }
    return {
        hasModifiedPackageJson,
        packageJsonContent,
    };
}
/**
 * Maps a package.json dependencies object ({ name: version }) to an array
 * of { name, version } records.
 */
private getBasicPluginInformation(dependencies: any): IBasePluginData[] {
    return _.map(dependencies, (version: string, name: string) => ({
        name,
        version,
    }));
}
/** Resolves the node_modules directory inside the given project. */
private getNodeModulesPath(projectDir: string): string {
    const nodeModulesDirName = "node_modules";
    return path.join(projectDir, nodeModulesDirName);
}
/** Resolves the path to the project's package.json file. */
private getPackageJsonFilePath(projectDir: string): string {
    const packageJsonFileName = "package.json";
    return path.join(projectDir, packageJsonFileName);
}
/**
 * Locates the package.json of an installed module, resolving from the
 * project directory the same way Node's module resolution would.
 */
private getPackageJsonFilePathForModule(
    moduleName: string,
    projectDir: string
): string {
    return resolvePackageJSONPath(moduleName, {
        paths: [projectDir],
    });
}
/**
 * Returns the names of the runtime dependencies declared in the project's
 * package.json.
 * NOTE(review): `require` caches the parsed JSON per process, so edits made
 * to package.json after the first call may not be picked up - confirm this
 * is intended.
 */
private getDependencies(projectDir: string): string[] {
const packageJsonFilePath = this.getPackageJsonFilePath(projectDir);
return _.keys(require(packageJsonFilePath).dependencies);
}
/**
 * Reads a module's package.json and returns its normalized metadata.
 * @param module Either a module name or a path to its package.json file.
 * @param projectDir Project root used to resolve module names.
 */
private getNodeModuleData(
module: string,
projectDir: string
): INodeModuleData {
// module can be modulePath or moduleName - resolve names to the
// package.json path before reading.
if (!this.$fs.exists(module) || path.basename(module) !== "package.json") {
module = this.getPackageJsonFilePathForModule(module, projectDir);
}
const data = this.$fs.readJson(module);
return {
name: data.name,
version: data.version,
fullPath: path.dirname(module),
isPlugin: data.nativescript !== undefined,
nativescript: data.nativescript,
};
}
/**
 * Guarantees that all declared dependencies are installed and that the
 * project's node_modules directory exists.
 */
private async ensure(projectData: IProjectData): Promise<void> {
    await this.ensureAllDependenciesAreInstalled(projectData);
    const nodeModulesPath = this.getNodeModulesPath(projectData.projectDir);
    this.$fs.ensureDirectoryExists(nodeModulesPath);
}
/**
 * Collects node-module metadata for every runtime dependency of the
 * project, making sure dependencies are installed first.
 */
private async getAllInstalledModules(
    projectData: IProjectData
): Promise<INodeModuleData[]> {
    await this.ensure(projectData);
    const dependencyNames = this.getDependencies(projectData.projectDir);
    return dependencyNames.map((dependencyName) =>
        this.getNodeModuleData(dependencyName, projectData.projectDir)
    );
}
/**
 * Runs the requested package-manager command (install or uninstall) for the
 * given package spec, then returns the bare plugin name parsed from it.
 * Unknown command names fall through and only the parsed name is returned.
 */
private async executeNpmCommand(
npmCommandName: string,
npmCommandArguments: string,
projectData: IProjectData
): Promise<string> {
if (npmCommandName === PluginsService.INSTALL_COMMAND_NAME) {
await this.$packageManager.install(
npmCommandArguments,
projectData.projectDir,
this.npmInstallOptions
);
} else if (npmCommandName === PluginsService.UNINSTALL_COMMAND_NAME) {
await this.$packageManager.uninstall(
npmCommandArguments,
PluginsService.NPM_CONFIG,
projectData.projectDir
);
}
return this.parseNpmCommandResult(npmCommandArguments);
}
/**
 * Extracts the bare package name from a `name@version` specifier.
 * Handles scoped packages (`@scope/name@1.2.3`), which the previous
 * `split("@")[0]` implementation mangled into an empty string.
 * @param npmCommandResult Package specifier such as `foo@1.0.0`.
 * @returns The package name without the version suffix.
 */
private parseNpmCommandResult(npmCommandResult: string): string {
    // Search for the version separator starting at index 1 so that a
    // leading `@` (the scope marker) is never treated as the separator.
    const versionSeparatorIndex = npmCommandResult.indexOf("@", 1);
    return versionSeparatorIndex === -1
        ? npmCommandResult
        : npmCommandResult.substring(0, versionSeparatorIndex);
}
/**
 * Invokes `action` once for every mobile platform that is present in the
 * project's platforms directory, passing the prepared tns_modules path and
 * the platform data. Platforms that are not installed are skipped silently.
 */
private async executeForAllInstalledPlatforms(
action: (
_pluginDestinationPath: string,
pl: string,
_platformData: IPlatformData
) => Promise<void>,
projectData: IProjectData
): Promise<void> {
const availablePlatforms = this.$mobileHelper.platformNames.map((p) =>
p.toLowerCase()
);
for (const platform of availablePlatforms) {
// A platform counts as installed when its folder exists under platformsDir.
const isPlatformInstalled = this.$fs.exists(
path.join(projectData.platformsDir, platform.toLowerCase())
);
if (isPlatformInstalled) {
const platformData = this.$platformsDataService.getPlatformData(
platform.toLowerCase(),
projectData
);
const pluginDestinationPath = path.join(
platformData.appDestinationDirectoryPath,
constants.APP_FOLDER_NAME,
"tns_modules"
);
await action(
pluginDestinationPath,
platform.toLowerCase(),
platformData
);
}
}
}
/**
 * Returns the version of the runtime package installed for the given
 * platform in this project.
 */
private getInstalledFrameworkVersion(
platform: constants.PlatformTypes,
projectData: IProjectData
): string {
const runtimePackage = this.$projectDataService.getRuntimePackage(
projectData.projectDir,
platform
);
// const platformData = this.$platformsDataService.getPlatformData(platform, projectData);
// const frameworkData = this.$projectDataService.getNSValue(projectData.projectDir, platformData.frameworkPackageName);
return runtimePackage.version;
}
/**
 * Validates a plugin against the currently installed runtime for the given
 * platform. Warns (and returns false) when the plugin does not declare
 * support for the platform or requires a newer runtime than the installed
 * one. Never throws.
 */
private isPluginDataValidForPlatform(
pluginData: IPluginData,
platform: constants.PlatformTypes,
projectData: IProjectData
): boolean {
let isValid = true;
const installedFrameworkVersion = this.getInstalledFrameworkVersion(
platform,
projectData
);
const pluginPlatformsData = pluginData.platformsData;
if (pluginPlatformsData) {
const versionRequiredByPlugin = (<any>pluginPlatformsData)[platform];
if (!versionRequiredByPlugin) {
// The plugin does not declare support for this platform at all.
this.$logger.warn(
`${pluginData.name} is not supported for ${platform}.`
);
isValid = false;
} else if (
semver.gt(versionRequiredByPlugin, installedFrameworkVersion)
) {
// The plugin needs a newer runtime than the one installed.
this.$logger.warn(
`${pluginData.name} requires at least version ${versionRequiredByPlugin} of platform ${platform}. Currently installed version is ${installedFrameworkVersion}.`
);
isValid = false;
}
}
return isValid;
}
private async getPluginNativeHashes(
pluginPlatformsDir: string
): Promise<IStringDictionary> {
let data: IStringDictionary = {};
if (this.$fs.exists(pluginPlatformsDir)) {
const pluginNativeDataFiles = this.$fs.enumerateFilesInDirectorySync(
pluginPlatformsDir
);
data = await this.$filesHashService.generateHashes(pluginNativeDataFiles);
}
return data;
}
private getAllPluginsNativeHashes(
pathToPluginsBuildFile: string
): IDictionary<IStringDictionary> {
let data: IDictionary<IStringDictionary> = {};
if (this.$fs.exists(pathToPluginsBuildFile)) {
data = this.$fs.readJson(pathToPluginsBuildFile);
}
return data;
}
/**
 * Records the current shasums for one plugin and persists the whole hash
 * map back to the plugins build-data file.
 */
private setPluginNativeHashes(opts: {
    pathToPluginsBuildFile: string;
    pluginData: IPluginData;
    currentPluginNativeHashes: IStringDictionary;
    allPluginsNativeHashes: IDictionary<IStringDictionary>;
}): void {
    const {
        pathToPluginsBuildFile,
        pluginData,
        currentPluginNativeHashes,
        allPluginsNativeHashes,
    } = opts;
    allPluginsNativeHashes[pluginData.name] = currentPluginNativeHashes;
    this.$fs.writeJson(pathToPluginsBuildFile, allPluginsNativeHashes);
}
}
// Register the service in the DI container so other components can resolve
// it under the name "pluginsService".
injector.register("pluginsService", PluginsService);
module Cats.Gui.Editor {
// Ace's HashHandler is loaded dynamically; it maps key combinations to actions.
var HashHandler = ace.require('ace/keyboard/hash_handler').HashHandler;
/**
 * Compares two named entries case-insensitively; names whose lower-case
 * forms are equal fall back to a case-sensitive comparison so the final
 * order stays deterministic.
 */
function caseInsensitiveSort(a: { name: string; }, b: { name: string; }) {
    const lowerA = a.name.toLowerCase();
    const lowerB = b.name.toLowerCase();
    if (lowerA !== lowerB) {
        return lowerA < lowerB ? -1 : 1;
    }
    // The lower-case strings are equal, so order by the original casing.
    if (a.name === b.name) return 0;
    return a.name < b.name ? -1 : 1;
}
/**
 * Shape of one completion entry after marshalling through qx.data.
 * NOTE(review): the code accesses entries via generated accessors
 * (getCaption/getValue and, elsewhere, getMeta/getSnippet) while `meta` and
 * `snippet` are declared as plain fields - presumably the qooxdoo marshaller
 * generates getters for them; confirm against the marshaller's behavior.
 */
interface EntryData {
getCaption():string;
meta: any;
snippet: any;
getValue() : any;
}
/**
 * This class takes care of the autocomplete popup and deals with the key
 * events and filtering of the results while you are typing.
 */
export class AutoCompletePopup extends qx.ui.popup.Popup {
    private listModel: any;
    private handler: ace.HashHandler;
    private changeListener: Function;
    private list: qx.ui.list.List;
    private filtered: any[];          // completions surviving the current filter
    private cursorPos = 0;            // selected row index within `filtered`
    private sourceEditor: SourceEditor;
    private editor: ace.Editor;

    /**
     * Create the new Autocomplete popup window
     */
    constructor() {
        super(new qx.ui.layout.Flow());
        // this.setDecorator(null);
        this.setPadding(0, 0, 0, 0);
        this.setMargin(0, 0, 0, 0);
        this.setWidth(300);
        this.setHeight(200);
        // this.setBackgroundColor("transparent");
        this.createList();
        this.initHandler();
        this.changeListener = (ev: Event) => this.onChange(ev);
        this.addListener("disappear", this.hidePopup, this);
    }

    /**
     * Create the list that holds the completions.
     */
    private createList() {
        // Creates the list and configures it
        var list: qx.ui.list.List = new qx.ui.list.List(null).set({
            scrollbarX: "off",
            selectionMode: "single",
            // height: 280,
            width: 300,
            labelPath: "caption",
            iconPath: "meta",
            iconOptions: {
                converter: (data: string) => {
                    var icon = IDE.icons.kind[data] || IDE.icons.kind["default"];
                    return icon;
                }
            }
        });
        list.setDecorator(null);
        list.setBackgroundColor("transparent");
        this.add(list);
        this.list = list;
        this.list.addListener("click", this.insertSelectedItem.bind(this));
    }

    /**
     * Get the text between cursor and start
     */
    private getInputText(): string {
        var cursor = this.sourceEditor.getPosition();
        var text = this.sourceEditor.getLine(cursor.row).slice(0, cursor.column);
        // console.log("input text:" + text);
        var matches = text.match(/[a-zA-Z_0-9\$]*$/);
        if (matches && matches[0])
            return matches[0];
        else
            return "";
    }

    // Strict matching: the completion must start with the typed text.
    private match_strict(text: string, completion: string) {
        if (!text) return true;
        if (completion.indexOf(text) === 0) return true;
        return false;
    }

    // Forgiving matching: the typed text may occur anywhere in the completion.
    private match_forgiven(text: string, completion: string) {
        if (!text) return true;
        if (completion.indexOf(text) > -1) return true;
        return false;
    }

    /**
     * Filter the available completions based on the user's text input so far.
     */
    private updateFilter() {
        var text = this.getInputText();
        if (text) text = text.toLowerCase();
        var matchFunction = this.match_forgiven;
        if (IDE.config.editor.completionMode) {
            var methodName = "match_" + IDE.config.editor.completionMode;
            if (this[methodName]) matchFunction = this[methodName];
        }
        var lastItem = this.listModel.getItem(this.listModel.getLength() - 1);
        this.filtered = [];
        var delegate = {
            filter: (data: EntryData) => {
                var value = data.getCaption().toLowerCase();
                var result = matchFunction(text, value);
                if (result) this.filtered.push(data);
                if (data === lastItem) {
                    // IDE.console.log("filtered items: " + this.filtered.length);
                    // Reset the selection when the previously selected entry
                    // did not survive the new filter.
                    var selection = this.list.getSelection().getItem(0);
                    if (!(selection && (this.filtered.indexOf(selection) > -1))) {
                        this.cursorPos = 0;
                        this.moveCursor(0);
                    }
                }
                return result;
            }
        };
        this.list.setDelegate(delegate);
    }

    // Move the selection by `row` entries, clamped to the filtered list.
    private moveCursor(row: number) {
        this.cursorPos += row;
        var len = this.filtered.length - 1;
        if (this.cursorPos > len) this.cursorPos = len;
        if (this.cursorPos < 0) this.cursorPos = 0;
        var item = this.filtered[this.cursorPos];
        this.list.resetSelection();
        (<any>this.list.getSelection()).push(item);
        // IDE.console.log("Cursor:" + this.cursorPos);
    }

    // Replace the typed prefix with the currently selected completion and
    // close the popup.
    private insertSelectedItem() {
        var current = this.list.getSelection().getItem(0);
        if (current) {
            var inputText = this.getInputText();
            // Delete the prefix already typed before inserting the completion.
            for (var i = 0; i < inputText.length; i++) {
                this.editor.remove("left");
            }
            if (current.getMeta() === "snippet") {
                this.editor.insertSnippet(current.getSnippet());
            } else {
                this.editor.insert(current.getValue());
            }
        }
        this.hidePopup();
    }

    /**
     * Setup the keybindings so that, while the popup is visible, the key
     * events go to the popup window and not the editor.
     */
    private initHandler() {
        this.handler = new HashHandler();
        this.handler.bindKey("Home", () => { this.moveCursor(-10000); });
        this.handler.bindKey("End", () => { this.moveCursor(10000); });
        this.handler.bindKey("Down", () => { this.moveCursor(1); });
        this.handler.bindKey("PageDown", () => { this.moveCursor(10); });
        this.handler.bindKey("Up", () => { this.moveCursor(-1); });
        this.handler.bindKey("PageUp", () => { this.moveCursor(-10); });
        this.handler.bindKey("Esc", () => { this.hidePopup(); });
        this.handler.bindKey("Return|Tab", () => { this.insertSelectedItem(); });
    }

    // Completions of these kinds get "()" appended to caption and value.
    private isExecutable(kind: string) {
        if (kind === "method" || kind === "function" || kind === "constructor") return true;
        return false;
    }

    /**
     * Show the popup and make sure the keybindings are in place.
     */
    private showPopup(completions: CompletionEntry[]) {
        if (this.list.isSeeable() || (completions.length === 0)) return;
        var cursor = this.editor.getCursorPosition();
        var coords = this.editor.renderer.textToScreenCoordinates(cursor.row, cursor.column);
        this.editor.keyBinding.addKeyboardHandler(this.handler);
        this.moveTo(coords.pageX, coords.pageY + 20);
        var rawData = [];
        completions.forEach((completion) => {
            if (this.isExecutable(completion.meta)) {
                completion.caption += "()";
                if (completion.value) completion.value += "()";
            }
            if ((!completion.caption) && (!completion.name)) {
                console.log(completion);
                return;
            }
            rawData.push({
                caption: completion.caption || completion.name,
                meta: completion.meta,
                snippet: completion.snippet || "",
                value: completion.value || ""
            });
        });
        this.listModel = qx.data.marshal.Json.createModel(rawData, false);
        this.list.setModel(this.listModel);
        this.updateFilter();
        this.cursorPos = 0;
        this.moveCursor(0);
        this.show();
        // this.editor.commands.on('afterExec', (e) => { this.onChange2(e);});
        this.editor.getSession().on("change", this.changeListener);
    }

    /**
     * Hide the popup and remove all the keybindings
     */
    private hidePopup() {
        this.editor.keyBinding.removeKeyboardHandler(this.handler);
        this.exclude();
        this.editor.getSession().removeListener('change', this.changeListener);
        this.editor.focus();
    }

    /**
     * Determines if the specified character may be part of a JS identifier
     */
    private static isJsIdentifierPart(ch: number) {
        ch |= 0; //tell JIT that ch is an int
        return ch >= 97 && ch <= 122 //a-z
            || ch >= 65 && ch <= 90 //A-Z
            || ch >= 48 && ch <= 57 //0-9
            || ch === 95 //_
            || ch === 36 //$
            || ch > 127; //non-ASCII letter. Not accurate, but good enough for autocomplete
    }

    // Alternative change handler wired to ace's afterExec event; the
    // registration is currently commented out in showPopup.
    private onChange2(e: any) {
        var key = e.args || "";
        if ((key == null) || (!AutoCompletePopup.isJsIdentifierPart(key.charCodeAt(0)))) {
            this.hidePopup();
            return;
        }
        this.updateFilter();
    }

    /**
     * Check whether the typed character is reason to stop the code
     * completion.
     */
    private onChange(ev: any) {
        var key: string = ev.lines.join("");
        if ((key == null) || (!AutoCompletePopup.isJsIdentifierPart(key.charCodeAt(0)))) {
            this.hidePopup();
            return;
        }
        // hack to get the cursor updated before we render
        // TODO find out how to force update without a timer delay
        setTimeout(() => { this.updateFilter(); }, 0);
    }

    /**
     * This method is called from the editor to start the code completion process.
     */
    complete(memberCompletionOnly: boolean, sourceEditor: SourceEditor, editor: ace.Editor) {
        this.editor = editor;
        this.sourceEditor = sourceEditor;
        var session = editor.getSession();
        var pos = editor.getCursorPosition();
        var line = session.getLine(pos.row);
        var prefix = retrievePrecedingIdentifier(line, pos.column);
        // this.base = session.doc.createAnchor(pos.row, pos.column - prefix.length);
        var matches: CompletionEntry[] = [];
        var completers = getCompleters(sourceEditor, memberCompletionOnly);
        var total = completers.length;
        completers.forEach((completer, i) => {
            completer.getCompletions(editor, session, pos, prefix, (err: any, results: CompletionEntry[]) => {
                total--;
                if (!err) matches = matches.concat(results);
                // Show the popup once every completer has reported back.
                if (total === 0) {
                    this.showPopup(matches);
                }
            });
        });
    }
}
} | the_stack |
import {
CodeModel,
ObjectSchema,
isObjectSchema,
SchemaType,
Property,
ParameterLocation,
Parameter,
VirtualParameter,
getAllProperties,
ImplementationLocation,
Request,
} from "@autorest/codemodel";
import { Session } from "@autorest/extension-base";
import { ModelerFourOptions } from "../modeler/modelerfour-options";
import { isDefined } from "../utils";
// OpenAPI extension keys that drive and track the flattening passes.
const xmsThreshold = "x-ms-payload-flattening-threshold";
const xmsFlatten = "x-ms-client-flatten";
// Transient marker guarding against circular x-ms-client-flatten references.
const isCurrentlyFlattening = "x-ms-flattening";
// Marks schemas whose properties were inlined into their parent.
const hasBeenFlattened = "x-ms-flattened";
export class Flattener {
codeModel: CodeModel;
// modelerfour options, loaded in init()
options: ModelerFourOptions = {};
// value of `payload-flattening-threshold` (defaults to 0)
threshold = 0;
// when true, body-parameter flattening recurses into nested object properties
recursePayload = false;
/** @param session Current autorest session; its code model is transformed in place. */
constructor(protected session: Session<CodeModel>) {
this.codeModel = session.model; // shadow(session.model, filename);
}
/**
 * Loads the configuration values that control flattening.
 * @returns this, for call chaining.
 */
async init() {
// get our configuration for this run.
this.options = await this.session.getValue("modelerfour", {});
this.threshold = await this.session.getValue("payload-flattening-threshold", 0);
this.recursePayload = await this.session.getValue("recursive-payload-flattening", false);
return this;
}
/**
 * Yields a VirtualParameter for every writable property of the flattened
 * body parameter. When recursive payload flattening is enabled, nested
 * object properties are expanded too, with `path` recording the chain of
 * parent properties leading to each leaf.
 * @param parameter The original body parameter being flattened.
 * @param property The property to expose as a virtual parameter.
 * @param path Parent-property chain from the body schema to `property`.
 */
*getFlattenedParameters(
parameter: Parameter,
property: Property,
path: Array<Property> = [],
): Iterable<VirtualParameter> {
if (property.readOnly) {
// skip read-only properties
return;
}
if (isObjectSchema(property.schema) && this.recursePayload === true) {
// Recurse into the nested object instead of emitting it as one parameter.
for (const child of getAllProperties(<ObjectSchema>property.schema)) {
yield* this.getFlattenedParameters(parameter, child, [...path, property]);
}
} else {
const vp = new VirtualParameter(
property.language.default.name,
property.language.default.description,
property.schema,
{
...property,
implementation: ImplementationLocation.Method,
originalParameter: parameter,
targetProperty: property,
pathToProperty: path,
},
);
// Strip property-only fields copied in by the spread above.
delete (<any>vp).serializedName;
delete (<any>vp).readOnly;
delete (<any>vp).isDiscriminator;
delete (<any>vp).flattenedNames;
// if the parameter has "x-ms-parameter-grouping" extension, (and this is a top level parameter) then we should copy that to the vp.
if (path.length === 0 && parameter.extensions?.["x-ms-parameter-grouping"]) {
(vp.extensions = vp.extensions || {})["x-ms-parameter-grouping"] =
parameter.extensions?.["x-ms-parameter-grouping"];
}
yield vp;
}
}
/**
 * Flattens a request's body parameter: marks the original parameter as
 * flattened (hidden) and appends one virtual parameter per writable
 * property of its object schema.
 */
flattenPayload(request: Request, parameter: Parameter, schema: ObjectSchema) {
// hide the original parameter
parameter.flattened = true;
for (const property of getAllProperties(schema)) {
if (property.readOnly) {
// skip read-only properties
continue;
}
for (const vp of this.getFlattenedParameters(parameter, property)) {
request.parameters?.push(vp);
}
}
}
/**
 * Recursively flattens models marked 'x-ms-client-flatten': properties of a
 * flattened child schema are inlined into the parent, with `flattenedNames`
 * recording the original nesting. A transient extension marks in-progress
 * schemas to detect circular flatten references.
 * @param schema schema to recursively flatten
 */
flattenSchema(schema: ObjectSchema) {
const state = schema.extensions?.[isCurrentlyFlattening];
if (state === false) {
// already done.
return;
}
if (state === true) {
// in progress.
throw new Error(
`Circular reference encountered during processing of x-ms-client flatten ('${schema.language.default.name}')`,
);
}
// hasn't started yet.
schema.extensions = schema.extensions || {};
schema.extensions[isCurrentlyFlattening] = true;
// ensure that parent schemas are done first -- this should remove
// the problem when the order isn't just right.
for (const parent of schema.parents?.immediate ?? []) {
if (isObjectSchema(parent)) {
this.flattenSchema(parent);
}
}
if (schema.properties) {
// Iterate in reverse so splicing does not shift unvisited indexes.
for (const [index, property] of [...schema.properties.entries()].reverse()) {
if (isObjectSchema(property.schema) && property.extensions?.[xmsFlatten]) {
// first, ensure that the child is pre-flattened
this.flattenSchema(property.schema);
// remove that property from the schema
schema.properties.splice(index, 1);
// copy all of the properties from the child into this
// schema
for (const childProperty of getAllProperties(property.schema)) {
schema.addProperty(
new Property(
childProperty.language.default.name,
childProperty.language.default.description,
childProperty.schema,
{
...childProperty,
flattenedNames: [
property.serializedName,
...(childProperty.flattenedNames ? childProperty.flattenedNames : [childProperty.serializedName]),
],
required: property.required && childProperty.required,
nullable: property.nullable ?? childProperty.nullable,
},
),
);
}
// remove the extension
delete property.extensions[xmsFlatten];
if (Object.keys(property.extensions ?? {}).length === 0) {
delete property["extensions"];
}
// and mark the child class as 'do-not-generate' ?
(property.schema.extensions = property.schema.extensions || {})[hasBeenFlattened] = true;
}
}
}
schema.extensions[isCurrentlyFlattening] = false;
}
/**
 * Entry point: applies model flattening and/or payload flattening according
 * to the modelerfour options and returns the transformed code model.
 */
public process() {
// support 'x-ms-payload-flattening-threshold' per-operation
// support '--payload-flattening-threshold:X' global setting
if (this.options["flatten-models"] === true) {
this.flattenModels();
}
if (this.options["flatten-payloads"] === true) {
this.flattenPayloads();
}
return this.codeModel;
}
private flattenModels() {
for (const schema of this.codeModel.schemas.objects ?? []) {
this.flattenSchema(schema);
}
if (!this.options["keep-unused-flattened-models"]) {
this.removeUnusedFlattenModels();
}
for (const schema of this.codeModel.schemas.objects ?? []) {
if (schema.extensions) {
delete schema.extensions[isCurrentlyFlattening];
// don't want this until I have removed the unreferenced models.
// delete schema.extensions[hasBeenFlattened];
if (schema.extensions && Object.keys(schema.extensions).length === 0) {
delete schema["extensions"];
}
}
}
}
  private removeUnusedFlattenModels() {
    if (!this.codeModel.schemas.objects) {
      return;
    }
    // Entries are set to undefined (rather than spliced) while scanning so
    // indexes stay stable; the array is compacted once at the very end.
    const objects: Array<ObjectSchema | undefined> = this.codeModel.schemas.objects;
    let dirty = false;
    // iterate to a fixed point: removing one schema can orphan another
    do {
      // reset on every pass
      dirty = false;
      // remove unreferenced models
      for (const [index, schema] of objects.entries()) {
        if (schema === undefined) {
          continue;
        }
        // only remove unreferenced models that have been flattened.
        if (!schema.extensions?.[hasBeenFlattened]) {
          continue;
        }
        if (schema.discriminatorValue || schema.discriminator) {
          // it's polymorphic -- I don't think we can remove this
          continue;
        }
        if (schema.children?.all || schema.parents?.all) {
          // it's got either a parent or child schema.
          continue;
        }
        if (!this.isReferencedByOthers(schema)) {
          objects[index] = undefined;
          dirty = true;
        }
      }
    } while (dirty);
    this.codeModel.schemas.objects = objects.filter(isDefined);
  }
private isReferencedByOthers(schema: ObjectSchema) {
let count = 0;
return find(
this.codeModel,
(obj) => {
if (obj === schema) {
count++;
if (count > 1) {
return true;
}
}
return false;
},
new WeakSet<object>(),
);
}
  private flattenPayloads() {
    /**
     * BodyParameter Payload Flattening
     *
     * A body parameter is flattened (one level) when:
     *
     * - the body parameter schema is an object
     * - the body parameter schema is not polymorphic (is this true?)
     *
     *
     *
     * and one of:
     * - the body parameter has x-ms-client-flatten: true
     * - the operation has x-ms-payload-flattening-threshold greater than zero and the property count in the body parameter is less than or equal to that.
     * - the global configuration option payload-flattening-threshold is greater than zero and the property count in the body parameter is less than or equal to that
     *
     */
    // flatten payloads
    for (const group of this.codeModel.operationGroups) {
      for (const operation of group.operations) {
        // when there are multiple requests in an operation
        // and the generator asks not to flatten them
        if (
          operation.requests &&
          operation.requests.length > 1 &&
          this.options["multiple-request-parameter-flattening"] === false
        ) {
          continue;
        }
        for (const request of operation.requests ?? []) {
          // only method-level HTTP body parameters are candidates
          const body = request.parameters?.find(
            (p) => p.protocol.http?.in === ParameterLocation.Body && p.implementation === ImplementationLocation.Method,
          );
          if (body && isObjectSchema(body.schema)) {
            const schema = <ObjectSchema>body.schema;
            if (schema.discriminator) {
              // skip flattening on polymorphic payloads, since you don't know the actual type.
              continue;
            }
            let flattenOperationPayload = body?.extensions?.[xmsFlatten];
            if (flattenOperationPayload === false) {
              // told not to explicitly.
              continue;
            }
            if (!flattenOperationPayload) {
              // fall back to the per-operation threshold, then the global one
              const threshold = <number>operation.extensions?.[xmsThreshold] ?? this.threshold;
              if (threshold > 0) {
                // get the count of the (non-readonly) properties in the schema
                flattenOperationPayload =
                  [...getAllProperties(schema)].filter(
                    (property) => property.readOnly !== true && property.schema.type !== SchemaType.Constant,
                  ).length <= threshold;
              }
            }
            if (flattenOperationPayload) {
              this.flattenPayload(request, body, schema);
              request.updateSignatureParameters();
            }
          }
        }
      }
    }
  }
}
function find(instance: any, visitor: (item: any) => boolean, visited: WeakSet<object>): boolean {
if (instance === null || instance === undefined || visited.has(instance)) {
return false;
}
visited.add(instance);
if (instance instanceof Set || Array.isArray(instance)) {
for (const each of instance) {
if (typeof each === "object") {
if (find(each, visitor, visited)) {
return true;
}
}
if (visitor(each)) {
return true;
}
}
return false;
}
if (instance instanceof Map) {
// walk thru map members.
for (const [key, value] of instance.entries()) {
if (typeof value === "object") {
if (find(value, visitor, visited)) {
return true;
}
}
// yield the member after visiting children
if (visitor(value)) {
return true;
}
}
return false;
}
// objects
for (const key of Object.keys(instance)) {
const value = instance[key];
if (typeof value === "object") {
if (find(value, visitor, visited)) {
return true;
}
}
// yield the member after visiting children
if (visitor(value)) {
return true;
}
}
return false;
} | the_stack |
import 'core-js/es/number/is-integer';
// eslint-disable-next-line max-classes-per-file
import GhostContentAPI from '@tryghost/content-api';
const fuzzysort = require('fuzzysort');
/**
* Element.closest() polyfill
* https://developer.mozilla.org/en-US/docs/Web/API/Element/closest#Polyfill
*/
// Older browsers expose only vendor-prefixed versions of Element.matches();
// alias them so the closest() polyfill below can rely on matches().
if (!Element.prototype.matches) {
    Element.prototype.matches =
        // @ts-ignore
        Element.prototype.msMatchesSelector ||
        Element.prototype.webkitMatchesSelector;
}
if (!Element.prototype.closest) {
    // eslint-disable-next-line func-names
    Element.prototype.closest = function (s: string) {
        // eslint-disable-next-line @typescript-eslint/no-this-alias
        let el: Element | null = this;
        // walk up the ancestor chain until a node matches the selector
        do {
            if (el.matches(s)) return el;
            el = el.parentElement;
        } while (el !== null && el.nodeType === 1);
        return null;
    };
}
// TODO: Create custom type for any types
/* eslint-disable @typescript-eslint/no-explicit-any */
/** Constructor options for GhostSearch. */
interface Props {
    /** Ghost Content API url; must not end in a trailing slash. */
    url: string;
    /** Content API key (hex string from the "Integrations" screen in Ghost Admin). */
    key: string;
    /** Content API version; defaults to 'v3'. */
    version?: string;
    /** CSS selector for the search input; defaults to '#ghost-search-field'. */
    input?: string;
    /** CSS selector for the results container; defaults to '#ghost-search-results'. */
    results?: string;
    /** Optional CSS selector for a submit button that triggers the search. */
    button?: string;
    /** Initial search term applied on load. */
    defaultValue?: string;
    /** Function rendering one result object to an HTML string. */
    template?: any;
    /** When to fetch data: 'focus' (default) or 'load'. */
    trigger?: string;
    /** fuzzysort options (keys, limit, threshold, allowTypo). */
    options?: any;
    /** Content API resource name and browse parameters. */
    api?: any;
    /** Lifecycle callbacks: beforeFetch/afterFetch/beforeDisplay/afterDisplay. */
    on?: any;
}
/** Generic string-to-string dictionary. */
interface ObjectOfStrings {
    [propName: string]: string;
}
/**
 * Drop-in search widget for Ghost blogs. Fetches content once through the
 * Ghost Content API, then fuzzy-matches it client-side with fuzzysort and
 * renders the matches into a results container.
 */
export default class GhostSearch implements Props {
    url: string;
    key: string;
    version?: string;
    input?: string;
    results?: string;
    button?: string;
    defaultValue?: string;
    template?: any;
    trigger?: string;
    options?: any;
    api?: any;
    on?: any;
    // true once data has been fetched, so the API is hit at most once
    private check = false;
    constructor(props?: Props) {
        this.url = (props && props.url) || '';
        this.key = (props && props.key) || '';
        this.version = (props && props.version) || 'v3';
        this.input = (props && props.input) || '#ghost-search-field';
        this.results = (props && props.results) || '#ghost-search-results';
        this.button = (props && props.button) || '';
        this.defaultValue = (props && props.defaultValue) || '';
        // default template: a list item linking to the post on this host
        this.template =
            (props && props.template) ||
            ((result: { slug: string; title: string }): string => {
                const url = [
                    window.location.protocol,
                    '//',
                    window.location.host,
                ].join('');
                return `<li><a href="${url}/${result.slug}">${result.title}</a></li>`;
            });
        this.trigger = (props && props.trigger) || 'focus';
        this.options = (props && props.options) || {
            keys: ['title'],
            limit: 10,
            threshold: -3500,
            allowTypo: false,
        };
        this.api = (props && props.api) || {
            resource: 'posts',
            parameters: {
                limit: 'all',
                fields: ['title', 'slug', 'created_at'],
                filter: '',
                include: 'authors',
                order: '',
                formats: '',
                page: '',
            },
        };
        this.on = (props && props.on) || {
            // eslint-disable-next-line @typescript-eslint/no-empty-function
            beforeDisplay: () => {},
            // eslint-disable-next-line @typescript-eslint/no-empty-function
            afterDisplay: () => {},
            // eslint-disable-next-line @typescript-eslint/no-empty-function
            beforeFetch: () => {},
            // eslint-disable-next-line @typescript-eslint/no-empty-function
            afterFetch: () => {},
        };
        this.init();
    }
    // Fetches all matching resources from the Content API (only non-empty
    // browse parameters are sent) and hands the data to search().
    fetch = () => {
        this.on.beforeFetch();
        const ghostAPI = new GhostContentAPI({
            url: this.url,
            key: this.key,
            version: this.version,
        });
        const browse: ObjectOfStrings = {};
        const { parameters } = this.api;
        Object.keys(parameters).forEach((key: string) => {
            if (parameters[key] !== '') browse[key] = parameters[key];
        });
        ghostAPI[this.api.resource]
            .browse(browse)
            .then((data: any) => {
                this.search(data);
            })
            .catch((err: string) => {
                console.error(err);
            });
    };
    // Parses an HTML string and returns its first node (used to turn the
    // template output into a DOM element).
    createElementFromHTML = (htmlString: string) => {
        const div: HTMLDivElement = document.createElement('div');
        div.innerHTML = htmlString.trim();
        return div.firstChild;
    };
    // TODO: create an interface
    // Runs fuzzysort over the fetched data and renders the matches into
    // the results container, clearing any previously rendered matches.
    displayResults = (data: any) => {
        if (!this.results) return;
        const inputElm: HTMLInputElement | null = document.querySelector(
            this.input!
        );
        const resultsElm: Element | null = document.querySelector(this.results);
        if (resultsElm) {
            // empty out results from a previous keystroke
            while (resultsElm.firstChild) {
                resultsElm.removeChild(resultsElm.firstChild);
            }
        }
        let inputValue: string | null;
        if (this.defaultValue) {
            inputValue = this.defaultValue;
        } else if (inputElm) {
            inputValue = inputElm.value;
        } else {
            inputValue = null;
        }
        const searchContainer: HTMLDivElement | null = document.querySelector(
            '.search-container'
        );
        if (searchContainer) searchContainer.classList.add('dirty');
        const results = fuzzysort.go(inputValue, data, {
            keys: this.options.keys,
            limit: this.options.limit,
            allowTypo: this.options.allowTypo,
            threshold: this.options.threshold,
        });
        if (resultsElm) {
            // NOTE(review): the `key < results.length` guard presumably
            // skips non-index properties fuzzysort puts on its results
            // array (e.g. 'total') -- confirm against the fuzzysort docs.
            Object.keys(results).forEach((key) => {
                if (key < results.length) {
                    resultsElm.appendChild(
                        this.createElementFromHTML(
                            this.template(results[key].obj)
                        )!
                    );
                }
            });
        }
        this.on.afterDisplay(results);
        // defaultValue only applies to the very first display
        this.defaultValue = '';
    };
    // TODO: create an interface for data
    // Wires up the UI triggers (submit-button click, or input keyup when
    // no button is configured) that re-run displayResults over the data.
    search = (data: any) => {
        this.on.afterFetch(data);
        this.check = true;
        const inputElm: HTMLInputElement | null = document.querySelector(
            this.input!
        );
        if (this.defaultValue !== '') {
            this.on.beforeDisplay();
            this.displayResults(data);
        }
        if (this.button) {
            const button: HTMLInputElement | null = document.querySelector(
                this.button
            );
            if (
                button &&
                button.tagName === 'INPUT' &&
                button.type === 'submit'
            ) {
                // keep the surrounding form from actually submitting
                button.closest('form')!.addEventListener('submit', (e) => {
                    e.preventDefault();
                });
                button.addEventListener('click', (e) => {
                    e.preventDefault();
                    this.on.beforeDisplay();
                    this.displayResults(data);
                });
            }
        } else if (inputElm) {
            inputElm.addEventListener('keyup', () => {
                this.on.beforeDisplay();
                this.displayResults(data);
            });
        }
    };
    // Validates that the configured DOM elements exist and that the API
    // credentials are present; logs a message and returns false otherwise.
    checkArgs = () => {
        const resultsElm: HTMLInputElement | null = document.querySelector(
            this.results!
        );
        const inputElm: HTMLInputElement | null = document.querySelector(
            this.input!
        );
        if (!document.body.contains(inputElm)) {
            console.log('Input not found.');
            return false;
        }
        if (!document.body.contains(resultsElm)) {
            console.log('Results not found.');
            return false;
        }
        if (this.button && !document.querySelectorAll(this.button).length) {
            console.log('Button not found.');
            return false;
        }
        if (this.url === '') {
            console.log(
                'Content API Client Library url missing. Please set the url. Must not end in a trailing slash.'
            );
            return false;
        }
        if (this.key === '') {
            console.log(
                'Content API Client Library key missing. Please set the key. Hex string copied from the "Integrations" screen in Ghost Admin.'
            );
            return false;
        }
        return true;
    };
    validate = () => {
        return this.checkArgs();
    };
    // TODO reformat these functions
    // Shows the search overlay, locks page scrolling, focuses the input.
    public openSearch = () => {
        const search: HTMLDivElement | null = document.querySelector('#search');
        const bodyElm: HTMLBodyElement | null = document.querySelector('body');
        const inputElm: HTMLInputElement | null = document.querySelector(
            this.input!
        );
        if (search) search.style.display = 'block';
        bodyElm!.classList.add('noscroll');
        inputElm!.focus();
    };
    // Hides the search overlay (if visible), restores scrolling, and
    // clears both the input field and any rendered results.
    public closeSearch = () => {
        const search: HTMLElement | null = document.querySelector('#search');
        const resultsElm: HTMLElement | null = document.querySelector(
            '#ghost-search-results'
        );
        if (
            !search ||
            search.style.display === 'none' ||
            search.style.display === ''
        ) {
            return;
        }
        search.style.display = 'none';
        document!.querySelector('body')!.classList.remove('noscroll');
        document!.querySelector('.search-container')!.classList.remove('dirty');
        const inputElm: HTMLInputElement | null = document.querySelector(
            this.input!
        );
        inputElm!.value = '';
        if (resultsElm) {
            while (resultsElm.firstChild) {
                resultsElm.removeChild(resultsElm.firstChild);
            }
        }
    };
    // Validates configuration, installs the fetch trigger ('focus' on the
    // input, or window 'load'), and registers a global Escape handler.
    init = () => {
        const inputElm: HTMLInputElement | null = document.querySelector(
            this.input!
        );
        if (!this.validate()) {
            return;
        }
        if (this.defaultValue && inputElm) {
            inputElm.value = this.defaultValue;
            window.onload = () => {
                if (!this.check) this.fetch();
            };
        }
        if (inputElm && this.trigger === 'focus') {
            inputElm.addEventListener('focus', () => {
                if (!this.check) this.fetch();
            });
        } else if (this.trigger === 'load') {
            window.onload = () => {
                if (!this.check) this.fetch();
            };
        }
        // close the overlay on Escape (keyCode 27 covers older browsers)
        document.body.addEventListener('keydown', (e) => {
            if (e.defaultPrevented) {
                return;
            }
            const key = e.key || e.keyCode;
            if (key === 'Escape' || key === 'Esc' || key === 27) {
                this.closeSearch();
            }
        });
    };
}
import { MicroSentryPlugin } from '../models/plugin';
import { BrowserMicroSentryClient } from '../services/browser-micro-sentry-client';
import { fill } from '../utils/fill';
import { Severity } from '@micro-sentry/core';
import { parseUrl } from '../utils/parse-url';
import {
getFetchMethod,
getFetchUrl,
supportsNativeFetch,
} from '../utils/fetch-utils';
import { htmlTreeAsString } from '../utils/html-tree-as-path';
import { safeJoin } from '../utils/safe-join';
/**
 * micro-sentry plugin that records breadcrumbs for DOM interactions,
 * console calls, fetch/XHR requests and history navigation by patching
 * the corresponding browser APIs at construction time.
 *
 * Note: only the DOM listeners are registered for teardown; the fill()
 * patches on console/fetch/history/XHR stay installed after destroy().
 */
export class BreadcrumbPlugin implements MicroSentryPlugin {
  // teardown callbacks collected by the initializers; run by destroy()
  private subscriptions: (() => void)[] = [];
  // last URL seen by a history change, used as `from` for the next one
  private lastHref?: string;
  constructor(readonly client: BrowserMicroSentryClient) {
    this.initDOM();
    this.initConsole();
    this.initFetch();
    this.initHistory();
    this.initXHR();
  }
  // Patches XMLHttpRequest.open/send to emit an 'xhr' breadcrumb when a
  // request completes (readyState 4). Requests posting to a URL that
  // contains `sentry_key` are marked as Sentry's own and skipped.
  initXHR(): void {
    if (!('XMLHttpRequest' in window)) {
      return;
    }
    // eslint-disable-next-line @typescript-eslint/no-this-alias
    const self = this;
    const xhrproto = XMLHttpRequest.prototype;
    fill(xhrproto, 'open', (originalOpen) => {
      return function (...args: any[]): void {
        // eslint-disable-next-line @typescript-eslint/ban-ts-comment
        // @ts-ignore
        // eslint-disable-next-line @typescript-eslint/no-this-alias
        const xhr = this; // tslint:disable-line:no-this-assignment
        const url = args[1];
        // stash method/url on the instance for the completion breadcrumb
        xhr.__sentry_xhr__ = {
          method: typeof args[0] === 'string' ? args[0].toUpperCase() : args[0],
          url: args[1],
        };
        // if Sentry key appears in URL, don't capture it as a request
        if (
          typeof url === 'string' &&
          xhr.__sentry_xhr__.method === 'POST' &&
          url.match(/sentry_key/)
        ) {
          xhr.__sentry_own_request__ = true;
        }
        const onreadystatechangeHandler = () => {
          if (xhr.readyState === 4) {
            try {
              // touching statusCode in some platforms throws
              // an exception
              if (xhr.__sentry_xhr__) {
                xhr.__sentry_xhr__.status_code = xhr.status;
              }
            } catch (e) {
              /* do nothing */
            }
            self.xhrBreadcrumb({
              args,
              endTimestamp: Date.now(),
              startTimestamp: Date.now(),
              xhr,
            });
          }
        };
        // wrap an existing handler if the caller assigned one, otherwise
        // just listen for the event
        if (
          'onreadystatechange' in xhr &&
          typeof xhr.onreadystatechange === 'function'
        ) {
          fill(xhr, 'onreadystatechange', (original) => {
            return function onreadystatechange(...readyStateArgs: any[]) {
              onreadystatechangeHandler();
              original.apply(xhr, readyStateArgs);
            };
          });
        } else {
          xhr.addEventListener('readystatechange', onreadystatechangeHandler);
        }
        // eslint-disable-next-line @typescript-eslint/ban-ts-comment
        // @ts-ignore
        return originalOpen.apply(xhr, args);
      };
    });
    fill(xhrproto, 'send', (originalSend) => {
      return function send(...args: any[]): void {
        // start-only breadcrumb data; xhrBreadcrumb ignores it until the
        // request has an endTimestamp
        self.xhrBreadcrumb({
          args,
          startTimestamp: Date.now(),
          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
          // @ts-ignore
          xhr: this,
        });
        // eslint-disable-next-line @typescript-eslint/ban-ts-comment
        // @ts-ignore
        return originalSend.apply(this, args);
      };
    });
  }
  // Records 'navigation' breadcrumbs by wrapping window.onpopstate and
  // history.pushState/replaceState.
  initHistory(): void {
    // eslint-disable-next-line @typescript-eslint/no-this-alias
    const self = this;
    const oldOnPopState = window.onpopstate;
    window.onpopstate = function (
      this: WindowEventHandlers,
      ...args: any[]
    ): any {
      const to = window.location.href;
      // keep track of the current URL state, as we always receive only the updated state
      const from = self.lastHref;
      self.lastHref = to;
      self.historyBreadcrumb({
        from,
        to,
      });
      if (oldOnPopState) {
        // eslint-disable-next-line @typescript-eslint/ban-ts-comment
        // @ts-ignore
        return oldOnPopState.apply(this, args);
      }
    };
    /** @hidden */
    function historyReplacementFunction(originalHistoryFunction: any): any {
      return function (this: History, ...args: any[]): void {
        // pushState/replaceState signature: (state, title, url?)
        const url = args.length > 2 ? args[2] : undefined;
        if (url) {
          // coerce to string (this is what pushState does)
          const from = self.lastHref;
          const to = String(url);
          // keep track of the current URL state, as we always receive only the updated state
          self.lastHref = to;
          self.historyBreadcrumb({
            from,
            to,
          });
        }
        return originalHistoryFunction.apply(this, args);
      };
    }
    fill(window.history, 'pushState', historyReplacementFunction);
    fill(window.history, 'replaceState', historyReplacementFunction);
  }
  // Runs the teardown callbacks registered by initDOM (the API patches
  // made elsewhere are not undone).
  destroy() {
    this.subscriptions.forEach((fn) => fn());
  }
  // Listens for click and keypress on the document and records a 'ui.*'
  // breadcrumb describing the event target.
  private initDOM() {
    const clickEventHandler = (event: Event) =>
      this.prepareDomEvent('click', event);
    const keypressEventHandler = (event: Event) =>
      this.prepareDomEvent('keypress', event);
    window.document.addEventListener('click', clickEventHandler, false);
    window.document.addEventListener('keypress', keypressEventHandler, false);
    this.subscriptions.push(() => {
      window.document.removeEventListener('click', clickEventHandler, false);
      window.document.removeEventListener(
        'keypress',
        keypressEventHandler,
        false
      );
    });
  }
  // Serializes the event target's DOM path and adds a 'ui.<name>'
  // breadcrumb; silently skips events with no describable target.
  private prepareDomEvent(name: string, event: Event) {
    let target;
    // Accessing event.target can throw (see getsentry/raven-js#838, #768)
    try {
      target = event.target
        ? htmlTreeAsString(event.target as Node)
        : htmlTreeAsString((event as unknown) as Node);
    } catch (e) {
      target = '<unknown>';
    }
    if (target.length === 0) {
      return;
    }
    this.client.addBreadcrumb({
      category: `ui.${name}`,
      message: target,
    });
  }
  // Patches window.fetch so every completed request (success or failure)
  // produces an 'http' breadcrumb via fetchBreadcrumb.
  private initFetch(): void {
    if (!supportsNativeFetch()) {
      return;
    }
    fill(window, 'fetch', (originalFetch) => {
      return (...args: [any]) => {
        const commonHandlerData = {
          args,
          fetchData: {
            method: getFetchMethod(args),
            url: getFetchUrl(args),
          },
          startTimestamp: Date.now(),
        };
        // start-only call; fetchBreadcrumb ignores it (no endTimestamp)
        this.fetchBreadcrumb({
          ...commonHandlerData,
        });
        return originalFetch.apply(window, args).then(
          (response: Response) => {
            this.fetchBreadcrumb({
              ...commonHandlerData,
              endTimestamp: Date.now(),
              response,
            });
            return response;
          },
          (error: Error) => {
            this.fetchBreadcrumb({
              ...commonHandlerData,
              endTimestamp: Date.now(),
              error,
            });
            // re-throw so the caller still sees the failure
            throw error;
          }
        );
      };
    });
  }
  // Adds an 'http'/'fetch' breadcrumb for a finished fetch; errors get
  // Severity.error, successes carry the response status code.
  private fetchBreadcrumb(handlerData: { [key: string]: any }): void {
    // We only capture complete fetch requests
    if (!handlerData.endTimestamp) {
      return;
    }
    if (
      handlerData.fetchData.url.match(/sentry_key/) &&
      handlerData.fetchData.method === 'POST'
    ) {
      // We will not create breadcrumbs for fetch requests that contain `sentry_key` (internal sentry requests)
      return;
    }
    if (handlerData.error) {
      this.client.addBreadcrumb({
        category: 'fetch',
        data: handlerData.fetchData,
        level: Severity.error,
        type: 'http',
      });
    } else {
      this.client.addBreadcrumb({
        category: 'fetch',
        data: {
          ...handlerData.fetchData,
          status_code: handlerData.response.status,
        },
        type: 'http',
      });
    }
  }
  // Adds an 'http'/'xhr' breadcrumb for a finished, non-Sentry request.
  private xhrBreadcrumb(handlerData: { [key: string]: any }): void {
    if (handlerData.endTimestamp) {
      // We only capture complete, non-sentry requests
      if (handlerData.xhr.__sentry_own_request__) {
        return;
      }
      this.client.addBreadcrumb({
        category: 'xhr',
        data: handlerData.xhr.__sentry_xhr__,
        type: 'http',
      });
      return;
    }
  }
  // Patches the console methods so each call produces a 'console'
  // breadcrumb (assert only when the assertion actually failed), then
  // forwards to the original implementation.
  private initConsole() {
    ([
      'debug',
      'info',
      'warn',
      'error',
      'log',
      'assert',
    ] as (keyof Console)[]).forEach((level) => {
      if (!(level in window.console)) {
        return;
      }
      fill(window.console, level, (originalConsoleLevel: () => any) => {
        return (...args: any[]) => {
          // {args, level};
          const breadcrumb = {
            category: 'console',
            data: {
              arguments: args.map((arg) => {
                // if an argument fails to JSON-serialize, fall back to
                // its string representation
                try {
                  JSON.stringify(arg);
                  return arg;
                } catch (e) {
                  return Object.prototype.toString.call(arg);
                }
              }),
              logger: 'console',
            },
            level: level as Severity,
            message: safeJoin(args, ' '),
          };
          if (level === 'assert') {
            if (args[0] === false) {
              breadcrumb.message = `Assertion failed: ${
                safeJoin(args.slice(1), ' ') || 'console.assert'
              }`;
              breadcrumb.data.arguments = args.slice(1);
            } else {
              // Don't capture a breadcrumb for passed assertions
              return;
            }
          }
          this.client.addBreadcrumb(breadcrumb);
          // this fails for some browsers. :(
          if (originalConsoleLevel) {
            Function.prototype.apply.call(
              originalConsoleLevel,
              window.console,
              args
            );
          }
        };
      });
    });
  }
  // Adds a 'navigation' breadcrumb, shortening same-origin URLs to their
  // path-relative form.
  private historyBreadcrumb(handlerData: { [key: string]: any }): void {
    let from = handlerData.from;
    let to = handlerData.to;
    const parsedLoc = parseUrl(window.location.href);
    let parsedFrom = parseUrl(from);
    const parsedTo = parseUrl(to);
    // Initial pushState doesn't provide `from` information
    if (!parsedFrom.path) {
      parsedFrom = parsedLoc;
    }
    // Use only the path component of the URL if the URL matches the current
    // document (almost all the time when using pushState)
    if (
      parsedLoc.protocol === parsedTo.protocol &&
      parsedLoc.host === parsedTo.host
    ) {
      // tslint:disable-next-line:no-parameter-reassignment
      to = parsedTo.relative;
    }
    if (
      parsedLoc.protocol === parsedFrom.protocol &&
      parsedLoc.host === parsedFrom.host
    ) {
      // tslint:disable-next-line:no-parameter-reassignment
      from = parsedFrom.relative;
    }
    this.client.addBreadcrumb({
      category: 'navigation',
      data: {
        from,
        to,
      },
    });
  }
}
import { render } from '@testing-library/react';
import SEO from '../index';
import React from 'react';
// Replace next/head with a pass-through so the SEO component's head tags
// render directly into the test DOM instead of being managed by Next.js.
jest.mock('next/head', () => {
  return {
    __esModule: true,
    default: ({ children }: { children: React.ReactNode }) => <>{children}</>
  };
});
/**
 * Returns true when every element in the list has the same `key`
 * attribute as the first one (vacuously true for an empty list).
 */
function checkHavingSameKeys(
  elems: NodeListOf<HTMLElement | Element>
): boolean {
  const list = Array.from(elems);
  return list.every(
    el => el.getAttribute('key') === list[0].getAttribute('key')
  );
}
// <title> rendering behaviour of the SEO component.
describe('title', () => {
  test('should render a title element by passing title prop', () => {
    render(<SEO title="foo" />);
    expect(document.title).toBe('foo');
  });
  test('should render multiple title elements with same keys when multiple components exits', () => {
    render(
      <>
        <SEO title="first" />
        <SEO title="second" />
      </>
    );
    // In this case, Next.js render only second element since both elements have the same key.
    // https://nextjs.org/docs/api-reference/next/head
    const titles = document.querySelectorAll('title');
    expect(titles[0].text).toBe('first');
    expect(titles[1].text).toBe('second');
    expect(checkHavingSameKeys(titles)).toBe(true);
  });
  test('should not render title when title prop is not set', () => {
    // og.title alone must not produce a <title> element
    render(<SEO og={{ title: 'foo' }} />);
    const titles = document.querySelectorAll('title');
    expect(titles.length).toBe(0);
  });
});
// <link rel="canonical"> rendering behaviour.
describe('canonical', () => {
  const selector = `link[rel="canonical"]`;
  test('should render canonical links by passing canonical prop', () => {
    render(<SEO title="foo" canonical="http://example.com/foo" />);
    const canonicalLinks = document.querySelectorAll(selector);
    expect(canonicalLinks.length).toBe(1);
    expect(canonicalLinks[0].getAttribute('href')).toBe(
      'http://example.com/foo'
    );
  });
  test('should render multiple canonical links with same keys when multiple components exits', () => {
    render(
      <>
        <SEO canonical="http://example.com/first" />
        <SEO canonical="http://example.com/second" />
      </>
    );
    // In this case, Next.js render only second element since both elements have the same key.
    const canonicalLinks = document.querySelectorAll(selector);
    expect(checkHavingSameKeys(canonicalLinks)).toBe(true);
    expect(canonicalLinks[1].getAttribute('href')).toBe(
      'http://example.com/second'
    );
  });
  test('should not render canonical links when canonical prop is not set', () => {
    render(<SEO title="foo" />);
    const titles = document.querySelectorAll(selector);
    expect(titles.length).toBe(0);
  });
});
// <meta name="robots"> rendering behaviour.
describe('robots', () => {
  const selector = `meta[name="robots"]`;
  test('should render meta robots with `noindex` when robots prop value is `noindex`', () => {
    render(<SEO robots="noindex" />);
    const metaElems = document.querySelectorAll(selector);
    expect(metaElems.length).toBe(1);
    expect(metaElems[0].getAttribute('content')).toBe('noindex');
  });
  test('should render meta robots with `noindex, nofollow` when robots prop value is `noindex, nofollow', () => {
    render(<SEO robots="noindex, nofollow" />);
    const metaElems = document.querySelectorAll(selector);
    expect(metaElems[0].getAttribute('content')).toBe('noindex, nofollow');
  });
  test('should render multiple meta robots with same keys when multiple components exits', () => {
    render(
      <>
        <SEO robots="noindex" />
        <SEO robots="noindex, nofollow" />
      </>
    );
    // In this case, Next.js render only second element since both elements have the same key.
    const metaElems = document.querySelectorAll(selector);
    expect(checkHavingSameKeys(metaElems)).toBe(true);
    expect(metaElems[1].getAttribute('content')).toBe('noindex, nofollow');
  });
});
// <meta name="description"> rendering behaviour, including the default
// 150-character truncation and the maxDescriptionCharacters override.
describe('description', () => {
  const selector = `meta[name="description"]`;
  test('should render meta description by passing `description` prop', () => {
    render(<SEO description="foo" />);
    const metaElems = document.querySelectorAll(selector);
    expect(metaElems.length).toBe(1);
    expect(metaElems[0].getAttribute('content')).toBe('foo');
  });
  test('should use first 150 characters for meta description', () => {
    const longText = `But I must explain to you how all this mistaken idea of denouncing pleasure and praising pain was born and I will give you a complete account of the system, and expound the actual teachings of the gre`;
    render(<SEO description={longText} />);
    const metaElems = document.querySelectorAll(selector);
    expect(metaElems[0].getAttribute('content')).toBe(longText.substr(0, 150));
  });
  test('should truncate description after `maxDescriptionCharacters`', () => {
    const longText = `But I must explain to you how all this mistaken idea of denouncing pleasure and praising pain was born and I will give you a complete account of the system, and expound the actual teachings of the gre`;
    render(<SEO description={longText} maxDescriptionCharacters={100} />);
    const metaElems = document.querySelectorAll(selector);
    expect(metaElems[0].getAttribute('content')).toBe(longText.substr(0, 100));
  });
  test('should render multiple meta robots with same keys when multiple components exits', () => {
    render(
      <>
        <SEO description="first" />
        <SEO description="second" />
      </>
    );
    // In this case, Next.js render only second element since both elements have the same key.
    const metaElems = document.querySelectorAll(selector);
    expect(checkHavingSameKeys(metaElems)).toBe(true);
    expect(metaElems[1].getAttribute('content')).toBe('second');
  });
});
// Twitter card meta tags (twitter:card / twitter:site).
describe('twitter', () => {
  describe('card', () => {
    const selector = `meta[name="twitter:card"]`;
    test('should render meta twitter:card by passing `twitter.card` prop', () => {
      render(<SEO twitter={{ card: 'summary' }} />);
      const metaElems = document.querySelectorAll(selector);
      expect(metaElems.length).toBe(1);
      expect(metaElems[0].getAttribute('content')).toBe('summary');
    });
    test('should render multiple meta twitter:card with same keys when multiple components exits', () => {
      render(
        <>
          <SEO twitter={{ card: 'summary' }} />
          <SEO twitter={{ card: 'summary_large_image' }} />
        </>
      );
      // In this case, Next.js render only second element since both elements have the same key.
      const metaElems = document.querySelectorAll(selector);
      expect(checkHavingSameKeys(metaElems)).toBe(true);
      expect(metaElems[1].getAttribute('content')).toBe('summary_large_image');
    });
  });
  describe('site', () => {
    const selector = `meta[name="twitter:site"]`;
    test('should render meta twitter:site by passing `twitter.site` prop', () => {
      render(<SEO twitter={{ site: '@catnose99' }} />);
      const metaElems = document.querySelectorAll(selector);
      expect(metaElems.length).toBe(1);
      expect(metaElems[0].getAttribute('content')).toBe('@catnose99');
    });
    test('should render multiple meta twitter:card with same keys when multiple components exits', () => {
      render(
        <>
          <SEO twitter={{ site: '@first' }} />
          <SEO twitter={{ site: '@second' }} />
        </>
      );
      // In this case, Next.js render only second element since both elements have the same key.
      const metaElems = document.querySelectorAll(selector);
      expect(checkHavingSameKeys(metaElems)).toBe(true);
      expect(metaElems[1].getAttribute('content')).toBe('@second');
    });
  });
});
// Arbitrary extra <meta> tags supplied through the customMetaTags prop.
describe('customMetaTags', () => {
  test('should render meta elements correctly by passing `customMetaTags` props', () => {
    render(
      <SEO
        customMetaTags={[
          {
            name: 'name1',
            content: 'content1'
          },
          {
            name: 'name2',
            content: 'content2'
          }
        ]}
      />
    );
    const metaElem1 = document.querySelector(`meta[name="name1"]`);
    expect(metaElem1?.getAttribute('content')).toBe('content1');
    const metaElem2 = document.querySelector(`meta[name="name2"]`);
    expect(metaElem2?.getAttribute('content')).toBe('content2');
  });
});
// Arbitrary extra <link> tags supplied through the customLinkTags prop.
describe('customLinkTags', () => {
  test('should render link elements correctly by passing `customLinkTags` props', () => {
    render(
      <SEO
        customLinkTags={[
          {
            rel: 'stylesheet',
            href: 'https://example.com/first.css'
          },
          {
            rel: 'stylesheet',
            href: 'https://example.com/second.css'
          }
        ]}
      />
    );
    const linkElems = document.querySelectorAll(`link[rel="stylesheet"]`);
    expect(linkElems[0].getAttribute('href')).toBe(
      'https://example.com/first.css'
    );
    expect(linkElems[1].getAttribute('href')).toBe(
      'https://example.com/second.css'
    );
  });
});
describe('og', () => {
  // og:title rendering, including the fallback to the plain `title` prop.
  describe('title', () => {
    const selector = `meta[property="og:title"]`;
    test('should render og:title by passing `og.title` prop', () => {
      render(<SEO og={{ title: 'foo' }} />);
      const metaElems = document.querySelectorAll(selector);
      expect(metaElems.length).toBe(1);
      expect(metaElems[0].getAttribute('content')).toBe('foo');
    });
    test('should render multiple og:title with same keys when multiple components exits', () => {
      render(
        <>
          <SEO og={{ title: 'first' }} />
          <SEO og={{ title: 'second' }} />
        </>
      );
      // In this case, Next.js render only second element since both elements have the same key.
      const metaElems = document.querySelectorAll(selector);
      expect(metaElems[0].getAttribute('content')).toBe('first');
      expect(metaElems[1].getAttribute('content')).toBe('second');
      expect(checkHavingSameKeys(metaElems)).toBe(true);
    });
    test('should not render og:title when `og:title` prop is not set', () => {
      render(<SEO />);
      const metaElems = document.querySelectorAll(selector);
      expect(metaElems.length).toBe(0);
    });
    test('should fallback to `title` prop', () => {
      render(<SEO title="title" />);
      const metaElems = document.querySelectorAll(selector);
      expect(metaElems.length).toBe(1);
      expect(metaElems[0].getAttribute('content')).toBe('title');
    });
  });
  // og:description rendering, including the fallback to `description`
  // and truncation via maxDescriptionCharacters.
  describe('description', () => {
    const selector = `meta[property="og:description"]`;
    test('should render og:description by passing `og.description` prop', () => {
      render(<SEO og={{ description: 'foo' }} />);
      const metaElems = document.querySelectorAll(selector);
      expect(metaElems.length).toBe(1);
      expect(metaElems[0].getAttribute('content')).toBe('foo');
    });
    test('should render multiple og:description with same keys when multiple components exits', () => {
      render(
        <>
          <SEO og={{ description: 'first' }} />
          <SEO og={{ description: 'second' }} />
        </>
      );
      // In this case, Next.js render only second element since both elements have the same key.
      const metaElems = document.querySelectorAll(selector);
      expect(metaElems[0].getAttribute('content')).toBe('first');
      expect(metaElems[1].getAttribute('content')).toBe('second');
      expect(checkHavingSameKeys(metaElems)).toBe(true);
    });
    test('should not render og:description when `og:description` prop is not set', () => {
      render(<SEO />);
      const metaElems = document.querySelectorAll(selector);
      expect(metaElems.length).toBe(0);
    });
    test('should fallback to `description` prop', () => {
      render(<SEO description="description" />);
      const metaElems = document.querySelectorAll(selector);
      expect(metaElems.length).toBe(1);
      expect(metaElems[0].getAttribute('content')).toBe('description');
    });
    test('should truncate description after `maxDescriptionCharacters`', () => {
      const longText = `But I must explain to you how all this mistaken idea of denouncing pleasure and praising pain was born and I will give you a complete account of the system, and expound the actual teachings of the gre`;
      render(
        <SEO og={{ description: longText }} maxDescriptionCharacters={100} />
      );
      const metaElems = document.querySelectorAll(selector);
      expect(metaElems[0].getAttribute('content')).toBe(
        longText.substr(0, 100)
      );
    });
  });
// og:url resolution: explicit `og.url` wins, falling back to `canonical`.
describe('url', () => {
  const selector = `meta[property="og:url"]`;
  test('should render og:url by passing `og.url` prop', () => {
    render(<SEO og={{ url: 'https://example.com/foo' }} />);
    const metaElems = document.querySelectorAll(selector);
    expect(metaElems.length).toBe(1);
    expect(metaElems[0].getAttribute('content')).toBe(
      'https://example.com/foo'
    );
  });
  test('should render multiple og:url with same keys when multiple components exits', () => {
    render(
      <>
        <SEO og={{ url: 'https://example.com/first' }} />
        <SEO og={{ url: 'https://example.com/second' }} />
      </>
    );
    // Next.js would render only the second element since both share the same key;
    // in this plain jsdom render both are present.
    const metaElems = document.querySelectorAll(selector);
    expect(metaElems[0].getAttribute('content')).toBe(
      'https://example.com/first'
    );
    expect(metaElems[1].getAttribute('content')).toBe(
      'https://example.com/second'
    );
    expect(checkHavingSameKeys(metaElems)).toBe(true);
  });
  test('should not render og:url when `og:url` prop is not set', () => {
    render(<SEO />);
    const metaElems = document.querySelectorAll(selector);
    expect(metaElems.length).toBe(0);
  });
  test('should fallback to `canonical` prop', () => {
    render(<SEO canonical="https://example.com/foo" />);
    const metaElems = document.querySelectorAll(selector);
    expect(metaElems.length).toBe(1);
    expect(metaElems[0].getAttribute('content')).toBe(
      'https://example.com/foo'
    );
  });
});
// og:image has no fallback prop — it renders only from `og.image`.
describe('image', () => {
  const selector = `meta[property="og:image"]`;
  test('should render og:image by passing `og.image` prop', () => {
    render(<SEO og={{ image: 'https://example.com/foo.png' }} />);
    const metaElems = document.querySelectorAll(selector);
    expect(metaElems.length).toBe(1);
    expect(metaElems[0].getAttribute('content')).toBe(
      'https://example.com/foo.png'
    );
  });
  test('should render multiple og:image with same keys when multiple components exits', () => {
    render(
      <>
        <SEO og={{ image: 'https://example.com/first.png' }} />
        <SEO og={{ image: 'https://example.com/second.png' }} />
      </>
    );
    // Next.js would render only the second element since both share the same key;
    // in this plain jsdom render both are present.
    const metaElems = document.querySelectorAll(selector);
    expect(metaElems[0].getAttribute('content')).toBe(
      'https://example.com/first.png'
    );
    expect(metaElems[1].getAttribute('content')).toBe(
      'https://example.com/second.png'
    );
    expect(checkHavingSameKeys(metaElems)).toBe(true);
  });
  test('should not render og:image when `og:image` prop is not set', () => {
    render(<SEO />);
    const metaElems = document.querySelectorAll(selector);
    expect(metaElems.length).toBe(0);
  });
});
// og:type has no fallback prop — it renders only from `og.type`.
describe('type', () => {
  const selector = `meta[property="og:type"]`;
  test('should render og:type by passing `og.type` prop', () => {
    render(<SEO og={{ type: 'article' }} />);
    const metaElems = document.querySelectorAll(selector);
    expect(metaElems.length).toBe(1);
    expect(metaElems[0].getAttribute('content')).toBe('article');
  });
  test('should render multiple og:type with same keys when multiple components exits', () => {
    render(
      <>
        <SEO og={{ type: 'article' }} />
        <SEO og={{ type: 'website' }} />
      </>
    );
    // Next.js would render only the second element since both share the same key;
    // in this plain jsdom render both are present.
    const metaElems = document.querySelectorAll(selector);
    expect(metaElems[0].getAttribute('content')).toBe('article');
    expect(metaElems[1].getAttribute('content')).toBe('website');
    expect(checkHavingSameKeys(metaElems)).toBe(true);
  });
  test('should not render og:type when `og:type` prop is not set', () => {
    render(<SEO />);
    const metaElems = document.querySelectorAll(selector);
    expect(metaElems.length).toBe(0);
  });
});
// og:site_name renders only from `og.siteName` (no fallback prop).
// The describe title was 'type' — a copy-paste leftover from the og:type suite
// above — which made two sibling suites share the same name in reports.
describe('siteName', () => {
  const selector = `meta[property="og:site_name"]`;
  test('should render og:site_name by passing `og.siteName` prop', () => {
    render(<SEO og={{ siteName: 'foo' }} />);
    const metaElems = document.querySelectorAll(selector);
    expect(metaElems.length).toBe(1);
    expect(metaElems[0].getAttribute('content')).toBe('foo');
  });
  test('should render multiple og:site_name with same keys when multiple components exist', () => {
    render(
      <>
        <SEO og={{ siteName: 'first' }} />
        <SEO og={{ siteName: 'second' }} />
      </>
    );
    // Next.js would render only the second element since both share the same key;
    // in this plain jsdom render both are present.
    const metaElems = document.querySelectorAll(selector);
    expect(metaElems[0].getAttribute('content')).toBe('first');
    expect(metaElems[1].getAttribute('content')).toBe('second');
    expect(checkHavingSameKeys(metaElems)).toBe(true);
  });
  test('should not render og:site_name when `og:siteName` prop is not set', () => {
    render(<SEO />);
    const metaElems = document.querySelectorAll(selector);
    expect(metaElems.length).toBe(0);
  });
});
}); | the_stack |
import { expect } from 'chai';
import { CalcMutationsOperator, Field, Model, Severity, TypeKind } from '../../../src/model';
import {
expectSingleErrorToInclude,
expectSingleWarningToInclude,
expectToBeValid,
validate
} from './validation-utils';
describe('Field', () => {
// Shared fixture model exercised by every suite below. It wires up one example
// of each TypeKind plus the relation/reference scenarios the tests rely on:
// - Country has a @key field (isoCode), so it can be used as a @reference target.
// - Shipment/Delivery define forward, inverse and non-relation fields.
// - HandlingUnit deliberately has NO @key field (negative @reference case).
const model = new Model({
  types: [
    {
      name: 'Address',
      kind: TypeKind.VALUE_OBJECT,
      fields: [
        {
          name: 'name',
          typeName: 'String'
        }
      ]
    },
    {
      name: 'Country',
      kind: TypeKind.ROOT_ENTITY,
      keyFieldName: 'isoCode',
      fields: [
        {
          name: 'isoCode',
          typeName: 'String'
        }
      ]
    },
    {
      name: 'Shipment',
      kind: TypeKind.ROOT_ENTITY,
      fields: [
        {
          name: 'deliveries',
          typeName: 'Delivery',
          isList: true,
          isRelation: true
        },
        {
          name: 'delivery',
          typeName: 'Delivery',
          isRelation: true
        },
        {
          name: 'deliveryNonRelation',
          typeName: 'Delivery'
        },
        {
          name: 'deliveryWithInverseOf',
          typeName: 'Delivery',
          isRelation: true,
          inverseOfFieldName: 'shipment'
        },
        {
          name: 'handlingUnits',
          typeName: 'HandlingUnit',
          isRelation: true,
          isList: true
        }
      ]
    },
    {
      name: 'Delivery',
      kind: TypeKind.ROOT_ENTITY,
      fields: [
        {
          name: 'shipment',
          typeName: 'Shipment',
          isRelation: true
        },
        {
          name: 'lineItems',
          typeName: 'DeliveryLineItem',
          isList: true
        }
      ]
    },
    {
      name: 'HandlingUnit',
      kind: TypeKind.ROOT_ENTITY,
      fields: []
    },
    {
      name: 'Item',
      kind: TypeKind.CHILD_ENTITY,
      fields: []
    },
    {
      name: 'DangerousGoodsInfo',
      kind: TypeKind.ENTITY_EXTENSION,
      fields: []
    },
    {
      name: 'DeliveryLineItem',
      kind: TypeKind.CHILD_ENTITY,
      fields: [
        {
          name: 'parent',
          isParentField: true,
          typeName: 'Delivery'
        }
      ]
    }
  ],
  // Two named permission profiles; the tests reference 'accounting' explicitly
  // and an undefined profile name for the negative case.
  permissionProfiles: [
    {
      profiles: {
        default: {
          permissions: [
            {
              roles: ['accounting'],
              access: 'readWrite'
            }
          ]
        },
        accounting: {
          permissions: [
            {
              roles: ['accounting'],
              access: 'readWrite'
            }
          ]
        }
      }
    }
  ]
});
// Convenience handles on the fixture types used throughout the suites.
const shipmentType = model.getRootEntityTypeOrThrow('Shipment');
const deliveryType = model.getRootEntityTypeOrThrow('Delivery');
const handlingUnitType = model.getRootEntityTypeOrThrow('HandlingUnit');
const addressType = model.getValueObjectTypeOrThrow('Address');
const itemType = model.getChildEntityTypeOrThrow('Item');
const dangerousGoodsInfoType = model.getEntityExtensionTypeOrThrow('DangerousGoodsInfo');
// Resolution of a field's declared type: built-in scalars, user-defined types,
// and the fallback behavior for unknown type names.
describe('with type', () => {
  // Every case builds a detached field on Delivery; only name/typeName vary.
  const buildField = (name: string, typeName: string) =>
    new Field({ name, typeName }, deliveryType);

  it('accepts built-in type', () => {
    expectToBeValid(buildField('deliveryNumber', 'String'));
  });

  it('accepts user-defined types', () => {
    const field = buildField('address', 'Address');
    expectToBeValid(field);
    expect(field.type).to.equal(model.getType('Address'));
    expect(field.hasValidType).to.be.true;
  });

  it('reports undefined type', () => {
    const field = buildField('deliveryNumber', 'UndefinedType');
    expectSingleErrorToInclude(field, 'UndefinedType');
  });

  it('falls back to pseudo type if typeName is not found', () => {
    // Important so that the model object stays usable even when it is invalid.
    const field = buildField('deliveryNumber', 'Undefined');
    expect(field.type).not.to.be.undefined;
    expect(field.type.name).to.equal('Undefined');
    expect(field.hasValidType).to.be.false;
  });
});
// Field-name validation rules: non-empty, no leading/embedded underscores,
// lowercase first character.
describe('with name', () => {
  it('rejects empty names', () => {
    const field = new Field(
      {
        name: '',
        typeName: 'String'
      },
      deliveryType
    );
    expectSingleErrorToInclude(field, `Field name is empty.`);
  });
  it('accepts one-character names', () => {
    // Minimal legal name: a single lowercase character.
    const field = new Field(
      {
        name: 'a',
        typeName: 'String'
      },
      deliveryType
    );
    expectToBeValid(field);
  });
  it('warns about names including an underscore', () => {
    const field = new Field(
      {
        name: 'some_field_name',
        typeName: 'String'
      },
      deliveryType
    );
    expectSingleWarningToInclude(field, `Field names should not include underscores.`);
  });
  it('rejects names starting with an underscore', () => {
    // Leading underscore is an error (reserved), unlike the embedded-underscore warning.
    const field = new Field(
      {
        name: '_internal',
        typeName: 'String'
      },
      deliveryType
    );
    expectSingleErrorToInclude(field, `Field names cannot start with an underscore.`);
  });
  it('warns about names starting with an uppercase character', () => {
    const field = new Field(
      {
        name: 'ThisIsNotAFieldName',
        typeName: 'String'
      },
      deliveryType
    );
    expectSingleWarningToInclude(field, `Field names should start with a lowercase character.`);
  });
});
// A root-entity-typed field must be either a @relation or a @reference — never
// plain, and never both at once.
describe('with root entity type', () => {
  it('rejects fields with root entity type without @relation or @reference', () => {
    const field = new Field(
      {
        name: 'country',
        typeName: 'Country'
      },
      deliveryType
    );
    expectSingleErrorToInclude(field, 'root entity');
  });
  it('rejects fields with both @relation and @reference', () => {
    const field = new Field(
      {
        name: 'country',
        typeName: 'Country',
        isRelation: true,
        isReference: true
      },
      deliveryType
    );
    expectSingleErrorToInclude(field, '@reference and @relation cannot be combined');
  });
});
describe('with relations', () => {
// @relation is only valid between two root entity types: both the declaring
// type and the target type must be root entities.
it('rejects @relation on non-root-entity-type as declaring type', () => {
  const field = new Field(
    {
      name: 'handlingUnit',
      typeName: 'HandlingUnit',
      isRelation: true
    },
    addressType
  );
  expectSingleErrorToInclude(
    field,
    'Relations can only be defined on root entity types. Consider using @reference instead'
  );
});
it('rejects @relation to non-root-entity type', () => {
  const field = new Field(
    {
      name: 'address',
      typeName: 'Address',
      isRelation: true
    },
    deliveryType
  );
  expectSingleErrorToInclude(
    field,
    'Type "Address" cannot be used with @relation because it is not a root entity type'
  );
});
// Forward relations (no inverseOf argument) and how their inverse side is
// resolved or reported.
describe('without inverseOf', () => {
  it('accepts', () => {
    // Needs a field woven into the model (not a detached Field instance).
    const field = deliveryType.getFieldOrThrow('shipment');
    expectToBeValid(field);
  });
  it('provides the other field as inverseField', () => {
    // Needs a field woven into the model (not a detached Field instance).
    const field = deliveryType.getFieldOrThrow('shipment');
    expect(field.inverseField).to.equal(shipmentType.getFieldOrThrow('deliveryWithInverseOf'));
  });
  it('resolves inverseField to undefined if there is none', () => {
    const field = shipmentType.getFieldOrThrow('handlingUnits');
    expect(field.inverseField).to.be.undefined;
  });
  it('rejects if there are multiple inverse fields', () => {
    // Local model: two fields on Delivery both declare inverseOf the same
    // Person.delivery field, which is ambiguous.
    const m = new Model({
      types: [
        {
          name: 'Delivery',
          kind: TypeKind.ROOT_ENTITY,
          fields: [
            {
              name: 'packager',
              typeName: 'Person',
              isRelation: true,
              inverseOfFieldName: 'delivery'
            },
            {
              name: 'shipper',
              typeName: 'Person',
              isRelation: true,
              inverseOfFieldName: 'delivery'
            }
          ]
        },
        {
          name: 'Person',
          kind: TypeKind.ROOT_ENTITY,
          fields: [
            {
              name: 'delivery',
              typeName: 'Delivery',
              isRelation: true
            }
          ]
        }
      ]
    });
    const field = m.getRootEntityTypeOrThrow('Person').getFieldOrThrow('delivery');
    const result = validate(field);
    expect(result.messages.length).to.equal(2);
    for (const message of result.messages) {
      expect(message.severity).to.equal(Severity.Error);
      expect(message.message).to.equal(
        'Multiple fields ("Delivery.packager", "Delivery.shipper") declare inverseOf to "Person.delivery".'
      );
    }
  });
  // This was previously a `describe` with the assertions directly in its
  // callback, so the expectations ran at suite-definition time (outside any
  // test) instead of as a test case. It must be an `it`.
  it('warns if there is an unrelated inverse relation', () => {
    const field = shipmentType.getFieldOrThrow('delivery');
    const result = validate(field);
    expect(result.messages.length, result.toString()).to.equal(1);
    expect(result.messages[0].severity).to.equal(Severity.Warning);
    expect(result.messages[0].message).to.equal(
      'This field and "Delivery.shipment" define separate relations. Consider using the "inverseOf" argument to add a backlink to an existing relation.'
    );
  });
});
// Back-link relations declared with inverseOf: acceptance for list and
// non-list targets, plus every rejection case (unknown field, non-relation
// field, doubly-inverse field, wrong target type).
describe('with inverseOf', () => {
  describe('that is not a list', () => {
    const field = new Field(
      {
        name: 'shipment',
        typeName: 'Shipment',
        isRelation: true,
        inverseOfFieldName: 'delivery'
      },
      deliveryType
    );
    it('accepts', () => {
      expectToBeValid(field);
    });
    it('provides the field in inverseOf', () => {
      expect(field.inverseOf).to.equal(shipmentType.getField('delivery'));
    });
  });
  describe('that is a list', () => {
    const field = new Field(
      {
        name: 'shipment',
        typeName: 'Shipment',
        isRelation: true,
        inverseOfFieldName: 'deliveries'
      },
      deliveryType
    );
    it('accepts', () => {
      expectToBeValid(field);
    });
    it('provides the field in inverseOf', () => {
      expect(field.inverseOf).to.equal(shipmentType.getField('deliveries'));
    });
  });
  it('rejects inverseOf to undefined field', () => {
    const field = new Field(
      {
        name: 'shipment',
        typeName: 'Shipment',
        isRelation: true,
        inverseOfFieldName: 'undefinedField'
      },
      deliveryType
    );
    expectSingleErrorToInclude(field, 'Field "undefinedField" does not exist on type "Shipment"');
  });
  it('rejects inverseOf to non-relation field', () => {
    const field = new Field(
      {
        name: 'shipment',
        typeName: 'Shipment',
        isRelation: true,
        inverseOfFieldName: 'deliveryNonRelation'
      },
      deliveryType
    );
    expectSingleErrorToInclude(
      field,
      'Field "Shipment.deliveryNonRelation" used as inverse field of "Delivery.shipment" does not have the @relation directive'
    );
  });
  it('rejects inverseOf to field that has inverseOf set', () => {
    // Only one side of a relation may declare inverseOf.
    const field = new Field(
      {
        name: 'shipment',
        typeName: 'Shipment',
        isRelation: true,
        inverseOfFieldName: 'deliveryWithInverseOf'
      },
      deliveryType
    );
    expectSingleErrorToInclude(
      field,
      'Field "Shipment.deliveryWithInverseOf" used as inverse field of "Delivery.shipment" should not declare inverseOf itself'
    );
  });
  it("rejects inverseOf to field with different type than the field's declaring type", () => {
    const field = new Field(
      {
        name: 'shipment',
        typeName: 'Shipment',
        isRelation: true,
        inverseOfFieldName: 'handlingUnits'
      },
      deliveryType
    );
    expectSingleErrorToInclude(
      field,
      'Field "Shipment.handlingUnits" used as inverse field of "Delivery.shipment" has named type "HandlingUnit" but should be of type "Delivery"'
    );
  });
  it('does not set inverseField', () => {
    // The inverseOf side does not itself get an inverseField back-pointer.
    const field = new Field(
      {
        name: 'shipment',
        typeName: 'Shipment',
        isRelation: true,
        inverseOfFieldName: 'delivery'
      },
      deliveryType
    );
    expect(field.inverseField).to.be.undefined;
  });
});
});
// @reference rules: target must be a root entity with a @key field, and
// references cannot be lists.
describe('with references', () => {
  it('accepts @reference to root entity type', () => {
    const field = new Field(
      {
        name: 'country',
        typeName: 'Country',
        isReference: true
      },
      deliveryType
    );
    expectToBeValid(field);
  });
  it('rejects @reference to root entity type without @key field', () => {
    // HandlingUnit is a root entity but intentionally lacks a keyFieldName.
    const field = new Field(
      {
        name: 'handlingUnit',
        typeName: 'HandlingUnit',
        isReference: true
      },
      deliveryType
    );
    expectSingleErrorToInclude(
      field,
      `"HandlingUnit" cannot be used as @reference type because it does not have a field annotated with @key`
    );
  });
  it('rejects @reference to value object type', () => {
    const field = new Field(
      {
        name: 'address',
        typeName: 'Address',
        isReference: true
      },
      deliveryType
    );
    expectSingleErrorToInclude(
      field,
      `"Address" cannot be used as @reference type because is not a root entity type.`
    );
  });
  it('rejects @reference on list field', () => {
    const field = new Field(
      {
        name: 'countries',
        typeName: 'Country',
        isReference: true,
        isList: true
      },
      deliveryType
    );
    expectSingleErrorToInclude(
      field,
      `@reference is not supported with list types. Consider wrapping the reference in a child entity or value object type.`
    );
  });
});
// Entity-extension-typed fields: allowed in root entities, child entities and
// other entity extensions; forbidden in value objects and on list fields.
describe('with entity extension type', () => {
  it('accepts entity extensions embedded in root entities', () => {
    const field = new Field(
      {
        name: 'items',
        typeName: 'DangerousGoodsInfo'
      },
      deliveryType
    );
    expectToBeValid(field);
  });
  it('accepts entity extensions embedded in child entities', () => {
    const field = new Field(
      {
        name: 'items',
        typeName: 'DangerousGoodsInfo'
      },
      itemType
    );
    expectToBeValid(field);
  });
  it('accepts entity extensions embedded in entity extensions', () => {
    const field = new Field(
      {
        name: 'items',
        typeName: 'DangerousGoodsInfo'
      },
      dangerousGoodsInfoType
    );
    expectToBeValid(field);
  });
  it('rejects entity extensions embedded in value objects', () => {
    const field = new Field(
      {
        name: 'items',
        typeName: 'DangerousGoodsInfo'
      },
      addressType
    );
    expectSingleErrorToInclude(
      field,
      `Type "DangerousGoodsInfo" is an entity extension type and cannot be used within value object types. Change "Address" to an entity extension type or use a value object type for "items".`
    );
  });
  it('rejects entity extension types on list fields', () => {
    const field = new Field(
      {
        name: 'items',
        typeName: 'DangerousGoodsInfo',
        isList: true
      },
      deliveryType
    );
    expectSingleErrorToInclude(
      field,
      `Type "DangerousGoodsInfo" is an entity extension type and cannot be used in a list. Change the field type to "DangerousGoodsInfo" (without brackets), or use a child entity or value object type instead.`
    );
  });
});
// Child-entity-typed fields: mirror image of entity extensions — they are
// REQUIRED to be lists, and likewise forbidden inside value objects.
describe('with child entity type', () => {
  it('accepts child entities embedded in root entities', () => {
    const field = new Field(
      {
        name: 'items',
        typeName: 'Item',
        isList: true
      },
      deliveryType
    );
    expectToBeValid(field);
  });
  it('accepts child entities embedded in child entities', () => {
    const field = new Field(
      {
        name: 'items',
        typeName: 'Item',
        isList: true
      },
      itemType
    );
    expectToBeValid(field);
  });
  it('accepts child entities embedded in entity extensions', () => {
    const field = new Field(
      {
        name: 'items',
        typeName: 'Item',
        isList: true
      },
      dangerousGoodsInfoType
    );
    expectToBeValid(field);
  });
  it('rejects child entities embedded in value objects', () => {
    const field = new Field(
      {
        name: 'items',
        typeName: 'Item',
        isList: true
      },
      addressType
    );
    expectSingleErrorToInclude(
      field,
      `Type "Item" is a child entity type and cannot be used within value object types. Change "Address" to an entity extension type or use a value object type for "items".`
    );
  });
  it('rejects child entity types on non-list fields', () => {
    const field = new Field(
      {
        name: 'items',
        typeName: 'Item',
        isList: false
      },
      deliveryType
    );
    expectSingleErrorToInclude(
      field,
      `Type "Item" is a child entity type and can only be used in a list. Change the field type to "[Item]", or use an entity extension or value object type instead.`
    );
  });
});
// Default values are accepted on scalar and value-object fields, but always
// produce an Info message because they are not type-checked.
describe('with default value', () => {
  it('accepts on scalar types', () => {
    const field = new Field(
      {
        name: 'amount',
        typeName: 'Int',
        defaultValue: 123
      },
      itemType
    );
    const res = validate(field);
    expect(res.messages.length, res.toString()).to.equal(1);
    expect(res.messages[0].severity, res.toString()).to.equal(Severity.Info); // info about no type checking for default values
  });
  it('accepts on value object types', () => {
    const field = new Field(
      {
        name: 'address',
        typeName: 'Address',
        defaultValue: 123
      },
      deliveryType
    );
    const res = validate(field);
    expect(res.messages.length, res.toString()).to.equal(1);
    expect(res.messages[0].severity, res.toString()).to.equal(Severity.Info); // info about no type checking for default values
  });
});
// Field-level permissions: either a named permission profile or explicit role
// specifiers — never both — and the profile must exist.
describe('with permissions', () => {
  describe('with permission profile', () => {
    const field = new Field(
      {
        name: 'totalAmount',
        typeName: 'Int',
        permissions: {
          permissionProfileName: 'accounting'
        }
      },
      deliveryType
    );
    it('accepts', () => {
      expectToBeValid(field);
    });
    it('resolves permission profile', () => {
      expect(field.permissionProfile).to.equal(model.rootNamespace.getPermissionProfileOrThrow('accounting'));
    });
  });
  it('accepts direct role specifier', () => {
    const field = new Field(
      {
        name: 'totalAmount',
        typeName: 'Int',
        permissions: {
          roles: {
            read: ['accounting'],
            readWrite: ['admin']
          }
        }
      },
      deliveryType
    );
    expectToBeValid(field);
  });
  it('rejects combining roles and permission profiles', () => {
    // Both mechanisms together yield one error per mechanism (two messages).
    const field = new Field(
      {
        name: 'totalAmount',
        typeName: 'Int',
        permissions: {
          permissionProfileName: 'accounting',
          roles: {
            read: ['accounting'],
            readWrite: ['admin']
          }
        }
      },
      deliveryType
    );
    const result = validate(field);
    expect(result.messages.length).to.equal(2);
    for (const message of result.messages) {
      expect(message.severity).to.equal(Severity.Error);
      expect(message.message).to.equal(`Permission profile and explicit role specifiers cannot be combined.`);
    }
  });
  it('rejects missing permission profile', () => {
    const field = new Field(
      {
        name: 'totalAmount',
        typeName: 'Int',
        permissions: {
          permissionProfileName: 'undefined'
        }
      },
      deliveryType
    );
    expectSingleErrorToInclude(field, `Permission profile "undefined" not found`);
  });
});
// Calc mutations: each scalar type supports a fixed operator set (numeric
// types: arithmetic; String: APPEND/PREPEND; Boolean: none), and lists
// support none at all.
describe('with calc mutations', () => {
  it('accepts ADD and MULTIPLY on Int', () => {
    const field = new Field(
      {
        name: 'amount',
        typeName: 'Int',
        calcMutationOperators: [CalcMutationsOperator.ADD, CalcMutationsOperator.MULTIPLY]
      },
      itemType
    );
    expectToBeValid(field);
  });
  it('accepts APPEND on String', () => {
    const field = new Field(
      {
        name: 'log',
        typeName: 'String',
        calcMutationOperators: [CalcMutationsOperator.APPEND]
      },
      deliveryType
    );
    expectToBeValid(field);
  });
  it('rejects APPEND on Boolean', () => {
    const field = new Field(
      {
        name: 'isConfirmed',
        typeName: 'Boolean',
        calcMutationOperators: [CalcMutationsOperator.APPEND]
      },
      deliveryType
    );
    expectSingleErrorToInclude(field, `Type "Boolean" does not support any calc mutation operators.`);
  });
  it('rejects APPEND on Int', () => {
    const field = new Field(
      {
        name: 'amount',
        typeName: 'Int',
        calcMutationOperators: [CalcMutationsOperator.APPEND]
      },
      deliveryType
    );
    expectSingleErrorToInclude(
      field,
      `Calc mutation operator "APPEND" is not supported on type "Int" (supported operators: "MULTIPLY", "DIVIDE", "ADD", "SUBTRACT", "MODULO").`
    );
  });
  it('rejects MULTIPLY on String', () => {
    const field = new Field(
      {
        name: 'deliveryNumber',
        typeName: 'String',
        calcMutationOperators: [CalcMutationsOperator.MULTIPLY]
      },
      deliveryType
    );
    expectSingleErrorToInclude(
      field,
      `Calc mutation operator "MULTIPLY" is not supported on type "String" (supported operators: "APPEND", "PREPEND").`
    );
  });
  it('rejects APPEND on lists', () => {
    const field = new Field(
      {
        name: 'amount',
        typeName: 'String',
        isList: true,
        calcMutationOperators: [CalcMutationsOperator.APPEND]
      },
      deliveryType
    );
    expectSingleErrorToInclude(field, `Calc mutations are not supported on list fields.`);
  });
});
}); | the_stack |
import {
Array1D, Array4D,
CostReduction,
FeedEntry,
Graph,
InCPUMemoryShuffledInputProviderBuilder,
InMemoryDataset,
MetricReduction,
NDArray,
NDArrayInitializer,
NDArrayMathGPU,
Optimizer,
Session,
Scalar,
SGDOptimizer,
Tensor,
util, VarianceScalingInitializer, ZerosInitializer,
} from 'deeplearn';
import {EventEmitter} from 'eventemitter3';
import {GraphWeights} from './GraphSaverLoader';
import SpeechCommandDataset from './SpeechCommandDataset';
// Values from model-builder.ts.
const BATCH_SIZE = 64; // examples per training step
const NUM_BATCHES = 10000; // upper bound on training iterations
const MIN_COST_VALUE = 0.1; // NOTE(review): appears unused in this file — verify before removing
const TRAIN_TEST_RATIO = 0.9; // fraction of the dataset used for training (rest is test)
const LEARNING_RATE = 0.01; // default SGD step size
/**
 * Builds, trains and evaluates a small convolutional network for speech
 * command classification using the (legacy) deeplearn.js Graph API.
 *
 * Progress is reported through `this.progress` (eventemitter3):
 *   - 'cost'     -> { cost, minCost } after every training batch
 *   - 'accuracy' -> { accuracy, maxAccuracy } every 20th batch
 */
export default class ModelBuilder {
  modelInitialized: boolean
  session: Session
  graph: Graph
  optimizer: Optimizer
  math: NDArrayMathGPU
  inputShape: number[]
  labelShape: number[]
  learningRate: number
  inputTensor: Tensor
  labelTensor: Tensor
  regularizationTensor: Tensor
  predictionTensor: Tensor
  costTensor: Tensor
  accuracyTensor: Tensor
  // Counter used to generate unique graph-variable names per conv layer.
  nodeIndex: number
  dataSet: SpeechCommandDataset
  totalTimeSec: number
  isTraining: boolean
  progress: EventEmitter
  // NOTE(review): constructor parameters are untyped (implicit any); from usage
  // they appear to be number[], number[] and SpeechCommandDataset — confirm.
  constructor(inputShape, labelShape, dataSet=null) {
    this.nodeIndex = 0;
    this.dataSet = dataSet;
    // Dimensions of the Mel spectrogram we feed into this model.
    this.inputShape = inputShape;
    // Shape of the 1D one-hot encoded labels.
    this.labelShape = labelShape;
    this.isTraining = false;
    this.math = new NDArrayMathGPU();
    this.progress = new EventEmitter();
    this.learningRate = LEARNING_RATE;
  }
  /**
   * (Re)builds the graph: conv(8) -> relu -> maxpool -> conv(16) -> relu ->
   * maxpool -> flatten -> dense, with softmax cross-entropy cost and an
   * argmax-equality accuracy tensor. Pass `loadedWeights` to restore a
   * previously saved set of variables instead of random initialization.
   */
  createModel(loadedWeights: GraphWeights=null) {
    if (this.session != null) {
      this.session.dispose();
    }
    // Reset the node index.
    this.nodeIndex = 0;
    this.modelInitialized = false;
    this.graph = new Graph();
    const g = this.graph;
    this.inputTensor = g.placeholder('input', this.inputShape);
    this.labelTensor = g.placeholder('label', this.labelShape);
    // Construct the hidden layers.
    let network = this.inputTensor;
    network = this.addConv2d(g, network, 8, loadedWeights);
    const reg1 = this.addRegularization(g, network);
    network = this.addRelu(g, network);
    network = this.addMaxPool(g, network);
    network = this.addConv2d(g, network, 16, loadedWeights)
    const reg2 = this.addRegularization(g, network);
    network = this.addRelu(g, network);
    network = this.addMaxPool(g, network);
    network = this.addFlatten(g, network);
    network = this.addFullyConnected(g, network, loadedWeights);
    const reg3 = this.addRegularization(g, network);
    this.predictionTensor = network;
    // Add a regularization parameter.
    // NOTE(review): the L2 term below is computed but deliberately excluded
    // from the cost (see the commented-out line); it is only kept for
    // inspection/debugging.
    this.regularizationTensor = g.add(reg1, reg2, reg3);
    const softmaxCrossEntropyCostTensor =
        g.softmaxCrossEntropyCost(this.predictionTensor, this.labelTensor);
    //this.costTensor = g.add(softmaxCrossEntropyCostTensor, this.regularizationTensor);
    this.costTensor = softmaxCrossEntropyCostTensor;
    this.accuracyTensor =
        g.argmaxEquals(this.predictionTensor, this.labelTensor);
    this.session = new Session(g, this.math);
    this.modelInitialized = true;
  }
  /** Overrides the SGD learning rate used by the next startTraining() call. */
  setLearningRate(learningRate: number) {
    this.learningRate = learningRate;
  }
  /**
   * Runs up to NUM_BATCHES training steps of SGD on the training split,
   * emitting 'cost' per batch and 'accuracy' every 20 batches. Set
   * `isTraining = false` (e.g. via stopTraining()) to abort the loop.
   */
  async startTraining() {
    console.log('startTraining');
    const trainingData = this.getTrainingData();
    const trainingShuffledInputProviderGenerator =
        new InCPUMemoryShuffledInputProviderBuilder(trainingData);
    const [trainingInputProvider, trainingLabelProvider] =
        trainingShuffledInputProviderGenerator.getInputProviders();
    const trainingFeeds = [
      {tensor: this.inputTensor, data: trainingInputProvider},
      {tensor: this.labelTensor, data: trainingLabelProvider}
    ];
    this.isTraining = true;
    let minCost = Infinity;
    let maxAccuracy = -Infinity;
    // Training is broken up into batches.
    // Before we start training, we need to provide an optimizer. This is the
    // object that is responsible for updating weights. The learning rate param
    // is a value that represents how large of a step to make when updating
    // weights. If this is too big, you may overstep and oscillate. If it is too
    // small, the model may take a long time to train.
    const optimizer = new SGDOptimizer(this.learningRate);
    console.log(`Training ${NUM_BATCHES} batches of size ${BATCH_SIZE}` +
        ` at rate: ${this.learningRate}.`);
    for (let i = 0; i < NUM_BATCHES; i++) {
      // Train takes a cost tensor to minimize; this call trains one batch and
      // returns the average cost of the batch as a Scalar.
      const costValue = this.session.train(
          this.costTensor, trainingFeeds,
          BATCH_SIZE, optimizer, CostReduction.MEAN);
      // NOTE(review): data() presumably resolves to a TypedArray; parseFloat
      // relies on its string form. `(await costValue.data())[0]` looks like
      // the intended read — confirm against the deeplearn version in use.
      const cost = parseFloat(await costValue.data());
      console.log(`#${i}: cost: ${cost.toFixed(3)}.`);
      if (cost < minCost) {
        minCost = cost;
      }
      if (!this.isTraining) {
        break;
      }
      if (i % 20 == 0) {
        const accuracy = await this.getTestAccuracy(50);
        if (accuracy > maxAccuracy) {
          maxAccuracy = accuracy;
        }
        console.log(`#${i}: accuracy: ${accuracy.toFixed(3)},`
            + ` maxAccuracy: ${maxAccuracy.toFixed(3)}.`);
        this.progress.emit('accuracy', {accuracy, maxAccuracy});
      }
      this.progress.emit('cost', {cost, minCost});
    }
    this.isTraining = false;
  }
  /**
   * Estimates accuracy on the test split by evaluating up to `limit` randomly
   * sampled (with replacement) test examples. Returns the fraction correct.
   */
  async getTestAccuracy(limit?: number) {
    const testData = this.getTestData();
    const totalCount = testData[0].length;
    if (!limit) {
      limit = totalCount;
    }
    const count = Math.min(limit, totalCount);
    let accurateCount = 0;
    for (let i = 0; i < count; i++) {
      // Random sampling with replacement over the whole test split.
      const ind = Math.floor(Math.random() * totalCount);
      const input = testData[0][ind];
      const label = testData[1][ind];
      const inferenceFeeds = [
        {tensor: this.inputTensor, data: input},
        {tensor: this.labelTensor, data: label},
      ];
      const result = this.session.eval(this.accuracyTensor, inferenceFeeds);
      /*
      // Output regularization value for debugging.
      const regularization = this.session.eval(this.regularizationTensor, inferenceFeeds);
      console.log('regularizationTensor', regularization.dataSync());
      */
      const accuracy = await result.data()
      if (accuracy[0]) {
        accurateCount += 1;
      }
    }
    return (accurateCount / count);
  }
  /** Requests the training loop in startTraining() to stop after the current batch. */
  stopTraining() {
    console.log('Stopped training!');
    this.isTraining = false;
  }
  /**
   * Appends a 5x5 conv layer (stride 1, zero-pad 2) with `outputDepth`
   * filters. Weights come from `init` when provided, otherwise truncated
   * normal / zeros. Variable names encode `nodeIndex` for save/load.
   */
  private addConv2d(g: Graph, network: Tensor, outputDepth: number,
      init: GraphWeights=null) {
    // 5x5 square filter.
    const fieldSize = 5;
    const stride = 1;
    const zeroPad = 2;
    const wShape =
        [fieldSize, fieldSize, network.shape[2], outputDepth];
    const wName = `save-conv2d-${this.nodeIndex}-w`;
    const bName = `save-conv2d-${this.nodeIndex}-b`;
    let w = NDArray.randTruncatedNormal(wShape, 0, 0.1);
    let b = Array1D.zeros([outputDepth]);
    if (init) {
      w = init[wName];
      b = init[bName];
    }
    const wTensor = g.variable(wName, w);
    const bTensor = g.variable(bName, b);
    this.nodeIndex += 1;
    return g.conv2d(
        network, wTensor, bTensor, fieldSize, outputDepth,
        stride, zeroPad);
  }
  /** Appends a ReLU activation. */
  private addRelu(g: Graph, network: Tensor) {
    return g.relu(network);
  }
  /** Appends a 2x2 max-pool (stride 1, no padding). */
  private addMaxPool(g: Graph, network: Tensor) {
    const fieldSize = 2;
    // Was previously stride=2 in the MNIST network. Changed this value because
    // of the input size.
    const stride = 1;
    const zeroPad = 0;
    return g.maxPool(network, fieldSize, stride, zeroPad);
  }
  /** Reshapes the activation volume to a flat 1D vector. */
  private addFlatten(g: Graph, network: Tensor) {
    const size = util.sizeFromShape(network.shape);
    return g.reshape(network, [size]);
  }
  /**
   * Appends the final dense layer with one unit per label class. Weights are
   * restored from `init` when given, else variance-scaling / zeros.
   */
  private addFullyConnected(g: Graph, network: Tensor, init: GraphWeights=null) {
    const inputSize = util.sizeFromShape(network.shape);
    const hiddenUnits = this.labelShape[0];
    let weightsInitializer, biasInitializer;
    if (init) {
      weightsInitializer = new NDArrayInitializer(init['save-fc1-weights']);
      biasInitializer = new NDArrayInitializer(init['save-fc1-bias']);
    } else {
      weightsInitializer = new VarianceScalingInitializer();
      biasInitializer = new ZerosInitializer();
    }
    const useBias = true;
    return g.layers.dense(
        'save-fc1', network, hiddenUnits, null, useBias, weightsInitializer,
        biasInitializer);
  }
  /**
   * Returns beta * sum(network^2), an L2 regularization term.
   * NOTE(review): every call registers a graph variable named 'beta' — with
   * three call sites this reuses the same name; confirm the Graph API
   * tolerates duplicate variable names.
   */
  private addRegularization(g: Graph, network: Tensor) {
    const beta = 0.01;
    const betaTensor = g.variable('beta', Scalar.new(beta));
    return g.multiply(betaTensor, g.reduceSum(g.square(network)));
  }
  /**
   * Returns the test split as [inputs, labels] (the trailing
   * 1 - TRAIN_TEST_RATIO fraction), or null if the dataset has no data yet.
   */
  private getTestData(): NDArray[][] {
    const data = this.dataSet.getData();
    if (data == null) {
      return null;
    }
    const [images, labels] = this.dataSet.getData() as [NDArray[], NDArray[]];
    const start = Math.floor(TRAIN_TEST_RATIO * images.length);
    return [images.slice(start), labels.slice(start)];
  }
  /** Returns the training split as [inputs, labels] (the leading TRAIN_TEST_RATIO fraction). */
  private getTrainingData(): NDArray[][] {
    const [images, labels] = this.dataSet.getData() as [NDArray[], NDArray[]];
    const end = Math.floor(TRAIN_TEST_RATIO * images.length);
    return [images.slice(0, end), labels.slice(0, end)];
  }
  // NOTE(review): returns a possibly fractional count (no Math.floor), and is
  // not referenced in this chunk.
  private getTrainingCount() {
    const [images, labels] = this.dataSet.getData() as [NDArray[], NDArray[]];
    return TRAIN_TEST_RATIO * images.length;
  }
  /** Evaluates the prediction tensor over shuffled test data and logs the raw output. */
  async startInference() {
    const testData = this.getTestData();
    // NOTE(review): testData is an [inputs, labels] pair, so this logs "2",
    // not the number of test points — testData[0].length is likely intended.
    console.log(`Running inference on ${testData.length} points.`);
    const inferenceShuffledInputProviderGenerator =
        new InCPUMemoryShuffledInputProviderBuilder(testData);
    const [inferenceInputProvider, inferenceLabelProvider] =
        inferenceShuffledInputProviderGenerator.getInputProviders();
    const inferenceFeeds = [
      {tensor: this.inputTensor, data: inferenceInputProvider},
      {tensor: this.labelTensor, data: inferenceLabelProvider}
    ];
    const result = this.session.eval(this.predictionTensor, inferenceFeeds);
    console.log(await result.data());
  }
  /** Runs the network forward on a single input and returns the resulting NDArray. */
  predict(input: NDArray) {
    const inferenceFeeds = [
      {tensor: this.inputTensor, data: input}
    ];
    return this.session.eval(this.predictionTensor, inferenceFeeds);
  }
}
import type Events from 'events'
import type tty from 'tty'
import throttle from 'lodash/throttle'
import type { Prompt } from 'enquirer'
import ansi from 'enquirer/lib/ansi'
import AutoComplete from 'enquirer/lib/prompts/autocomplete'
import Select from 'enquirer/lib/prompts/select'
import signale from './signale'
/**
 * Shape of a key event as delivered by enquirer's keypress handling
 * (see the dispatch() usage below for which fields are read).
 */
export interface Keypress {
  sequence: string,
  name: string,
  ctrl?: boolean,
  meta?: boolean,
  shift?: boolean,
  option?: boolean,
  raw?: string,
  code?: string,
  action?: string,
}
/** A single selectable history entry (plain command text). */
export type ChoiceItem = string
/**
 * Internal mutable state enquirer keeps on a prompt instance.
 * NOTE(review): field meanings are inferred from how HistorySearcher
 * reads/writes them below — confirm against enquirer's Prompt source.
 */
export interface PromptState {
  name: string,
  message: string,
  header: string,
  footer: string,
  error: string,
  hint: string,
  input: string,
  cursor: number,
  index: number,
  lines: number,
  // number of rows the rendered choice list occupies (set in renderChoices)
  size: number,
  tick: number,
  prompt: string,
  buffer: string,
  width: number,
  promptLine: boolean,
  // full filtered choice list; `visible` is the windowed slice rendered
  choices: ChoiceItem[],
  visible: ChoiceItem[],
  initCol: number,
  stdin: tty.ReadStream,
  stdout: tty.WriteStream,
  limit: number,
  onRun: Function,
  cancelled: boolean,
  submitted: boolean,
  loadingChoices: boolean,
}
export type PromptOptions = ConstructorParameters<typeof Prompt>[0] & {
  /** used but not list in enquirer types */
  promptLine: boolean,
  onRun: (prompt: AutoComplete) => void,
}
export type PromptInstance = InstanceType<typeof Prompt>
/** Options specific to HistorySearcher, on top of enquirer's own. */
export interface ExtraOptions {
  input: string,
  choices: string[],
  limit?: number,
  highlight?: () => void,
  /** shell prompt takes columns, different with custom shell theme */
  initCol?: number,
}
export const SIGINT_CODE = 3
const { stringify } = JSON
// @TODO: pointer length from Prompt.pointer()
const POINTER_LENGTH = 2
const POINTER_PLACEHOLDER = ' '.repeat(POINTER_LENGTH)
/**
 * Number of terminal rows `text` occupies once the pointer placeholder is
 * prepended to its first line, given the terminal width in columns.
 * An empty line still occupies one row.
 */
const calcTextTakeRows = (text: string, columns: number): number => {
  const prefixed = `${POINTER_PLACEHOLDER}${text}`
  let rows = 0
  for (const line of prefixed.split('\n')) {
    // empty line is also considered take a row
    rows += Math.ceil((line.length || 1) / columns)
  }
  return rows
}
/**
 * Rotate `list` in place so the last `size` items move to the front.
 * In-place performance optimization of enquirer/lib/utils.js scrollUp.
 */
const scrollUpInPlace = <T = any>(list: T[], size: number = 1) => {
  // Guard: popping from an empty list would unshift `undefined` entries,
  // growing the list with junk values.
  if (list.length === 0) {
    return
  }
  for (let i = 0; i < size; i++) {
    list.unshift(list.pop() as T)
  }
}
/**
 * Rotate `list` in place so the first `size` items move to the back.
 * In-place performance optimization of enquirer/lib/utils.js scrollDown.
 */
const scrollDownInPlace = <T = any>(list: T[], size: number = 1) => {
  // Guard: shifting from an empty list would push `undefined` entries,
  // growing the list with junk values.
  if (list.length === 0) {
    return
  }
  for (let i = 0; i < size; i++) {
    list.push(list.shift() as T)
  }
}
/**
* @TODO: pipeline docs
* this.run()
* this.initialize()
* - this.reset()
* - set this.state.choices this.state.visible
* this.start()
* - keypress.listen()
* this.render()
* - this.format()
* - this.renderChoices() (use this.visible)
* - this.choiceMessage()
* this.pointer()
* - this.clear()
* - ansi.clear()
* - this.restore
*
* this.keypress() - dispatch
* - this.append()
* this.delete()
* this.deleteForward()
* - this.complete()
* this.suggest
* set visible
* this.up()
* this.down()
 * this.pageUp()
* this.pageDown()
* this.home()
* this.end()
* - scroll visible
* set index
*
* this.submit()
* this.close()
* this.emit('submit', this.focused())
*
*/
/**
 * Interactive history search prompt built on enquirer's AutoComplete,
 * borrowing Select's render/pointer behavior. Adds: throttled rendering,
 * bracketed-paste handling, dynamic row-aware choice limiting (so long /
 * multiline commands never overflow the terminal), and in-place scrolling.
 */
export default class HistorySearcher extends AutoComplete {
  public options: PromptOptions & ExtraOptions
  private state: PromptState
  private width: number
  private height: number
  private stdout: tty.WriteStream
  private styles: any
  private input: string
  private cursor: number
  // max number of choices rendered; recomputed per render in renderChoices()
  private limit: number
  private visible: ChoiceItem[]
  private index: number
  private pastingStep: null | 'starting' | 'started' | 'ending' | 'ended'
  private alert: () => void
  public keypress: (char: string | number, key?: Keypress) => void
  public run: PromptInstance['run']
  public once: PromptInstance['once']
  private emit: Events['emit']
  /**
   * throttle render() for combo and paste
   * skip AutoComplete.render highlight for performance
   */
  render = throttle(Select.prototype.render, 72, { leading: true })
  /**
   * need pointer style like Select, while AutoComplete is empty
   */
  pointer = Select.prototype.pointer
  constructor(options: Partial<PromptOptions> & ExtraOptions) {
    super(options)
    this.input = options.input
    // place the cursor after the pre-filled input
    this.cursor += options.input.length
    /**
     * pastingStep: null | starting | started | ending | ended
     *
     * starting - bracketed paste mode \e[200
     * started - get first ~ after \e[200
     * ending - ending but not complete of bracketed paste mode \e[201
     * ended | null - finished paste, get first ~ after \e[201
     */
    this.pastingStep = null
    // start with initial col position rather than 0 default
    this.stdout.write(ansi.cursor.to(options.initCol))
    signale.debug('HistorySearcher size', { width: this.width, height: this.height })
  }
  // choices live on enquirer's state object; expose them as a property
  get choices() {
    return this.state.choices
  }
  set choices(choices: ChoiceItem[]) {
    this.state.choices = choices
  }
  // re-seed choices from options and filter by the current input
  reset() {
    const { choices } = this.options
    this.choices = choices
    this.visible = this.suggest(this.input, choices)
    signale.debug('HistorySearcher reset()', { input: this.input })
  }
  /**
   * calc and change `this.limit`,
   * to dynamic limit choices rendered to avoid long and multiline choices
   * overflow the terminal container
   *
   * rewrite `enquirer/lib/prompts/select.js`
   */
  async renderChoices(): Promise<string> {
    const { options, index, width, height } = this
    /**
     * prompt occupy 3 lines reserved
     * be consistent in `this.down()`
     */
    const heightLimit = height - 3
    let limit = 0
    let rows = 0
    // accumulate per-choice row counts until the terminal would overflow
    for (let choice of this.state.visible) {
      const choiceRows = calcTextTakeRows(choice, width)
      signale.debug('HistorySearcher: renderChoices()', 'choiceRows', choiceRows, { choice })
      const nextRows = rows + choiceRows
      signale.info('HistorySearcher renderChoices()', {
        choiceRows,
        choice: choice.slice(0, 10),
        rows: nextRows,
        width,
        height: this.height,
        limit,
      })
      if (nextRows >= heightLimit) {
        break
      }
      rows += choiceRows
      limit += 1
      // never exceed the user-configured limit either
      if (limit >= options.limit) {
        signale.debug('HistorySearcher renderChoices() trigger max limit', { limit })
        break
      }
    }
    this.limit = limit
    /** when pageDown, origin index will greater than limit */
    if (index >= limit) {
      this.index = limit -1
    }
    /**
     * if empty choices to display, rows will be 0,
     * but it will show `No matching choices` message in next line,
     * so actually the minimum size is 1
     */
    this.state.size = Math.max(1, rows)
    return super.renderChoices()
  }
  /**
   * Render a single choice row.
   * NOTE(review): a highlighted `message` is built via styles.placeholder
   * but the un-highlighted `choice` is returned, so the styling is never
   * shown — likely should return `message`; the @TODO above the shader
   * suggests highlighting is unfinished, so confirm intent before changing.
   * Also note `replace` only hits the first occurrence and the keywords
   * are lowercased while `message` is not.
   */
  choiceMessage(choice: ChoiceItem, index: number) {
    const input = this.input
    // @TODO: this.options.highlight
    const shader = this.styles.placeholder
    let message = choice
    for (const item of new Set(input.toLowerCase().split(' ').filter(Boolean))) {
      message = message.replace(item, shader(item))
    }
    return choice
  }
  /** format input area output */
  format() {
    const { input } = this
    const { submitted, cancelled } = this.state
    signale.debug('HistorySearcher format()', { input, submitted, cancelled })
    if (input) {
      return input
    }
    // with no input, just make sure the cursor is visible
    return ansi.code.show
  }
  /**
   * overwrite, replace erase first line with erasePrompt (only erase from initial to end)
   * used in this.render() - this.clear()
   */
  clear(rows: number = 0) {
    signale.debug('HistorySearcher clear()', { rows })
    const { options } = this
    const erasePrompt: string = ansi.cursor.to(options.initCol, null) + ansi.erase.lineEnd
    this.stdout.write(
      ansi.cursor.down(rows)
      + (ansi.erase.line + ansi.cursor.prevLine()).repeat(rows)
      + erasePrompt,
    )
  }
  /** restore cursor position */
  restore() {
    const { width, limit, options, cursor } = this
    // `state.size` computed in `this.renderChoices()`
    const rows = this.state.size
    signale.debug('HistorySearcher restore()', { limit, rows, width, cursor })
    this.stdout.write(
      ansi.cursor.up(rows)
      + ansi.cursor.to(options.initCol + cursor, null),
    )
  }
  // digits should be treated as ordinary input, not Select index jumps
  number(ch: string) {
    return this.dispatch(ch)
  }
  dispatch(ch: string, key?: Keypress) {
    // https://github.com/enquirer/enquirer/blob/2.3.2/lib/keypress.js#L104
    // https://github.com/enquirer/enquirer/blob/2.3.2/lib/keypress.js#L209
    const { sequence, ctrl, meta, option } = key || {} as Keypress
    // [BUG enquirer] bracketed paste mode
    // content will be wrapped by the sequences `\e[200~` and `\e[201~`
    // https://www.xfree86.org/current/ctlseqs.html#Bracketed%20Paste%20Mode
    if (sequence === '\u001b[200') {
      this.pastingStep = 'starting'
      signale.debug('Keypress start pasting \\e[200~')
      return
    } else if (this.pastingStep === 'starting' && sequence === '~') {
      this.pastingStep = 'started'
      signale.debug('Keypress in pasting')
      return
    } else if (this.pastingStep === 'started' && sequence === '\u001b[201') {
      this.pastingStep = 'ending'
      signale.debug('Keypress ending pasting \\e[201~')
      return
    } else if (this.pastingStep === 'ending' && sequence === '~') {
      this.pastingStep = 'ended'
      signale.debug('Keypress end pasted')
      return
    }
    signale.debug(
      'HistorySearcher dispatch',
      {
        char: ch,
        key,
      },
    )
    // forward only plain printable keys; modifier combos are handled elsewhere
    if (ch && !ctrl && !meta && !option) {
      return super.dispatch(ch)
    }
  }
  // re-filter after input changed, clamping the selection index
  async complete() {
    this.visible = this.suggest(this.input, this.choices)
    this.index = Math.min(Math.max(this.visible.length - 1, 0), this.index)
    await this.render()
  }
  // AND-match every whitespace-separated keyword, case-insensitively
  suggest(input: string, choices: ChoiceItem[]) {
    signale.debug('HistorySearcher suggest', { input })
    let result = choices
    const keywords = input.toLowerCase().split(' ').filter(Boolean)
    if (!keywords.length) {
      return result.slice()
    }
    for (const keyword of keywords) {
      result = result.filter((message) => message.toLowerCase().includes(keyword))
    }
    return result
  }
  /**
   * scroll optimize performance for in place
   */
  up() {
    const choices = this.state.visible
    const len = choices.length
    const vis = this.visible.length
    let idx = this.index
    signale.debug(
      'HistorySearcher up()',
      { len, vis, idx },
    )
    if (!len) {
      this.alert()
      return
    }
    // at the top of a scrollable list: rotate the window instead of moving
    if (len > vis && idx === 0) {
      scrollUpInPlace(choices)
      idx += 1
    }
    // NOTE(review): `%` binds tighter than `-`, so this is idx - (1 % len).
    // Result matches the intended ((idx - 1) % len + len) % len for every
    // reachable idx/len, but the grouping is misleading — consider
    // parenthesizing.
    this.index = ((idx - 1 % len) + len) % len
    this.render()
  }
  /**
   * scroll when multiline command
   * and optimize performance for in place
   */
  down() {
    const choices = this.state.visible
    const visible = this.visible
    const len = choices.length
    const vis = visible.length
    let idx = this.index
    /**
     * prompt occupy 3 lines reserved
     * be consistent in `this.renderChoices()`
     */
    const heightLimit = this.height - 3
    signale.debug(
      'HistorySearcher down()',
      { len, vis, idx },
    )
    if (!vis) {
      this.alert()
      return
    }
    // at the bottom of a scrollable list: rotate forward, evicting as many
    // leading rows as the incoming (possibly multiline) choice needs
    if (len > vis && idx === vis - 1) {
      const nextChoice: ChoiceItem = choices[idx + 1]
      const nextRows: number = calcTextTakeRows(nextChoice, this.width)
      const visibleRowList: number[] = visible.map(
        choice => calcTextTakeRows(choice, this.width),
      )
      let totalRows = visibleRowList.reduce((a, b) => a + b, 0) + nextRows
      do {
        totalRows = totalRows - visibleRowList.shift()
        scrollDownInPlace(choices)
        idx -= 1
      } while (totalRows >= heightLimit)
    }
    this.index = (idx + 1) % len
    this.render()
  }
  // jump a full page backward by rotating the list `limit` entries
  pageUp() {
    const { limit } = this
    const choices = this.state.visible
    if (!choices.length) {
      this.alert()
      return
    }
    scrollUpInPlace(choices, limit)
    this.render()
  }
  // jump a full page forward by rotating the list `limit` entries
  pageDown() {
    const { limit } = this
    const choices = this.state.visible
    if (!choices.length) {
      this.alert()
      return
    }
    scrollDownInPlace(choices, limit)
    this.render()
  }
  // jump to the first matching choice
  home() {
    this.visible = this.suggest(this.input, this.choices)
    this.index = 0
    return this.render()
  }
  // jump to the last matching choice, rotating it into the visible window
  end() {
    const choices = this.suggest(this.input, this.choices)
    const pos = choices.length - this.limit
    this.visible = choices.slice(pos).concat(choices.slice(0, pos))
    this.index = this.limit - 1
    return this.render()
  }
  // currently highlighted choice (undefined when there are no matches)
  get focused() {
    return this.visible[this.index]
  }
  /**
   * when submit, restore cursor from output row to input row
   *
   * when cancel, erase and leave origin input
   */
  async close() {
    const { focused, input } = this
    const { submitted, cancelled } = this.state
    signale.debug('HistorySearcher close()', { input, focused, submitted, cancelled })
    this.emit('close')
  }
  // resolve with the focused choice, falling back to the raw input
  async submit() {
    signale.debug('HistorySearcher submit()')
    this.state.submitted = true
    await this.close()
    const result = this.focused ?? this.input
    signale.debug('HistorySearcher submit()', { result })
    this.emit('submit', result)
  }
  async cancel(err?: string) {
    signale.debug('HistorySearcher cancel()', { err })
    this.state.cancelled = true
    await this.close()
    this.emit('cancel', await this.error(err))
  }
  // SIGINT cancels quietly; anything else is logged. On cancel the original
  // input is handed back instead of an error.
  error(err?: string): string {
    if (err !== undefined) {
      if (err === String.fromCharCode(SIGINT_CODE)) {
        signale.debug('HistorySearcher cancel, terminated with SIGINT')
      } else {
        signale.error('HistorySearcher ERROR', stringify(err), new Error(err).stack)
      }
    }
    return this.state.cancelled ? this.input : super.error(err)
  }
}
import {
ClientFeatureRepository,
EdgeFeatureHubConfig,
EdgeService,
FeatureState,
FeatureValueType,
RolloutStrategy,
RolloutStrategyAttribute,
RolloutStrategyAttributeConditional,
RolloutStrategyFieldType,
SSEResultState
} from '../app';
import { expect } from 'chai';
import { Substitute } from '@fluffy-spoon/substitute';
// Verifies that ClientFeatureRepository applies full feature-state lists,
// fires per-feature and post-load listeners only on real changes (version
// AND value), and evaluates rollout strategies through a client context.
describe('Feature repository reacts to incoming event lists as expected', () => {
  let repo: ClientFeatureRepository;
  beforeEach(() => {
    repo = new ClientFeatureRepository();
  });
  it('Can handle null or undefined feature states', () => {
    repo.notify(SSEResultState.Features, [undefined]);
  });
  it('Can handle post new state available handlers failing and letting subsequent ones continue', () => {
    let postTrigger = 0;
    let failTrigger = 0;
    // first listener throws; the repository must still call the second one
    repo.addPostLoadNewFeatureStateAvailableListener(() => {
      failTrigger++;
      throw new Error('blah');
    });
    repo.addPostLoadNewFeatureStateAvailableListener(() => postTrigger++);
    const features = [
      new FeatureState({id: '1', key: 'banana', version: 1, type: FeatureValueType.Boolean, value: true}),
    ];
    repo.notify(SSEResultState.Features, features);
    repo.notify(SSEResultState.Feature, new FeatureState({
      id: '1', key: 'banana', version: 2,
      type: FeatureValueType.Boolean, value: false
    }));
    expect(postTrigger).to.eq(1);
    expect(failTrigger).to.eq(1);
  });
  it('should accept a list of boolean features sets and triggers updates and stores values as expect', () => {
    let triggerBanana = 0;
    let triggerPear = 0;
    let triggerPeach = 0;
    repo.getFeatureState('banana').addListener(() => triggerBanana++);
    repo.getFeatureState('pear').addListener(() => triggerPear++);
    repo.getFeatureState('peach').addListener(() => triggerPeach++);
    expect(triggerBanana).to.eq(0);
    expect(triggerPear).to.eq(0);
    expect(triggerPeach).to.eq(0);
    // before any notify, features exist but are unset
    // tslint:disable-next-line:no-unused-expression
    expect(repo.getFeatureState('banana').getBoolean()).to.be.undefined;
    // tslint:disable-next-line:no-unused-expression
    expect(repo.getFeatureState('banana').isSet()).to.be.false;
    // tslint:disable-next-line:no-unused-expression
    expect(repo.getFeatureState('pear').getBoolean()).to.be.undefined;
    // tslint:disable-next-line:no-unused-expression
    expect(repo.getFeatureState('peach').getBoolean()).to.be.undefined;
    const features = [
      new FeatureState({id: '1', key: 'banana', version: 1, type: FeatureValueType.Boolean, value: true}),
      new FeatureState({id: '2', key: 'pear', version: 1, type: FeatureValueType.Boolean, value: false}),
      new FeatureState({id: '3', key: 'peach', version: 1, type: FeatureValueType.Boolean, value: true}),
    ];
    repo.notify(SSEResultState.Features, features);
    expect(triggerBanana).to.eq(1);
    expect(triggerPear).to.eq(1);
    expect(triggerPeach).to.eq(1);
    // tslint:disable-next-line:no-unused-expression
    expect(repo.getFeatureState('banana').getBoolean()).to.be.true;
    // tslint:disable-next-line:no-unused-expression
    expect(repo.getFeatureState('pear').getBoolean()).to.be.false;
    // tslint:disable-next-line:no-unused-expression
    expect(repo.getFeatureState('peach').getBoolean()).to.be.true;
    const features2 = [
      new FeatureState({id: '1', key: 'banana', version: 2, type: FeatureValueType.Boolean, value: false}),
      new FeatureState({id: '2', key: 'pear', version: 1, type: FeatureValueType.Boolean, value: false}),
      new FeatureState({id: '3', key: 'peach', version: 2, type: FeatureValueType.Boolean, value: true}),
    ];
    // only banana should trigger as it has changed its value and its version
    repo.notify(SSEResultState.Features, features2);
    expect(triggerBanana).to.eq(2);
    expect(triggerPear).to.eq(1);
    expect(triggerPeach).to.eq(1);
    // tslint:disable-next-line:no-unused-expression
    expect(repo.getFeatureState('banana').getBoolean()).to.be.false;
    // tslint:disable-next-line:no-unused-expression
    expect(repo.getFeatureState('pear').getBoolean()).to.be.false;
    // tslint:disable-next-line:no-unused-expression
    expect(repo.getFeatureState('peach').getBoolean()).to.be.true;
  });
  it('should accept a list of number features sets and triggers updates and stores values as expect', () => {
    let triggerBanana = 0;
    let triggerPear = 0;
    let triggerPeach = 0;
    repo.getFeatureState('banana').addListener(() => triggerBanana++);
    repo.getFeatureState('pear').addListener(() => triggerPear++);
    repo.getFeatureState('peach').addListener(() => triggerPeach++);
    const features = [
      new FeatureState({id: '1', key: 'banana', version: 1, type: FeatureValueType.Number, value: 7.2}),
      new FeatureState({id: '2', key: 'pear', version: 1, type: FeatureValueType.Number, value: 15}),
      new FeatureState({id: '3', key: 'peach', version: 1, type: FeatureValueType.Number, value: 56534.23}),
    ];
    repo.notify(SSEResultState.Features, features);
    expect(triggerBanana).to.eq(1);
    expect(triggerPear).to.eq(1);
    expect(triggerPeach).to.eq(1);
    expect(repo.getFeatureState('banana').getNumber()).to.eq(7.2);
    expect(repo.getFeatureState('pear').getNumber()).to.eq(15);
    expect(repo.getFeatureState('peach').getNumber()).to.eq(56534.23);
    const features2 = [
      new FeatureState({id: '1', key: 'banana', version: 2, type: FeatureValueType.Number, value: 16}),
      new FeatureState({id: '2', key: 'pear', version: 1, type: FeatureValueType.Number, value: 15}),
      new FeatureState({id: '3', key: 'peach', version: 2, type: FeatureValueType.Number, value: 56534.23}),
    ];
    // only banana should trigger as it has changed its value and its version
    repo.notify(SSEResultState.Features, features2);
    expect(triggerBanana).to.eq(2);
    expect(triggerPear).to.eq(1);
    expect(triggerPeach).to.eq(1);
    expect(repo.getFeatureState('banana').getNumber()).to.eq(16);
    expect(repo.getFeatureState('pear').getNumber()).to.eq(15);
    expect(repo.getFeatureState('peach').getNumber()).to.eq(56534.23);
    // convenience accessors on the repository itself
    expect(repo.getNumber('pear')).to.eq(15);
    expect(repo.isSet('pear')).to.eq(true);
  });
  it('should accept and trigger events via a context and the repo in the same fashion for the same feature',
    async () => {
      const fhConfig = new EdgeFeatureHubConfig('http://localhost:8080', '123*123');
      fhConfig.repository(repo);
      // stub out the edge transport; everything arrives via repo.notify below
      const edgeService = Substitute.for<EdgeService>();
      fhConfig.edgeServiceProvider((repository, config) => edgeService);
      let triggerContext = 0;
      let triggerRepo = 0;
      let contextNumber = 0;
      let repoNumber = 0;
      repo.getFeatureState('fruit').addListener((fs) => {
        repoNumber = fs.getNumber();
        triggerRepo++;
      });
      const fhContext = await fhConfig.newContext().userKey('fred').build();
      const feat = fhContext.feature('fruit');
      feat.addListener((fs) => {
        contextNumber = fs.getNumber();
        triggerContext++;
      });
      // strategy matches userkey 'fred', so the context sees the strategy
      // value (12) while the plain repository sees the default value (16)
      const features = [
        new FeatureState({
          id: '1', key: 'fruit', version: 2, type: FeatureValueType.Number, value: 16,
          strategies: [new RolloutStrategy({
            id: '1', value: 12, attributes: [new RolloutStrategyAttribute({
              type: RolloutStrategyFieldType.String, fieldName: 'userkey', values: ['fred'],
              conditional: RolloutStrategyAttributeConditional.Equals
            })]
          })]
        }),
      ];
      // only banana should trigger as it has changed its value and its version
      repo.notify(SSEResultState.Features, features);
      expect(triggerContext).to.eq(1);
      expect(triggerRepo).to.eq(1);
      expect(repoNumber).to.eq(16);
      expect(contextNumber).to.eq(12);
      // mimic the same revision coming through, but a different main value, which is what would happen for a server
      // eval change
      const features2 = [
        new FeatureState({
          id: '1', key: 'fruit', version: 2, type: FeatureValueType.Number, value: 17,
          strategies: [new RolloutStrategy({
            id: '1', value: 13, attributes: [new RolloutStrategyAttribute({
              type: RolloutStrategyFieldType.String, fieldName: 'userkey', values: ['fred'],
              conditional: RolloutStrategyAttributeConditional.Equals
            })]
          })]
        }),
      ];
      repo.notify(SSEResultState.Features, features2);
      expect(triggerContext).to.eq(2);
      expect(triggerRepo).to.eq(2);
      expect(repoNumber).to.eq(17);
      expect(contextNumber).to.eq(13);
    });
  it('should accept a list of string features sets and triggers updates and stores values as expect', () => {
    let triggerBanana = 0;
    let triggerPear = 0;
    let triggerPeach = 0;
    repo.getFeatureState('banana').addListener(() => triggerBanana++);
    repo.getFeatureState('pear').addListener(() => triggerPear++);
    repo.getFeatureState('peach').addListener(() => triggerPeach++);
    const features = [
      new FeatureState({id: '1', key: 'banana', version: 1, type: FeatureValueType.String, value: '7.2'}),
      new FeatureState({id: '2', key: 'pear', version: 1, type: FeatureValueType.String, value: '15'}),
      new FeatureState({id: '3', key: 'peach', version: 1, type: FeatureValueType.String, value: '56534.23'}),
    ];
    repo.notify(SSEResultState.Features, features);
    expect(triggerBanana).to.eq(1);
    expect(triggerPear).to.eq(1);
    expect(triggerPeach).to.eq(1);
    expect(repo.getFeatureState('banana').getString()).to.eq('7.2');
    expect(repo.getFeatureState('pear').getString()).to.eq('15');
    expect(repo.getFeatureState('peach').getString()).to.eq('56534.23');
    const features2 = [
      new FeatureState({id: '1', key: 'banana', version: 2, type: FeatureValueType.String, value: '16'}),
      new FeatureState({id: '2', key: 'pear', version: 1, type: FeatureValueType.String, value: '15'}),
      new FeatureState({id: '3', key: 'peach', version: 2, type: FeatureValueType.String, value: '56534.23'}),
    ];
    // only banana should trigger as it has changed its value and its version
    repo.notify(SSEResultState.Features, features2);
    expect(triggerBanana).to.eq(2);
    expect(triggerPear).to.eq(1);
    expect(triggerPeach).to.eq(1);
    expect(repo.getFeatureState('banana').getString()).to.eq('16');
    expect(repo.getFeatureState('pear').getString()).to.eq('15');
    expect(repo.getFeatureState('peach').getString()).to.eq('56534.23');
    expect(repo.getString('peach')).to.eq('56534.23');
  });
  it('should accept a list of json features sets and triggers updates and stores values as expect', () => {
    let triggerBanana = 0;
    let triggerPear = 0;
    let triggerPeach = 0;
    repo.getFeatureState('banana').addListener(() => triggerBanana++);
    repo.getFeatureState('pear').addListener(() => triggerPear++);
    repo.getFeatureState('peach').addListener(() => triggerPeach++);
    const features = [
      new FeatureState({id: '1', key: 'banana', version: 1, type: FeatureValueType.Json, value: '{}'}),
      new FeatureState({id: '2', key: 'pear', version: 1, type: FeatureValueType.Json, value: '"nashi"'}),
      new FeatureState({
        id: '3',
        key: 'peach',
        version: 1,
        type: FeatureValueType.Json,
        value: '{"variety": "golden queen"}'
      }),
    ];
    repo.notify(SSEResultState.Features, features);
    expect(triggerBanana).to.eq(1);
    expect(triggerPear).to.eq(1);
    expect(triggerPeach).to.eq(1);
    // JSON values are stored and returned as raw strings, not parsed objects
    expect(repo.getFeatureState('banana').getRawJson()).to.eq('{}');
    expect(repo.getFeatureState('pear').getRawJson()).to.eq('"nashi"');
    expect(repo.getFeatureState('peach').getRawJson()).to.eq('{"variety": "golden queen"}');
    const features2 = [
      new FeatureState({id: '1', key: 'banana', version: 2, type: FeatureValueType.Json, value: '"yellow"'}),
      new FeatureState({id: '2', key: 'pear', version: 1, type: FeatureValueType.Json, value: '"nashi"'}),
      new FeatureState({
        id: '3',
        key: 'peach',
        version: 2,
        type: FeatureValueType.Json,
        value: '{"variety": "golden queen"}'
      }),
    ];
    // only banana should trigger as it has changed its value and its version
    repo.notify(SSEResultState.Features, features2);
    expect(triggerBanana).to.eq(2);
    expect(triggerPear).to.eq(1);
    expect(triggerPeach).to.eq(1);
    expect(repo.getFeatureState('banana').getRawJson()).to.eq('"yellow"');
    expect(repo.getFeatureState('pear').getRawJson()).to.eq('"nashi"');
    expect(repo.getFeatureState('peach').getRawJson()).to.eq('{"variety": "golden queen"}');
    expect(repo.getJson('pear')).to.eq('"nashi"');
    expect(repo.isSet('pear')).to.eq(true);
  });
});
import Backbone from "backbone"
import { keys, last } from "lodash"
import { fabricate } from "@artsy/antigravity"
const { CurrentUser } = require("../../lib/current_user")
// CurrentUser suite #1: Backbone.sync is replaced with a jest mock, so each
// test inspects mock.calls ([method, model, options]) and manually invokes
// the success/error callbacks to simulate API responses.
describe("CurrentUser", () => {
  let user
  beforeEach(() => {
    Backbone.sync = jest.fn()
    user = new CurrentUser(fabricate("user"))
  })
  describe("#sync", () => {
    it("does the right thing for fetch/save", () => {
      // save must NOT leak the access token in the payload; fetch must send it
      user.save()
      expect(Backbone.sync.mock.calls[0][2].data).toBeUndefined()
      user.fetch()
      expect(keys(Backbone.sync.mock.calls[1][2].data)).toContainEqual(
        "access_token"
      )
    })
  })
  describe("#registeredForAuction", () => {
    describe("when a user is not registered", () => {
      it("returns false", done => {
        user.registeredForAuction("foobar-auction", {
          success(boolean) {
            expect(boolean).toBeFalsy()
            done()
          },
        })
        Backbone.sync.mock.calls[0][2].data.sale_id.should.equal(
          "foobar-auction"
        )
        // empty bidder list => not registered
        Backbone.sync.mock.calls[0][2].success([])
      })
    })
    describe("when a user is registered", () => {
      it("returns true", done => {
        user.registeredForAuction("foobar-auction-registered", {
          success(boolean) {
            boolean.should.be.true()
            done()
          },
        })
        Backbone.sync.mock.calls[0][2].data.sale_id.should.equal(
          "foobar-auction-registered"
        )
        Backbone.sync.mock.calls[0][2].success([{ id: "existy" }])
      })
    })
    it("when given a user is logged out error soaks it up and returns false", done => {
      user.registeredForAuction("foobar", {
        success(registered) {
          registered.should.equal(false)
        },
      })
      Backbone.sync.mock.calls[0][2].error({
        responseText: "A user is required",
      })
    })
  })
  describe("#fetchQualifiedBidder", () => {
    describe("when a user has no bidders", () => {
      it("returns false", done => {
        user.fetchQualifiedBidder("foobar-auction", {
          success(bool) {
            bool.should.be.false()
            done()
          },
        })
        Backbone.sync.mock.calls[0][2].success([])
      })
    })
    describe("when a user has no bidders for the auction", () => {
      it("returns false", done => {
        // qualified, but for a different sale
        const bidder = {
          id: "me",
          qualified_for_bidding: true,
          sale: {
            id: "nothing",
          },
        }
        user.fetchQualifiedBidder("foobar-auction", {
          success(bool) {
            bool.should.be.false()
            done()
          },
        })
        Backbone.sync.mock.calls[0][2].success([bidder])
      })
    })
    describe("when a user is qualified", () => {
      it("returns true", done => {
        const bidder = {
          id: "me",
          qualified_for_bidding: true,
          sale: {
            id: "foobar-auction",
          },
        }
        user.fetchQualifiedBidder("foobar-auction", {
          success(bool) {
            bool.should.be.true()
            done()
          },
        })
        Backbone.sync.mock.calls[0][2].success([bidder])
      })
    })
    describe("when a user is not qualified", () => {
      it("returns false", done => {
        const bidder = {
          id: "me",
          qualified_for_bidding: false,
          sale: {
            id: "foobar-auction",
          },
        }
        user.fetchQualifiedBidder("foobar-auction", {
          success(bool) {
            bool.should.be.false()
            done()
          },
        })
        Backbone.sync.mock.calls[0][2].success([bidder])
      })
    })
  })
  describe("#placeBid", () => {
    it.skip("creates a new bid position given the right params", () => {})
  })
  describe("#savedArtwork", () => {
    it("passess true to success cb if the user has saved the given artwork", () => {
      user.set({ id: "current-user" })
      user.savedArtwork("bitty", {
        success(saved) {
          saved.should.be.ok()
        },
      })
      Backbone.sync.mock.calls[0][2].url.should.containEql(
        "/api/v1/collection/saved-artwork/artworks?artworks[]=bitty&private=true&user_id=current-user"
      )
      Backbone.sync.mock.calls[0][2].success([fabricate("artwork")])
    })
    it("passes false to success cb if the user has not saved the given work", () => {
      user.set({ id: "current-user" })
      user.savedArtwork("bitty", {
        error(msg) {
          msg.should.not.be.ok()
        },
        success(saved) {
          saved.should.not.be.ok()
        },
      })
      Backbone.sync.mock.calls[0][2].url.should.containEql(
        "/api/v1/collection/saved-artwork/artworks?artworks[]=bitty&private=true&user_id=current-user"
      )
      Backbone.sync.mock.calls[0][2].success([])
    })
    it("when the collection is not found, false is passed to the success cb", () => {
      user.set({ id: "current-user" })
      user.savedArtwork("bitty", {
        error(msg) {
          msg.should.not.be.ok()
        },
        success(saved) {
          saved.should.not.be.ok()
        },
      })
      Backbone.sync.mock.calls[0][2].url.should.containEql(
        "/api/v1/collection/saved-artwork/artworks?artworks[]=bitty&private=true&user_id=current-user"
      )
      // a missing collection is treated as "nothing saved", not an error
      Backbone.sync.mock.calls[0][2].error({
        responseText: "Collection not found",
      })
    })
    it("calls the error cb for other errors", () => {
      user.set({ id: "current-user" })
      user.savedArtwork("bitty", {
        error(msg) {
          msg.should.be.ok()
        },
        success(msg) {
          msg.should.not.be.ok()
        },
      })
      Backbone.sync.mock.calls[0][2].url.should.containEql(
        "/api/v1/collection/saved-artwork/artworks?artworks[]=bitty&private=true&user_id=current-user"
      )
      Backbone.sync.mock.calls[0][2].error({ responseText: "Unauthorized" })
    })
  })
  describe("#followingArtists", () => {
    it("makes the correct API call to retreive a list of artists the user is following", () => {
      user.followingArtists()
      Backbone.sync.mock.calls[0][0].should.equal("read")
      Backbone.sync.mock.calls[0][2].url.should.containEql(
        "/api/v1/me/follow/artists"
      )
    })
  })
})
// CurrentUser suite #2: same Backbone.sync mocking approach, with sharify
// session data mocked for the tests that depend on it.
describe("CurrentUser", () => {
  let user
  beforeEach(() => {
    jest.mock("sharify", () => ({
      data: {
        SESSION_ID: "cool-session-id",
      },
    }))
    user = new CurrentUser(fabricate("user"))
    Backbone.sync = jest.fn()
  })
  describe("#defaultArtworkCollection", () => {
    it("throws a sensible error when you forget to initialize artwork collections", () => {
      // NOTE(review): this arrow function is created but never invoked, so
      // the throw-assertion inside it never runs — the test passes vacuously.
      // Likely intended: expect(() => user.defaultArtworkCollection()).toThrow(/Must call/)
      ;() => user.defaultArtworkCollection().should.throw(/Must call/)
    })
  })
  describe("#saveArtwork", () => {
    it("makes the correct api call", () => {
      // call [0] is the collection fetch from initialize; [1] is the save
      user.initializeDefaultArtworkCollection()
      user.saveArtwork("masterpiece", null)
      Backbone.sync.mock.calls[1][0].should.equal("create")
    })
  })
  describe("#removeArtwork", () => {
    it("makes the correct api call", () => {
      user.initializeDefaultArtworkCollection()
      user.removeArtwork("masterpiece", null)
      Backbone.sync.mock.calls[1][0].should.equal("delete")
    })
  })
  describe("#fetchSuggestedHomepageArtworks", () => {
    it("fetches homepages artworks", () => {
      user.fetchSuggestedHomepageArtworks({})
      Backbone.sync.mock.calls[0][2].url.should.containEql(
        "suggested/artworks/homepage"
      )
    })
  })
  describe("#followArtist", () => {
    it("follows an artist", () => {
      user.followArtist("andy-foobar", {})
      // @ts-expect-error PLEASE_FIX_ME_STRICT_NULL_CHECK_MIGRATION
      last(Backbone.sync.mock.calls)[1]
        .url()
        .should.containEql("me/follow/artist")
    })
    it("injects the access token", () => {
      user.set({ accessToken: "xfoobar" })
      user.followArtist("andy-foobar", {})
      // @ts-expect-error PLEASE_FIX_ME_STRICT_NULL_CHECK_MIGRATION
      last(Backbone.sync.mock.calls)[2].access_token.should.equal("xfoobar")
    })
  })
  describe("#checkRegisteredForAuction", () => {
    it("makes the correct API call, accepts normal options", done => {
      user.checkRegisteredForAuction({
        saleId: "an-auction",
        success(status) {
          status.should.be.true()
          done()
        },
      })
      Backbone.sync.mock.calls[0][2].url.should.containEql("/api/v1/me/bidders")
      Backbone.sync.mock.calls[0][2].data.sale_id.should.equal("an-auction")
      Backbone.sync.mock.calls[0][2].success(["existy"])
    })
  })
  describe("#fetchNotifications", () => {
    it("makes the correct API call and has default size of 10", () => {
      user.fetchNotificationBundles({
        success(status) {
          status.should.be.true()
        },
      })
      Backbone.sync.mock.calls[0][2].url.should.containEql(
        "/api/v1/me/notifications/feed"
      )
      Backbone.sync.mock.calls[0][2].data.size.should.equal(10)
    })
  })
  describe("#fetchAndMarkNotifications", () => {
    it("makes the correct API call and has defaults", () => {
      user.fetchAndMarkNotifications({
        success(status) {
          status.should.be.true()
        },
      })
      Backbone.sync.mock.calls[0][2].url.should.containEql(
        "/api/v1/me/notifications"
      )
      Backbone.sync.mock.calls[0][2].data.type.should.equal("ArtworkPublished")
      Backbone.sync.mock.calls[0][2].data.unread.should.be.true()
      Backbone.sync.mock.calls[0][2].data.size.should.equal(100)
    })
  })
  describe("#prepareForInquiry", () => {
    beforeEach(() => {
      user = new CurrentUser()
      Backbone.sync = jest.fn()
    })
    it("creates or persists everything needed to make an inquiry", () => {
      // NOTE(review): the promise is neither returned nor awaited, so jest
      // may finish the test before these assertions run; consider
      // `return user.prepareForInquiry().then(...)` — confirm intent.
      user.prepareForInquiry().then(function () {
        Backbone.sync.callCount.should.equal(2)
        Backbone.sync.mock.calls[0][1].url().should.containEql("/api/v1/me")
        Backbone.sync.mock.calls[1][1].url.should.containEql(
          "/api/v1/me/collector_profile"
        )
      })
    })
  })
  describe("#isChecked", () => {
    it("translates a boolean attribute to on or off", () => {
      user.set({
        follow_email: true,
        offer_emails: false,
        weekly_email: false,
      })
      // false attributes yield undefined (unchecked); true yields truthy
      expect(user.isChecked("weekly_email")).toBeUndefined()
      expect(user.isChecked("offer_emails")).toBeUndefined()
      user.isChecked("follow_email").should.be.true()
    })
  })
  describe("authentications", () => {
    let authentications
    beforeEach(() => {
      authentications = [
        { id: "1", provider: "twitter", uid: "123456789" },
        { id: "2", provider: "facebook", uid: "987654321" },
      ]
    })
    describe("relation", () => {
      it("should inject initial authentications", () => {
        const user = new CurrentUser({ authentications: authentications })
        user.isLinkedTo("twitter").should.be.true()
        user.isLinkedTo("facebook").should.be.true()
      })
    })
    describe("#isLinkedTo", () => {
      it("determines if an account is linked to an app provider", () => {
        user.isLinkedTo("twitter").should.be.false()
        user.related().authentications.reset(authentications)
        user.isLinkedTo("twitter").should.be.true()
        user.isLinkedTo("facebook").should.be.true()
      })
    })
  })
})
import React, { Fragment, useEffect } from 'react';
import { makeStyles, Theme, createStyles, Accordion as MuiAccordion, Grid, TextField, withStyles } from '@material-ui/core';
import MuiAccordionSummary from '@material-ui/core/AccordionSummary';
import MuiAccordionDetails from '@material-ui/core/AccordionDetails';
import Typography from '@material-ui/core/Typography';
import ExpandMoreIcon from '@material-ui/icons/ExpandMore';
import { IPropConfig } from "rx-drag/models/IPropConfig";
import intl from 'react-intl-universal';
import AttributeBoxActionSection from './AttributeBoxActionSection';
import AttributeBoxValidateArea from 'AppStudio/RxPageEditor/AttrebuteBox/ValidateArea';
import { IValidateRule } from "Base/Model/IValidateRule";
import { MultiSelectBox } from 'Components/Inputs/Select/MultiSelectBox';
import { resolveMetaConfig } from 'rx-drag/RxDrag';
import { observer } from 'mobx-react';
import { useDesign } from '../../../rx-drag/store/useDesign';
import { toJS } from 'mobx';
import { propsInputs } from './PropsInputs';
import { cloneObject } from 'rx-drag/utils/cloneObject';
import { MetaConfig } from 'Base/RXNode/MetaConfig';
import { useAppStudioStore } from 'AppStudio/AppStudioStore';
import { IRxAuth } from 'Base/Model/IRxAuth';
import { AttributeBoxMultiActionSection } from './AttributeBoxMultiActionSection';
import { stringValue } from 'rx-drag/utils/stringValue';
// Styles for the attribute box: full-width root, compact section headings,
// and secondary-text coloring for the node label and expander icons.
const useStyles = makeStyles((theme: Theme) =>
  createStyles({
    root: {
      width: '100%',
    },
    heading: {
      fontSize: theme.typography.pxToRem(15),
      fontWeight: theme.typography.fontWeightRegular,
    },
    nodeLabel: {
      padding: '16px',
      color: theme.palette.text.secondary,
    },
    moreIcon: {
      color: theme.palette.text.secondary,
    }
  }),
);
// Flat accordion variant: horizontal dividers between panels, no shadow,
// no extra margin when expanded.
const Accordion = withStyles((theme) => ({
  root: {
    border: `1px solid ${theme.palette.divider}`,
    borderLeft: '0',
    borderRight: '0',
    boxShadow: 'none',
    '&:not(:last-child)': { borderBottom: 0 },
    '&:before': { display: 'none' },
    '&$expanded': { margin: 'auto' },
  },
  expanded: {},
}))(MuiAccordion);
// Compact accordion header with a constant 46px height whether collapsed
// or expanded, separated from the body by a divider line.
const AccordionSummary = withStyles((theme) => ({
  root: {
    backgroundColor: theme.palette.background.paper,
    borderBottom: `1px solid ${theme.palette.divider}`,
    marginBottom: -1,
    minHeight: 46,
    '&$expanded': { minHeight: 46 },
  },
  content: {
    '&$expanded': { margin: '12px 0' },
  },
  expanded: {},
}))(MuiAccordionSummary);
// Accordion body: default page background with tightened padding.
const AccordionDetails = withStyles((theme) => ({
  root: {
    backgroundColor: theme.palette.background.default,
    padding: theme.spacing(3, 2, 2, 2),
  },
}))(MuiAccordionDetails);
// Right-hand attribute panel of the page editor: shows the currently selected
// node and lets the user edit its props, bound data field, validation rules,
// click actions, GraphQL fields and authority settings through collapsible
// accordion sections. Reactive via MobX `observer`.
export const AttributeBox = observer(()=>{
  const classes = useStyles();
  const {rxDragStore} = useDesign();
  const studioStore = useAppStudioStore();
  // Currently selected designer node; undefined when nothing is selected.
  const node = rxDragStore?.selectedNode;
  const [metaConfig, setMetaConfig] = React.useState<MetaConfig>();
  // Local snapshot of the node's props; edits are written back via the store.
  const [metaProps, setMetaProps] = React.useState<any>({});
  const [validateRule, setValidateRule] = React.useState<IValidateRule>();
  //const [auths, setAuths] = React.useState(node?.meta.auths);

  // Resolve the meta config (prop editors, capability flags) whenever the
  // selected node changes.
  useEffect(() => {
    //setAuths(node?.meta.auths);
    if(node?.meta.name){
      setMetaProps(node?.meta.props || {});
      setMetaConfig(resolveMetaConfig(node?.meta.name) as MetaConfig)
    }
  },[node]);

  // Keep the local prop snapshot in sync with external prop changes.
  useEffect(() => {
    setMetaProps(node?.meta.props || {});
    // eslint-disable-next-line react-hooks/exhaustive-deps
  },[node?.meta.props]);

  // Write a single prop change back to the store. The props object is cloned
  // (toJS + cloneObject) so the MobX-observed original is never mutated.
  const handlePropChange = (key:string, value:any) => {
    const props = cloneObject(toJS(metaProps)||{});
    props[key] = value;
    rxDragStore?.updateSelecteMeta('props', props);
  };

  // Persist the bound data-field name on the node meta.
  const handleFieldChange = (event: React.ChangeEvent<{ value: unknown }>)=>{
    let newField = event.target.value as string;
    rxDragStore?.updateSelecteMeta('field', newField);
  }

  // Persist the GraphQL fields fragment on the node meta.
  const handleFieldGQLChange = (event: React.ChangeEvent<{ value: unknown }>)=>{
    let newFieldGQL = event.target.value as string;
    rxDragStore?.updateSelecteMeta('fieldsGql', newFieldGQL);
  }

  // NOTE(review): the rule is only kept in local state — the persisting call
  // below is commented out, so validation edits never reach the node meta.
  // Confirm whether that is intentional.
  const handleRuleChange = (rule:IValidateRule)=>{
    //node?.updateProp('rule', rule);
    setValidateRule(rule);
  }

  // Persist the list of authorities allowed to see/use this node.
  const handleChangeAuths = (auths : IRxAuth[]|undefined)=>{
    rxDragStore?.updateSelecteMeta('auths', auths);
  }

  return (
    <div className={classes.root}>
      <div className={classes.nodeLabel}>
        {intl.get('selected-node')}
        <span style={{color:'#5d78ff'}}>{node?.meta.name ? node?.meta.name : 'None' }</span>
      </div>
      {node&&
        <Fragment>
          {/* Attributes: one prop editor per config entry of the meta. */}
          <Accordion>
            <AccordionSummary
              expandIcon={<ExpandMoreIcon className = {classes.moreIcon} />}
            >
              <Typography className={classes.heading}>{intl.get('attributes')}</Typography>
            </AccordionSummary>
            <AccordionDetails>
              <Grid container spacing={2}>
                {
                  metaConfig?.getPropConfigs().map((config:IPropConfig, index)=>{
                    // Look up the editor component registered for this prop type.
                    const PropInput = config.propType ? propsInputs[config.propType] : undefined;
                    const label = config.label || (config.labelKey && intl.get(config.labelKey)) ||'';
                    console.assert(PropInput, `Config editor not exist,Meta:${node?.meta.name}, prop:${config.name}, propType:${config.propType}`);
                    return(
                      PropInput &&
                      <PropInput
                        key={index + '-' + config.name}
                        label = {label}
                        value = {metaProps[config.name]}
                        onChange={(value: any) => handlePropChange(config.name, value)}
                        {...config.props}
                      />
                    )
                  })
                }
              </Grid>
            </AccordionDetails>
          </Accordion>
          {/* Data binding: only for metas that declare a bindable field. */}
          {metaConfig?.hasField &&
            <Accordion>
              <AccordionSummary
                expandIcon={<ExpandMoreIcon className = {classes.moreIcon} />}
              >
                <Typography className={classes.heading}>{intl.get('data')}</Typography>
              </AccordionSummary>
              <AccordionDetails key={node.id + '-data'}>
                <TextField
                  fullWidth
                  size="small"
                  variant = "outlined"
                  label={intl.get("field")} value={stringValue(node?.meta.field)}
                  onChange={handleFieldChange}>
                </TextField>
              </AccordionDetails>
            </Accordion>
          }
          {/* Validation rules (local state only; see handleRuleChange). */}
          {
            metaConfig?.hasValidation &&
            <Accordion >
              <AccordionSummary
                expandIcon={<ExpandMoreIcon className = {classes.moreIcon} />}
              >
                <Typography className={classes.heading}>{intl.get('validate')}</Typography>
              </AccordionSummary>
              <AccordionDetails key={node.id + '-rule'}>
                <AttributeBoxValidateArea rule={validateRule} onChange={handleRuleChange} />
              </AccordionDetails>
            </Accordion>
          }
          {/* Single onClick action. */}
          {metaConfig?.hasAction &&
            <Accordion>
              <AccordionSummary
                expandIcon={<ExpandMoreIcon className = {classes.moreIcon} />}
              >
                <Typography className={classes.heading}>{intl.get('action')}</Typography>
              </AccordionSummary>
              <AccordionDetails key={node.id + '-action'}>
                <Grid container spacing={2}>
                  <AttributeBoxActionSection
                    action = {metaProps.onClick}
                    onChange = {action=>handlePropChange('onClick', action)}
                  />
                </Grid>
              </AccordionDetails>
            </Accordion>
          }
          {/* Multiple actions list. */}
          {
            metaConfig?.hasMultiAction &&
            <Accordion>
              <AccordionSummary
                expandIcon={<ExpandMoreIcon className = {classes.moreIcon} />}
              >
                <Typography className={classes.heading}>{intl.get('action')}</Typography>
              </AccordionSummary>
              <AccordionDetails key={node.id + '-actions'}>
                <Grid container spacing={2}>
                  <AttributeBoxMultiActionSection
                    actions = {metaProps.actions}
                    onChange = {action=>handlePropChange('actions', action)}
                  />
                </Grid>
              </AccordionDetails>
            </Accordion>
          }
          {/* GraphQL fields fragment. NOTE(review): '-grahpql' looks like a
              typo for '-graphql'; harmless as a React key, but confirm before
              renaming. */}
          {
            metaConfig?.hasGraphQl &&
            <Accordion>
              <AccordionSummary
                expandIcon={<ExpandMoreIcon className = {classes.moreIcon} />}
              >
                <Typography className={classes.heading}>GraphQL</Typography>
              </AccordionSummary>
              <AccordionDetails key={node.id + '-grahpql'}>
                <Grid container spacing={2}>
                  <TextField
                    fullWidth
                    multiline
                    rows = {3}
                    size="small"
                    variant = "outlined"
                    label={intl.get("field") + ' GraphQL'} value={stringValue(node?.meta.fieldsGql)}
                    onChange={handleFieldGQLChange}>
                  </TextField>
                </Grid>
              </AccordionDetails>
            </Accordion>
          }
          {/* Authority: multi-select over the app's configured auths. */}
          <Accordion>
            <AccordionSummary
              expandIcon={<ExpandMoreIcon className = {classes.moreIcon} />}
            >
              <Typography className={classes.heading}>{intl.get('authority')}</Typography>
            </AccordionSummary>
            <AccordionDetails>
              <Grid container spacing={2}>
                <Grid item xs={12}>
                  <MultiSelectBox fullWidth label={intl.get('authority')}
                    variant="outlined"
                    size="small"
                    items = {studioStore?.rxApp?.auths || []}
                    value = {node?.meta.auths || []}
                    onChange = {(e:any)=>handleChangeAuths(e.target.value)}
                  />
                </Grid>
              </Grid>
            </AccordionDetails>
          </Accordion>
        </Fragment>
      }
    </div>
  )
})
'use strict';
import {Color} from 'vs/base/common/color';
import * as assert from 'assert';
// Round-trip and derived-value tests for the Color helper class.
suite('Color', () => {
	// RGBA -> HSLA conversion for primaries, secondaries, and greys.
	test('rgba2hsla', function () {
		assert.deepEqual({ h: 0, s: 0, l: 0, a: 1 }, Color.fromRGBA({ r: 0, g: 0, b: 0, a: 1 }).toHSLA());
		assert.deepEqual({ h: 0, s: 0, l: 1, a: 1 }, Color.fromRGBA({ r: 255, g: 255, b: 255, a: 1 }).toHSLA());
		assert.deepEqual({ h: 0, s: 1, l: 0.5, a: 1 }, Color.fromRGBA({ r: 255, g: 0, b: 0, a: 1 }).toHSLA());
		assert.deepEqual({ h: 120, s: 1, l: 0.5, a: 1 }, Color.fromRGBA({ r: 0, g: 255, b: 0, a: 1 }).toHSLA());
		assert.deepEqual({ h: 240, s: 1, l: 0.5, a: 1 }, Color.fromRGBA({ r: 0, g: 0, b: 255, a: 1 }).toHSLA());
		assert.deepEqual({ h: 60, s: 1, l: 0.5, a: 1 }, Color.fromRGBA({ r: 255, g: 255, b: 0, a: 1 }).toHSLA());
		assert.deepEqual({ h: 180, s: 1, l: 0.5, a: 1 }, Color.fromRGBA({ r: 0, g: 255, b: 255, a: 1 }).toHSLA());
		assert.deepEqual({ h: 300, s: 1, l: 0.5, a: 1 }, Color.fromRGBA({ r: 255, g: 0, b: 255, a: 1 }).toHSLA());
		assert.deepEqual({ h: 0, s: 0, l: 0.753, a: 1 }, Color.fromRGBA({ r: 192, g: 192, b: 192, a: 1 }).toHSLA());
		assert.deepEqual({ h: 0, s: 0, l: 0.502, a: 1 }, Color.fromRGBA({ r: 128, g: 128, b: 128, a: 1 }).toHSLA());
		assert.deepEqual({ h: 0, s: 1, l: 0.251, a: 1 }, Color.fromRGBA({ r: 128, g: 0, b: 0, a: 1 }).toHSLA());
		assert.deepEqual({ h: 60, s: 1, l: 0.251, a: 1 }, Color.fromRGBA({ r: 128, g: 128, b: 0, a: 1 }).toHSLA());
		assert.deepEqual({ h: 120, s: 1, l: 0.251, a: 1 }, Color.fromRGBA({ r: 0, g: 128, b: 0, a: 1 }).toHSLA());
		assert.deepEqual({ h: 300, s: 1, l: 0.251, a: 1 }, Color.fromRGBA({ r: 128, g: 0, b: 128, a: 1 }).toHSLA());
		assert.deepEqual({ h: 180, s: 1, l: 0.251, a: 1 }, Color.fromRGBA({ r: 0, g: 128, b: 128, a: 1 }).toHSLA());
		assert.deepEqual({ h: 240, s: 1, l: 0.251, a: 1 }, Color.fromRGBA({ r: 0, g: 0, b: 128, a: 1 }).toHSLA());
	});
	// Inverse conversion: the same fixtures as above, mirrored.
	test('hsla2rgba', function () {
		assert.deepEqual({ r: 0, g: 0, b: 0, a: 1 }, Color.fromHSLA({ h: 0, s: 0, l: 0, a: 1 }).toRGBA());
		assert.deepEqual({ r: 255, g: 255, b: 255, a: 1 }, Color.fromHSLA({ h: 0, s: 0, l: 1, a: 1 }).toRGBA());
		assert.deepEqual({ r: 255, g: 0, b: 0, a: 1 }, Color.fromHSLA({ h: 0, s: 1, l: 0.5, a: 1 }).toRGBA());
		assert.deepEqual({ r: 0, g: 255, b: 0, a: 1 }, Color.fromHSLA({ h: 120, s: 1, l: 0.5, a: 1 }).toRGBA());
		assert.deepEqual({ r: 0, g: 0, b: 255, a: 1 }, Color.fromHSLA({ h: 240, s: 1, l: 0.5, a: 1 }).toRGBA());
		assert.deepEqual({ r: 255, g: 255, b: 0, a: 1 }, Color.fromHSLA({ h: 60, s: 1, l: 0.5, a: 1 }).toRGBA());
		assert.deepEqual({ r: 0, g: 255, b: 255, a: 1 }, Color.fromHSLA({ h: 180, s: 1, l: 0.5, a: 1 }).toRGBA());
		assert.deepEqual({ r: 255, g: 0, b: 255, a: 1 }, Color.fromHSLA({ h: 300, s: 1, l: 0.5, a: 1 }).toRGBA());
		assert.deepEqual({ r: 192, g: 192, b: 192, a: 1 }, Color.fromHSLA({ h: 0, s: 0, l: 0.753, a: 1 }).toRGBA());
		assert.deepEqual({ r: 128, g: 128, b: 128, a: 1 }, Color.fromHSLA({ h: 0, s: 0, l: 0.502, a: 1 }).toRGBA());
		assert.deepEqual({ r: 128, g: 0, b: 0, a: 1 }, Color.fromHSLA({ h: 0, s: 1, l: 0.251, a: 1 }).toRGBA());
		assert.deepEqual({ r: 128, g: 128, b: 0, a: 1 }, Color.fromHSLA({ h: 60, s: 1, l: 0.251, a: 1 }).toRGBA());
		assert.deepEqual({ r: 0, g: 128, b: 0, a: 1 }, Color.fromHSLA({ h: 120, s: 1, l: 0.251, a: 1 }).toRGBA());
		assert.deepEqual({ r: 128, g: 0, b: 128, a: 1 }, Color.fromHSLA({ h: 300, s: 1, l: 0.251, a: 1 }).toRGBA());
		assert.deepEqual({ r: 0, g: 128, b: 128, a: 1 }, Color.fromHSLA({ h: 180, s: 1, l: 0.251, a: 1 }).toRGBA());
		assert.deepEqual({ r: 0, g: 0, b: 128, a: 1 }, Color.fromHSLA({ h: 240, s: 1, l: 0.251, a: 1 }).toRGBA());
	});
	// Hex parsing over the same palette.
	test('hex2rgba', function () {
		assert.deepEqual({ r: 0, g: 0, b: 0, a: 1 }, Color.fromHex('#000000').toRGBA());
		assert.deepEqual({ r: 255, g: 255, b: 255, a: 1 }, Color.fromHex('#FFFFFF').toRGBA());
		assert.deepEqual({ r: 255, g: 0, b: 0, a: 1 }, Color.fromHex('#FF0000').toRGBA());
		assert.deepEqual({ r: 0, g: 255, b: 0, a: 1 }, Color.fromHex('#00FF00').toRGBA());
		assert.deepEqual({ r: 0, g: 0, b: 255, a: 1 }, Color.fromHex('#0000FF').toRGBA());
		assert.deepEqual({ r: 255, g: 255, b: 0, a: 1 }, Color.fromHex('#FFFF00').toRGBA());
		assert.deepEqual({ r: 0, g: 255, b: 255, a: 1 }, Color.fromHex('#00FFFF').toRGBA());
		assert.deepEqual({ r: 255, g: 0, b: 255, a: 1 }, Color.fromHex('#FF00FF').toRGBA());
		assert.deepEqual({ r: 192, g: 192, b: 192, a: 1 }, Color.fromHex('#C0C0C0').toRGBA());
		assert.deepEqual({ r: 128, g: 128, b: 128, a: 1 }, Color.fromHex('#808080').toRGBA());
		assert.deepEqual({ r: 128, g: 0, b: 0, a: 1 }, Color.fromHex('#800000').toRGBA());
		assert.deepEqual({ r: 128, g: 128, b: 0, a: 1 }, Color.fromHex('#808000').toRGBA());
		assert.deepEqual({ r: 0, g: 128, b: 0, a: 1 }, Color.fromHex('#008000').toRGBA());
		assert.deepEqual({ r: 128, g: 0, b: 128, a: 1 }, Color.fromHex('#800080').toRGBA());
		assert.deepEqual({ r: 0, g: 128, b: 128, a: 1 }, Color.fromHex('#008080').toRGBA());
		assert.deepEqual({ r: 0, g: 0, b: 128, a: 1 }, Color.fromHex('#000080').toRGBA());
	});
	test('isLighterColor', function () {
		let color1 = Color.fromHSLA({ h: 60, s: 1, l: 0.5, a: 1 }), color2 = Color.fromHSLA({ h: 0, s: 0, l: 0.753, a: 1 });
		assert.ok(color1.isLighterThan(color2));
		// Abyss theme
		assert.ok(Color.fromHex('#770811').isLighterThan(Color.fromHex('#000c18')));
	});
	// The higher the factor, the further the result moves toward white.
	test('getLighterColor', function () {
		let color1 = Color.fromHSLA({ h: 60, s: 1, l: 0.5, a: 1 }), color2 = Color.fromHSLA({ h: 0, s: 0, l: 0.753, a: 1 });
		assert.deepEqual(color1.toHSLA(), Color.getLighterColor(color1, color2).toHSLA());
		assert.deepEqual({ h: 0, s: 0, l: 0.914, a: 1 }, Color.getLighterColor(color2, color1).toHSLA());
		assert.deepEqual({ h: 0, s: 0, l: 0.851, a: 1 }, Color.getLighterColor(color2, color1, 0.3).toHSLA());
		assert.deepEqual({ h: 0, s: 0, l: 0.98, a: 1 }, Color.getLighterColor(color2, color1, 0.7).toHSLA());
		assert.deepEqual({ h: 0, s: 0, l: 1, a: 1 }, Color.getLighterColor(color2, color1, 1).toHSLA());
	});
	test('isDarkerColor', function () {
		let color1 = Color.fromHSLA({ h: 60, s: 1, l: 0.5, a: 1 }), color2 = Color.fromHSLA({ h: 0, s: 0, l: 0.753, a: 1 });
		assert.ok(color2.isDarkerThan(color1));
	});
	// The higher the factor, the further the result moves toward black.
	test('getDarkerColor', function () {
		let color1 = Color.fromHSLA({ h: 60, s: 1, l: 0.5, a: 1 }), color2 = Color.fromHSLA({ h: 0, s: 0, l: 0.753, a: 1 });
		assert.deepEqual(color2.toHSLA(), Color.getDarkerColor(color2, color1).toHSLA());
		assert.deepEqual({ h: 60, s: 1, l: 0.392, a: 1 }, Color.getDarkerColor(color1, color2).toHSLA());
		assert.deepEqual({ h: 60, s: 1, l: 0.435, a: 1 }, Color.getDarkerColor(color1, color2, 0.3).toHSLA());
		assert.deepEqual({ h: 60, s: 1, l: 0.349, a: 1 }, Color.getDarkerColor(color1, color2, 0.7).toHSLA());
		assert.deepEqual({ h: 60, s: 1, l: 0.284, a: 1 }, Color.getDarkerColor(color1, color2, 1).toHSLA());
		// Abyss theme
		assert.deepEqual({ h: 355, s: 0.874, l: 0.157, a: 1 }, Color.getDarkerColor(Color.fromHex('#770811'), Color.fromHex('#000c18'), 0.4).toHSLA());
	});
	test('luminosity', function () {
		assert.deepEqual(0, Color.fromRGBA({ r: 0, g: 0, b: 0, a: 1 }).getLuminosity());
		assert.deepEqual(1, Color.fromRGBA({ r: 255, g: 255, b: 255, a: 1 }).getLuminosity());
		assert.deepEqual(0.2126, Color.fromRGBA({ r: 255, g: 0, b: 0, a: 1 }).getLuminosity());
		assert.deepEqual(0.7152, Color.fromRGBA({ r: 0, g: 255, b: 0, a: 1 }).getLuminosity());
		assert.deepEqual(0.0722, Color.fromRGBA({ r: 0, g: 0, b: 255, a: 1 }).getLuminosity());
		assert.deepEqual(0.9278, Color.fromRGBA({ r: 255, g: 255, b: 0, a: 1 }).getLuminosity());
		assert.deepEqual(0.7874, Color.fromRGBA({ r: 0, g: 255, b: 255, a: 1 }).getLuminosity());
		assert.deepEqual(0.2848, Color.fromRGBA({ r: 255, g: 0, b: 255, a: 1 }).getLuminosity());
		assert.deepEqual(0.5271, Color.fromRGBA({ r: 192, g: 192, b: 192, a: 1 }).getLuminosity());
		assert.deepEqual(0.2159, Color.fromRGBA({ r: 128, g: 128, b: 128, a: 1 }).getLuminosity());
		assert.deepEqual(0.0459, Color.fromRGBA({ r: 128, g: 0, b: 0, a: 1 }).getLuminosity());
		assert.deepEqual(0.2003, Color.fromRGBA({ r: 128, g: 128, b: 0, a: 1 }).getLuminosity());
		assert.deepEqual(0.1544, Color.fromRGBA({ r: 0, g: 128, b: 0, a: 1 }).getLuminosity());
		assert.deepEqual(0.0615, Color.fromRGBA({ r: 128, g: 0, b: 128, a: 1 }).getLuminosity());
		assert.deepEqual(0.17, Color.fromRGBA({ r: 0, g: 128, b: 128, a: 1 }).getLuminosity());
		assert.deepEqual(0.0156, Color.fromRGBA({ r: 0, g: 0, b: 128, a: 1 }).getLuminosity());
	});
	// NOTE(review): this test is a byte-for-byte copy of 'luminosity' above —
	// it only calls getLuminosity and never exercises any contrast API.
	// Confirm intent and replace with assertions against the actual contrast
	// method of Color.
	test('contrast', function () {
		assert.deepEqual(0, Color.fromRGBA({ r: 0, g: 0, b: 0, a: 1 }).getLuminosity());
		assert.deepEqual(1, Color.fromRGBA({ r: 255, g: 255, b: 255, a: 1 }).getLuminosity());
		assert.deepEqual(0.2126, Color.fromRGBA({ r: 255, g: 0, b: 0, a: 1 }).getLuminosity());
		assert.deepEqual(0.7152, Color.fromRGBA({ r: 0, g: 255, b: 0, a: 1 }).getLuminosity());
		assert.deepEqual(0.0722, Color.fromRGBA({ r: 0, g: 0, b: 255, a: 1 }).getLuminosity());
		assert.deepEqual(0.9278, Color.fromRGBA({ r: 255, g: 255, b: 0, a: 1 }).getLuminosity());
		assert.deepEqual(0.7874, Color.fromRGBA({ r: 0, g: 255, b: 255, a: 1 }).getLuminosity());
		assert.deepEqual(0.2848, Color.fromRGBA({ r: 255, g: 0, b: 255, a: 1 }).getLuminosity());
		assert.deepEqual(0.5271, Color.fromRGBA({ r: 192, g: 192, b: 192, a: 1 }).getLuminosity());
		assert.deepEqual(0.2159, Color.fromRGBA({ r: 128, g: 128, b: 128, a: 1 }).getLuminosity());
		assert.deepEqual(0.0459, Color.fromRGBA({ r: 128, g: 0, b: 0, a: 1 }).getLuminosity());
		assert.deepEqual(0.2003, Color.fromRGBA({ r: 128, g: 128, b: 0, a: 1 }).getLuminosity());
		assert.deepEqual(0.1544, Color.fromRGBA({ r: 0, g: 128, b: 0, a: 1 }).getLuminosity());
		assert.deepEqual(0.0615, Color.fromRGBA({ r: 128, g: 0, b: 128, a: 1 }).getLuminosity());
		assert.deepEqual(0.17, Color.fromRGBA({ r: 0, g: 128, b: 128, a: 1 }).getLuminosity());
		assert.deepEqual(0.0156, Color.fromRGBA({ r: 0, g: 0, b: 128, a: 1 }).getLuminosity());
	});
});
import { Inject, Injectable, isDevMode, OnDestroy, Optional } from '@angular/core';
import { Params } from '@angular/router';
import { EMPTY, forkJoin, from, Observable, Subject, zip } from 'rxjs';
import {
catchError,
concatMap,
debounceTime,
distinctUntilChanged,
filter,
map,
startWith,
switchMap,
takeUntil,
tap
} from 'rxjs/operators';
import { compareParamMaps, filterParamMap, isMissing, isPresent, NOP } from '../util';
import { QueryParamGroup } from '../model/query-param-group';
import { MultiQueryParam, PartitionedQueryParam, QueryParam } from '../model/query-param';
import { NGQP_ROUTER_ADAPTER, NGQP_ROUTER_OPTIONS, RouterAdapter, RouterOptions } from '../router-adapter/router-adapter.interface';
import { QueryParamAccessor } from './query-param-accessor.interface';
/**
 * Type guard: a parameter is multi-valued iff its `multi` flag is set.
 *
 * @internal
 */
function isMultiQueryParam<T>(queryParam: QueryParam<T> | MultiQueryParam<T>): queryParam is MultiQueryParam<T> {
    const { multi } = queryParam;
    return multi;
}
/**
 * One navigation request: the query parameters to apply, plus whether the
 * navigation was triggered programmatically ("synthetic") rather than by
 * the user.
 *
 * @internal
 */
class NavigationData {

    /** Query parameters to navigate to. */
    public params: Params;

    /** True when this navigation originates from the library itself. */
    public synthetic: boolean;

    constructor(params: Params, synthetic: boolean = false) {
        this.params = params;
        this.synthetic = synthetic;
    }

}
/**
 * Service implementing the synchronization logic
 *
 * This service is the key to the synchronization process by binding a {@link QueryParamGroup}
 * to the router.
 *
 * @internal
 */
@Injectable()
export class QueryParamGroupService implements OnDestroy {

    /** The {@link QueryParamGroup} to bind. */
    private queryParamGroup: QueryParamGroup | null = null;

    /** List of {@link QueryParamAccessor} registered to this service. */
    private directives = new Map<string, QueryParamAccessor[]>();

    /**
     * Queue of navigation parameters
     *
     * A queue is used for navigations as we need to make sure all parameter changes
     * are executed in sequence as otherwise navigations might overwrite each other.
     */
    private queue$ = new Subject<NavigationData>();

    /** @ignore */
    private synchronizeRouter$ = new Subject<void>();

    /** @ignore */
    private destroy$ = new Subject<void>();

    constructor(
        @Inject(NGQP_ROUTER_ADAPTER) private routerAdapter: RouterAdapter,
        @Optional() @Inject(NGQP_ROUTER_OPTIONS) private globalRouterOptions: RouterOptions
    ) {
        this.setupNavigationQueue();
    }

    /** @ignore */
    public ngOnDestroy() {
        this.destroy$.next();
        this.destroy$.complete();
        this.synchronizeRouter$.complete();
        this.queryParamGroup?._clearChangeFunctions();
        if (this.queryParamGroup?.options?.clearOnDestroy) {
            // Build { urlParam: null } for every URL parameter in the group so a
            // final replace-navigation removes them from the URL.
            const nullParams = Object.values(this.queryParamGroup.queryParams)
                .map(queryParam => this.wrapIntoPartition(queryParam))
                .map(partitionedQueryParam => partitionedQueryParam.queryParams.map(queryParam => queryParam.urlParam))
                .reduce((a, b) => [...a, ...b], [])
                .map(urlParam => ({[urlParam]: null}))
                .reduce((a, b) => ({...a, ...b}), {});
            // Not awaited: the service is being destroyed, so there is nothing
            // left to do once the navigation completes.
            this.routerAdapter.navigate(nullParams, {
                replaceUrl: true,
            }).then();
        }
    }

    /**
     * Uses the given {@link QueryParamGroup} for synchronization.
     */
    public setQueryParamGroup(queryParamGroup: QueryParamGroup): void {
        // FIXME: If this is called when we already have a group, we probably need to do
        //        some cleanup first.
        if (this.queryParamGroup) {
            throw new Error(`A QueryParamGroup has already been setup. Changing the group is currently not supported.`);
        }

        this.queryParamGroup = queryParamGroup;
        this.startSynchronization();
    }

    /**
     * Registers a {@link QueryParamAccessor}.
     */
    public registerQueryParamDirective(directive: QueryParamAccessor): void {
        if (!directive.valueAccessor) {
            throw new Error(`No value accessor found for the form control. Please make sure to implement ControlValueAccessor on this component.`);
        }

        // Capture the name here, particularly for the queue below to avoid re-evaluating
        // it as it might change over time.
        const queryParamName = directive.name;
        const partitionedQueryParam = this.getQueryParamAsPartition(queryParamName);

        // Chances are that we read the initial route before a directive has been registered here.
        // The value in the model will be correct, but we need to sync it to the view once initially.
        directive.valueAccessor.writeValue(partitionedQueryParam.value);

        // Proxy updates from the view to debounce them (if needed).
        // One queue per partition; zip recombines so a value is forwarded only
        // once every part has emitted, each debounced by its own setting.
        const queues = partitionedQueryParam.queryParams.map(() => new Subject<unknown>());
        zip(
            ...queues.map((queue$, index) => {
                const queryParam = partitionedQueryParam.queryParams[index];
                return queue$.pipe(
                    isPresent(queryParam.debounceTime) ? debounceTime(queryParam.debounceTime) : tap(),
                );
            })
        ).pipe(
            // Do not synchronize while the param is detached from the group
            filter(() => !!this.getQueryParamGroup().get(queryParamName)),
            map((newValue: unknown[]) => this.getParamsForValue(partitionedQueryParam, partitionedQueryParam.reduce(newValue))),
            takeUntil(this.destroy$),
        ).subscribe(params => this.enqueueNavigation(new NavigationData(params)));

        directive.valueAccessor.registerOnChange((newValue: unknown) => {
            const partitioned = partitionedQueryParam.partition(newValue);
            queues.forEach((queue$, index) => queue$.next(partitioned[index]));
        });

        this.directives.set(queryParamName, [...(this.directives.get(queryParamName) || []), directive]);
    }

    /**
     * Deregisters a {@link QueryParamAccessor} by referencing its name.
     */
    public deregisterQueryParamDirective(queryParamName: string): void {
        if (!queryParamName) {
            return;
        }

        const directives = this.directives.get(queryParamName);
        if (!directives) {
            return;
        }

        // Detach the accessors' callbacks so stale directives no longer feed the queue.
        directives.forEach(directive => {
            directive.valueAccessor.registerOnChange(NOP);
            directive.valueAccessor.registerOnTouched(NOP);
        });

        this.directives.delete(queryParamName);
        const queryParam = this.getQueryParamGroup().get(queryParamName);
        if (queryParam) {
            queryParam._clearChangeFunctions();
        }
    }

    // Wires up all model->router and router->model listeners for the bound group.
    private startSynchronization() {
        this.setupGroupChangeListener();
        this.setupParamChangeListeners();
        this.setupRouterListener();

        this.watchNewParams();
    }

    /** Listens for programmatic changes on group level and synchronizes to the router. */
    private setupGroupChangeListener(): void {
        this.getQueryParamGroup()._registerOnChange((newValue: Record<string, unknown> | null) => {
            if (newValue === null) {
                throw new Error(`Received null value from QueryParamGroup.`);
            }

            let params: Params = {};
            Object.keys(newValue).forEach(queryParamName => {
                const queryParam = this.getQueryParamGroup().get(queryParamName);
                if (isMissing(queryParam)) {
                    return;
                }

                params = { ...params, ...this.getParamsForValue(queryParam, newValue[ queryParamName ]) };
            });

            // Synthetic: this navigation originates from the model, not the user.
            this.enqueueNavigation(new NavigationData(params, true));
        });
    }

    /** Listens for programmatic changes on parameter level and synchronizes to the router. */
    private setupParamChangeListeners(): void {
        Object.keys(this.getQueryParamGroup().queryParams)
            .forEach(queryParamName => this.setupParamChangeListener(queryParamName));
    }

    // Registers a change callback for a single parameter which enqueues a
    // synthetic navigation with that parameter's serialized value.
    private setupParamChangeListener(queryParamName: string): void {
        const queryParam = this.getQueryParamGroup().get(queryParamName);
        if (!queryParam) {
            throw new Error(`No param in group found for name ${queryParamName}`);
        }

        queryParam._registerOnChange((newValue: unknown) =>
            this.enqueueNavigation(new NavigationData(this.getParamsForValue(queryParam, newValue), true))
        );
    }

    /** Listens for changes in the router and synchronizes to the model. */
    private setupRouterListener(): void {
        this.synchronizeRouter$.pipe(
            startWith(undefined),
            switchMap(() => this.routerAdapter.queryParamMap.pipe(
                // We want to ignore changes to query parameters which aren't related to this
                // particular group; however, we do need to react if one of our parameters has
                // vanished when it was set before.
                distinctUntilChanged((previousMap, currentMap) => {
                    const keys = Object.values(this.getQueryParamGroup().queryParams)
                        .map(queryParam => this.wrapIntoPartition(queryParam))
                        .map(partitionedQueryParam => partitionedQueryParam.queryParams.map(queryParam => queryParam.urlParam))
                        .reduce((a, b) => [...a, ...b], []);

                    // It is important that we filter the maps only here so that both are filtered
                    // with the same set of keys; otherwise, e.g. removing a parameter from the group
                    // would interfere.
                    return compareParamMaps(filterParamMap(previousMap, keys), filterParamMap(currentMap, keys));
                }),
            )),
            switchMap(queryParamMap => {
                // We need to capture this right here since this is only set during the on-going navigation.
                const synthetic = this.isSyntheticNavigation();
                const queryParamNames = Object.keys(this.getQueryParamGroup().queryParams);

                // NOTE(review): spreading arguments into forkJoin (here and below) is
                // the deprecated rest-argument signature; consider passing an array
                // instead — verify against the RxJS version in use.
                return forkJoin<Record<string, unknown>>(...queryParamNames
                    .map(queryParamName => {
                        const partitionedQueryParam = this.getQueryParamAsPartition(queryParamName);

                        return forkJoin<unknown>(...partitionedQueryParam.queryParams
                            .map(queryParam => isMultiQueryParam<unknown>(queryParam)
                                ? queryParam.deserializeValue(queryParamMap.getAll(queryParam.urlParam))
                                : queryParam.deserializeValue(queryParamMap.get(queryParam.urlParam))
                            )
                        ).pipe(
                            map(newValues => partitionedQueryParam.reduce(newValues)),
                            // Push the deserialized value into every registered view accessor.
                            tap(newValue => {
                                const directives = this.directives.get(queryParamName);
                                if (directives) {
                                    directives.forEach(directive => directive.valueAccessor.writeValue(newValue));
                                }
                            }),
                            map(newValue => {
                                return { [ queryParamName ]: newValue };
                            }),
                            takeUntil(this.destroy$),
                        );
                    })
                ).pipe(
                    // Merge the per-parameter objects into a single group value.
                    map((values: Record<string, unknown>[]) => values.reduce((groupValue, value) => {
                        return {
                            ...groupValue,
                            ...value,
                        };
                    }, {})),
                    // Suppress value-change events for navigations we triggered ourselves.
                    tap(groupValue => this.getQueryParamGroup().setValue(groupValue, {
                        emitEvent: !synthetic,
                        emitModelToViewChange: false,
                    })),
                );
            }),
            takeUntil(this.destroy$),
        ).subscribe();
    }

    /** Listens for newly added parameters and starts synchronization for them. */
    private watchNewParams(): void {
        this.getQueryParamGroup().queryParamAdded$.pipe(
            takeUntil(this.destroy$)
        ).subscribe(queryParamName => {
            this.setupParamChangeListener(queryParamName);
            // Re-subscribe the router listener so the new parameter is picked up.
            this.synchronizeRouter$.next();
        });
    }

    /** Returns true if the current navigation is synthetic. */
    // NOTE(review): the final return hands back the raw state value, so callers
    // may actually receive undefined despite the boolean signature. Harmless for
    // truthiness checks, but worth tightening with a double negation.
    private isSyntheticNavigation(): boolean {
        const navigation = this.routerAdapter.getCurrentNavigation();
        if (!navigation || navigation.trigger !== 'imperative') {
            // When using the back / forward buttons, the state is passed along with it, even though
            // for us it's now a navigation initiated by the user. Therefore, a navigation can only
            // be synthetic if it has been triggered imperatively.
            // See https://github.com/angular/angular/issues/28108.
            return false;
        }

        return navigation.extras && navigation.extras.state && navigation.extras.state['synthetic'];
    }

    /** Subscribes to the parameter queue and executes navigations in sequence. */
    private setupNavigationQueue() {
        this.queue$.pipe(
            takeUntil(this.destroy$),
            // concatMap guarantees one navigation finishes before the next starts.
            concatMap(data => this.navigateSafely(data)),
        ).subscribe();
    }

    // Runs a single navigation, swallowing (but logging in dev mode) any error
    // so one failed navigation does not kill the queue subscription.
    private navigateSafely(data: NavigationData): Observable<boolean> {
        return from(this.routerAdapter.navigate(data.params, {
            ...this.routerOptions,
            state: { synthetic: data.synthetic },
        })).pipe(
            catchError((err: unknown) => {
                if (isDevMode()) {
                    console.error(`There was an error while navigating`, err);
                }

                return EMPTY;
            })
        );
    }

    /** Sends a change of parameters to the queue. */
    private enqueueNavigation(data: NavigationData): void {
        this.queue$.next(data);
    }

    /**
     * Returns the full set of parameters given a value for a parameter model.
     *
     * This consists mainly of properly serializing the model value and ensuring to take
     * side effect changes into account that may have been configured.
     */
    private getParamsForValue(queryParam: QueryParam<unknown> | MultiQueryParam<unknown> | PartitionedQueryParam<unknown>, value: any): Params {
        const partitionedQueryParam = this.wrapIntoPartition(queryParam);
        const partitioned = partitionedQueryParam.partition(value);

        const combinedParams = partitionedQueryParam.queryParams
            .map((current, index) => isMissing(current.combineWith) ? null : current.combineWith(partitioned[index] as any))
            .reduce((a, b) => {
                return { ...(a || {}), ...(b || {}) };
            }, {});

        const newValues = partitionedQueryParam.queryParams
            .map((current, index) => {
                return {
                    [ current.urlParam ]: current.serializeValue(partitioned[index] as any),
                };
            })
            .reduce((a, b) => {
                return { ...a, ...b };
            }, {});

        // Note that we list the side-effect parameters first so that our actual parameter can't be
        // overridden by it.
        return {
            ...combinedParams,
            ...newValues,
        };
    }

    /**
     * Returns the current set of options to pass to the router.
     *
     * This merges the global configuration with the group specific configuration.
     */
    private get routerOptions(): RouterOptions {
        const groupOptions = this.getQueryParamGroup().routerOptions;

        return {
            ...(this.globalRouterOptions || {}),
            ...(groupOptions || {}),
        };
    }

    /**
     * Returns the query parameter with the given name as a partition.
     *
     * If the query parameter is partitioned, it is returned unchanged. Otherwise
     * it is wrapped into a noop partition. This makes it easy to operate on
     * query parameters independent of whether they are partitioned.
     */
    private getQueryParamAsPartition(queryParamName: string): PartitionedQueryParam<unknown> {
        const queryParam = this.getQueryParamGroup().get(queryParamName);
        if (!queryParam) {
            throw new Error(`Could not find query param with name ${queryParamName}. Did you forget to add it to your QueryParamGroup?`);
        }

        return this.wrapIntoPartition(queryParam);
    }

    /**
     * Wraps a query parameter into a partition if it isn't already.
     */
    private wrapIntoPartition(
        queryParam: QueryParam<unknown> | MultiQueryParam<unknown> | PartitionedQueryParam<unknown>
    ): PartitionedQueryParam<unknown> {
        if (queryParam instanceof PartitionedQueryParam) {
            return queryParam;
        }

        // Identity partition: a single part, passed through unchanged.
        return new PartitionedQueryParam<unknown>([queryParam], {
            partition: value => [value],
            reduce: values => values[0],
        });
    }

    // Accessor guarding against use before setQueryParamGroup() was called.
    private getQueryParamGroup(): QueryParamGroup {
        if (!this.queryParamGroup) {
            throw new Error(`No QueryParamGroup has been registered yet.`);
        }

        return this.queryParamGroup;
    }

}
import { TextMetrics, TextStyle } from '@pixi/text';
import { expect } from 'chai';
/**
 * Fonts render slightly differently between platforms so tests that depend on a specific
 * widths or breaking of words may not be cross-platform
 */
/* eslint-disable no-multi-str */
// Long Latin filler text used to exercise plain word wrapping.
const longText = 'Sed ut perspiciatis unde omnis iste natus error sit voluptatem \
accusantium doloremque laudantium, totam rem aperiam, eaque ipsa quae ab illo \
inventore veritatis et quasi architecto beatae vitae dicta sunt explicabo. Nemo \
enim ipsam voluptatem quia voluptas sit aspernatur aut odit aut fugit, sed quia \
consequuntur magni dolores eos qui ratione voluptatem sequi nesciunt. Neque porro \
quisquam est, qui dolorem ipsum quia dolor sit amet, consectetur, adipisci velit, \
sed quia non numquam eius modi tempora incidunt ut labore et dolore magnam aliquam \
quaerat voluptatem. Ut enim ad minima veniam, quis nostrum exercitationem ullam \
corporis suscipit laboriosam, nisi ut aliquid ex ea commodi consequatur? Quis \
autem vel eum iure reprehenderit qui in ea voluptate velit esse quam nihil \
molestiae consequatur, vel illum qui dolorem eum fugiat quo voluptas nulla \
pariatur?';
/* eslint-disable max-len */
// Mixes leading/trailing spaces, unicode spaces and explicit newlines to test
// whitespace collapsing and wrapping behaviour.
const spaceNewLineText = ' Should have\u0009space\u2003at the\u2000beginning of the line.\n  And 3 more here. But after that there should be no\u3000more ridiculous spaces at the beginning of lines. And none at the end. And all this text is just to check the wrapping abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz. I \u2665 text. 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 ';
// Contains a word far longer than the wrap width, forcing break-or-overflow.
const breakingWordText = 'Pixi.js - The HTML5 Creation Engine. Create beautiful digital content with the supercalifragilisticexpialidociously fastest, most flexible 2D WebGL renderer.';
// Many short tokens; lines should fill close to the wrap width.
const fillText = '. . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . ';
// Digits-only text used to verify no characters are lost when breaking words.
const intergityText = '012345678901234567890123456789';
// Unicode spaces that must NOT cause line breaks.
const nonBreakingSpaces = ['\u00A0', '\u2007', '\u202F'];
// Unicode spaces that MAY cause line breaks.
const breakingSpaces = [
    '\u0009',
    '\u0020',
    '\u2000',
    '\u2001',
    '\u2002',
    '\u2003',
    '\u2004',
    '\u2005',
    '\u2006',
    '\u2008',
    '\u2009',
    '\u200A',
    '\u205F',
    '\u3000',
];
describe('TextMetrics', function ()
{
    // Shared base style; individual tests clone it via Object.assign and
    // override only the properties under test (never mutate it directly).
    const defaultStyle = {
        breakWords: true,
        fontFamily: 'Arial',
        fontSize: 20,
        fontStyle: 'italic',
        fontVariant: 'normal',
        fontWeight: 900,
        wordWrap: true,
        wordWrapWidth: 200,
        letterSpacing: 4,
    };
describe('wordWrap without breakWords', function ()
{
it('width should not be greater than wordWrapWidth with longText', function ()
{
// On Windows 'exercitationem' renders to about 217px, bigger wrap width required for this test to be valid on every platform
const style = Object.assign({}, defaultStyle, { wordWrapWidth: 220, breakWords: false });
const metrics = TextMetrics.measureText(longText, new TextStyle(style));
expect(metrics.width).to.be.below(style.wordWrapWidth);
metrics.lines.forEach((line) =>
{
expect(line).to.not.contain(' ', 'should not have multiple spaces in a row');
expect(line[0]).to.not.equal(' ', 'should not have space at the start');
expect(line[line - 1]).to.not.equal(' ', 'should not have space at the end');
});
});
it('width should be greater than wordWrapWidth with breakingWordText', function ()
{
const style = Object.assign({}, defaultStyle, { breakWords: false });
const metrics = TextMetrics.measureText(breakingWordText, new TextStyle(style));
expect(metrics.width).to.be.above(style.wordWrapWidth);
metrics.lines.forEach((line) =>
{
expect(line[0]).to.not.equal(' ', 'should not have space at the start');
expect(line[line - 1]).to.not.equal(' ', 'should not have space at the end');
});
});
it('width should be within a character width from wordWrapWidth with fillText', function ()
{
const charWidth = 4; // it should fill the line to at lease width -4
const style = Object.assign({}, defaultStyle, { breakWords: false });
const metrics = TextMetrics.measureText(fillText, new TextStyle(style));
expect(metrics.width).to.be.below(style.wordWrapWidth);
expect(metrics.width + charWidth).to.be.above(style.wordWrapWidth);
metrics.lines.forEach((line) =>
{
expect(line[0]).to.not.equal(' ', 'should not have space at the start');
expect(line[line - 1]).to.not.equal(' ', 'should not have space at the end');
});
});
it('width should be greater than wordWrapWidth and should format correct spaces', function ()
{
const style = Object.assign({}, defaultStyle, { breakWords: false });
const metrics = TextMetrics.measureText(spaceNewLineText, new TextStyle(style));
expect(metrics.width).to.be.above(style.wordWrapWidth);
expect(metrics.lines[0][0]).to.equal(' ', '1st line should start with a space');
expect(metrics.lines[4][0]).to.equal(' ', '5th line should start with 3 spaces (1)');
expect(metrics.lines[4][1]).to.equal(' ', '5th line should start with 3 spaces (2)');
expect(metrics.lines[4][2]).to.equal(' ', '5th line should start with 3 spaces (3)');
expect(metrics.lines[4][3]).to.not.equal(' ', '5th line should not have a space as the 4th char');
metrics.lines.forEach((line, i) =>
{
if (i !== 0 && i !== 4)
{
expect(metrics.lines[1][0]).to.not.equal(' ', 'all lines except 1 & 5 should not have space at the start');
}
expect(line[line - 1]).to.not.equal(' ', 'no lines should have a space at the end');
});
});
it('should be able to override wordWrap to false in measureText', function ()
{
const metrics = TextMetrics.measureText(longText, new TextStyle(defaultStyle), false);
expect(metrics.lines.length).to.equal(1);
});
});
describe('wordWrap with breakWords', function ()
{
it('width should not be greater than wordWrapWidth with longText', function ()
{
const style = Object.assign({}, defaultStyle, { breakWords: true });
const metrics = TextMetrics.measureText(longText, new TextStyle(style));
expect(metrics.width).to.be.below(style.wordWrapWidth);
metrics.lines.forEach((line) =>
{
expect(line[0]).to.not.equal(' ', 'should not have space at the start');
expect(line[line - 1]).to.not.equal(' ', 'should not have space at the end');
});
});
it('width should not be greater than wordWrapWidth with breakingWordAtStartText', function ()
{
const style = Object.assign({}, defaultStyle, { breakWords: true });
const metrics = TextMetrics.measureText(breakingWordText, new TextStyle(style));
expect(metrics.width).to.be.below(style.wordWrapWidth);
metrics.lines.forEach((line) =>
{
expect(line[0]).to.not.equal(' ', 'should not have space at the start');
expect(line[line - 1]).to.not.equal(' ', 'should not have space at the end');
});
});
it('width should be within a character width from wordWrapWidth with fillText', function ()
{
const charWidth = 4; // it should fill the line to at lease width -4
const style = Object.assign({}, defaultStyle, { breakWords: true });
const metrics = TextMetrics.measureText(fillText, new TextStyle(style));
expect(metrics.width).to.be.below(style.wordWrapWidth);
expect(metrics.width + charWidth).to.be.above(style.wordWrapWidth);
metrics.lines.forEach((line) =>
{
expect(line[0]).to.not.equal(' ', 'should not have space at the start');
expect(line[line - 1]).to.not.equal(' ', 'should not have space at the end');
});
});
it('no words or characters should lost or changed', function ()
{
const style = Object.assign({}, defaultStyle, { breakWords: true });
const metrics = TextMetrics.measureText(intergityText, new TextStyle(style));
const lines = metrics.lines.reduce((accumulator, line) => accumulator + line);
expect(lines).to.equal(intergityText, 'should have the same chars as the original text');
});
it('width should not be greater than wordWrapWidth and should format correct spaces', function ()
{
const style = Object.assign({}, defaultStyle, { breakWords: true });
const metrics = TextMetrics.measureText(spaceNewLineText, new TextStyle(style));
expect(metrics.width).to.be.below(style.wordWrapWidth);
expect(metrics.lines[0][0]).to.equal(' ', '1st line should start with a space');
expect(metrics.lines[4][0]).to.equal(' ', '5th line should start with 3 spaces (1)');
expect(metrics.lines[4][1]).to.equal(' ', '5th line should start with 3 spaces (2)');
expect(metrics.lines[4][2]).to.equal(' ', '5th line should start with 3 spaces (3)');
expect(metrics.lines[4][3]).to.not.equal(' ', '5th line should not have a space as the 4th char');
metrics.lines.forEach((line, i) =>
{
if (i !== 0 && i !== 4)
{
expect(metrics.lines[1][0]).to.not.equal(' ', 'all lines except 1 & 5 should not have space at the start');
}
expect(line[line - 1]).to.not.equal(' ', 'no lines should have a space at the end');
});
});
});
describe('wordWrap misc', function ()
{
const originalSplit = TextMetrics.wordWrapSplit;
afterEach(function ()
{
TextMetrics.wordWrapSplit = originalSplit;
});
it('should use configuration callback to split a token', () =>
{
let wasSplitCalled = false;
TextMetrics.wordWrapSplit = (token) =>
{
wasSplitCalled = true;
expect(token).to.equal('testword1234567890abcd!');
return ['s', 'p', 'l', 'i', 't'];
};
const brokenText = TextMetrics.wordWrap('testword1234567890abcd!', new TextStyle(defaultStyle));
expect(wasSplitCalled).to.equal(true);
expect(brokenText).to.equal('split');
});
});
describe('whiteSpace `normal` without breakWords', function ()
{
it('multiple spaces should be collapsed to 1 and but not newlines', function ()
{
const style = Object.assign({}, defaultStyle, { breakWords: false, whiteSpace: 'normal' });
const metrics = TextMetrics.measureText(spaceNewLineText, new TextStyle(style));
expect(metrics.width).to.be.above(style.wordWrapWidth);
metrics.lines.forEach((line) =>
{
expect(line).to.not.contain(' ', 'should not have multiple spaces in a row');
expect(line[0]).to.not.equal(' ', 'all lines should not have space at the start');
expect(line[line - 1]).to.not.equal(' ', 'no lines should have a space at the end');
});
});
it('text is wrapped in a platform-specific way', function ()
{
if (process.platform === 'win32')
{
this.skip();
return;
}
const style = Object.assign({}, defaultStyle, { breakWords: false, whiteSpace: 'normal' });
const metrics = TextMetrics.measureText(spaceNewLineText, new TextStyle(style));
expect(metrics.lines[0][0]).to.equal('S', '1st line should not start with a space');
expect(metrics.lines[4][0]).to.equal('m', '5th line should not start with 3 spaces (1)');
expect(metrics.lines[4][1]).to.equal('o', '5th line should not start with 3 spaces (2)');
expect(metrics.lines[4][2]).to.equal('r', '5th line should not start with 3 spaces (3)');
expect(metrics.lines[17][0]).to.equal('a', '17th line should not have wrapped');
});
});
describe('whiteSpace `pre-line` without breakWords', function ()
{
it('multiple spaces should be collapsed to 1 but not newlines', function ()
{
const style = Object.assign({}, defaultStyle, { breakWords: false, whiteSpace: 'pre-line' });
const metrics = TextMetrics.measureText(spaceNewLineText, new TextStyle(style));
expect(metrics.width).to.be.above(style.wordWrapWidth);
expect(metrics.lines[0][0]).to.equal('S', '1st line should not start with a space');
expect(metrics.lines[4][0]).to.equal('A', '5th line should not start with 3 spaces (1)');
expect(metrics.lines[4][1]).to.equal('n', '5th line should not start with 3 spaces (2)');
expect(metrics.lines[4][2]).to.equal('d', '5th line should not start with 3 spaces (3)');
expect(metrics.lines[17][0]).to.equal('t', '17th line should have wrapped');
metrics.lines.forEach((line) =>
{
expect(line[0]).to.not.equal(' ', 'all lines should not have space at the start');
expect(line[line - 1]).to.not.equal(' ', 'no lines should have a space at the end');
});
});
});
describe('whiteSpace `normal` with breakWords', function ()
{
it('multiple spaces should be collapsed to 1 and but not newlines', function ()
{
const style = Object.assign({}, defaultStyle, { breakWords: true, whiteSpace: 'normal' });
const metrics = TextMetrics.measureText(spaceNewLineText, new TextStyle(style));
expect(metrics.width).to.be.below(style.wordWrapWidth);
metrics.lines.forEach((line) =>
{
expect(line).to.not.contain(' ', 'should not have multiple spaces in a row');
expect(line[0]).to.not.equal(' ', 'all lines should not have space at the start');
expect(line[line - 1]).to.not.equal(' ', 'no lines should have a space at the end');
});
});
it('text is wrapped in a platform-specific way', function ()
{
if (process.platform === 'win32')
{
this.skip();
return;
}
const style = Object.assign({}, defaultStyle, { breakWords: true, whiteSpace: 'normal' });
const metrics = TextMetrics.measureText(spaceNewLineText, new TextStyle(style));
expect(metrics.lines[0][0]).to.equal('S', '1st line should not start with a space');
expect(metrics.lines[4][0]).to.equal('m', '5th line should not start with 3 spaces (1)');
expect(metrics.lines[4][1]).to.equal('o', '5th line should not start with 3 spaces (2)');
expect(metrics.lines[4][2]).to.equal('r', '5th line should not start with 3 spaces (3)');
expect(metrics.lines[17][0]).to.equal('a', '17th line should not have wrapped');
});
});
describe('whiteSpace `pre-line` with breakWords', function ()
{
it('multiple spaces should be collapsed to 1 but not newlines', function ()
{
const style = Object.assign({}, defaultStyle, { breakWords: true, whiteSpace: 'pre-line' });
const metrics = TextMetrics.measureText(spaceNewLineText, new TextStyle(style));
expect(metrics.width).to.be.below(style.wordWrapWidth);
expect(metrics.lines[0][0]).to.equal('S', '1st line should not start with a space');
expect(metrics.lines[4][0]).to.equal('A', '5th line should not start with 3 spaces (1)');
expect(metrics.lines[4][1]).to.equal('n', '5th line should not start with 3 spaces (2)');
expect(metrics.lines[4][2]).to.equal('d', '5th line should not start with 3 spaces (3)');
expect(metrics.lines[17][0]).to.equal('t', '17th line should have wrapped');
metrics.lines.forEach((line) =>
{
expect(line[0]).to.not.equal(' ', 'all lines should not have space at the start');
expect(line[line - 1]).to.not.equal(' ', 'no lines should have a space at the end');
});
});
});
describe('trimRight', function ()
{
it('string with no whitespaces to trim', function ()
{
const text = TextMetrics.trimRight('remove white spaces to the right');
expect(text).to.equal('remove white spaces to the right');
});
it('string with whitespaces to trim', function ()
{
const text = TextMetrics.trimRight('remove white spaces to the right ');
expect(text).to.equal('remove white spaces to the right');
});
it('string with strange unicode whitespaces to trim', function ()
{
const text = TextMetrics.trimRight('remove white spaces to the right\u0009\u0020\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2008\u2009\u200A\u205F\u3000');
expect(text).to.equal('remove white spaces to the right');
});
it('empty string', function ()
{
const text = TextMetrics.trimRight('');
expect(text).to.equal('');
});
it('non-string input', function ()
{
const text = TextMetrics.trimRight({});
expect(text).to.equal('');
});
});
describe('isNewline', function ()
{
it('line feed', function ()
{
const bool = TextMetrics.isNewline('\n');
expect(bool).to.equal(true);
});
it('carriage return', function ()
{
const bool = TextMetrics.isNewline('\r');
expect(bool).to.equal(true);
});
it('newline char', function ()
{
const bool = TextMetrics.isNewline('A');
expect(bool).to.equal(false);
});
it('non string', function ()
{
const bool = TextMetrics.isNewline({});
expect(bool).to.equal(false);
});
});
describe('isBreakingSpace', function ()
{
it('legit breaking spaces', function ()
{
breakingSpaces.forEach((char) =>
{
const bool = TextMetrics.isBreakingSpace(char);
expect(bool).to.equal(true);
});
});
it('non breaking spaces', function ()
{
nonBreakingSpaces.forEach((char) =>
{
const bool = TextMetrics.isBreakingSpace(char);
expect(bool).to.not.equal(true);
});
});
it('newline char', function ()
{
const bool = TextMetrics.isBreakingSpace('A');
expect(bool).to.equal(false);
});
it('non string', function ()
{
const bool = TextMetrics.isBreakingSpace({});
expect(bool).to.equal(false);
});
it('overridable breaking spaces', function ()
{
const reg = /[あいうえお]/;
const original = TextMetrics.isBreakingSpace;
// override breakingSpace
TextMetrics.isBreakingSpace = function (char, nextChar)
{
const isBreakingSpace = breakingSpaces.includes(char);
if (isBreakingSpace && nextChar)
{
return !nextChar.match(reg);
}
return isBreakingSpace;
};
breakingSpaces.forEach((char) =>
{
const bool = TextMetrics.isBreakingSpace(char, 'あ');
expect(bool).to.equal(false);
});
// reset the override breakingSpace
TextMetrics.isBreakingSpace = original;
});
});
describe('tokenize', function ()
{
it('full example', function ()
{
const arr = TextMetrics.tokenize(spaceNewLineText);
expect(arr).to.be.an('array');
expect(arr.length).to.equal(146);
expect(arr).to.not.contain('');
expect(arr).to.not.contain(null);
});
it('empty string', function ()
{
const arr = TextMetrics.tokenize('');
expect(arr).to.be.an('array');
expect(arr.length).to.equal(0);
});
it('single char', function ()
{
const arr = TextMetrics.tokenize('A');
expect(arr).to.be.an('array');
expect(arr.length).to.equal(1);
});
it('newline char', function ()
{
const arr = TextMetrics.tokenize('\n');
expect(arr).to.be.an('array');
expect(arr.length).to.equal(1);
});
it('breakingSpaces', function ()
{
const arr = TextMetrics.tokenize(breakingSpaces.join(''));
expect(arr).to.be.an('array');
expect(arr.length).to.equal(breakingSpaces.length);
});
it('non string', function ()
{
const arr = TextMetrics.tokenize({});
expect(arr).to.be.an('array');
expect(arr.length).to.equal(0);
});
});
describe('collapseSpaces', function ()
{
it('pre', function ()
{
const bool = TextMetrics.collapseSpaces('pre');
expect(bool).to.equal(false);
});
it('normal', function ()
{
const bool = TextMetrics.collapseSpaces('normal');
expect(bool).to.equal(true);
});
it('pre-line', function ()
{
const bool = TextMetrics.collapseSpaces('pre-line');
expect(bool).to.equal(true);
});
it('non matching string', function ()
{
const bool = TextMetrics.collapseSpaces('bull');
expect(bool).to.equal(false);
});
it('non string', function ()
{
const bool = TextMetrics.collapseSpaces({});
expect(bool).to.equal(false);
});
});
describe('collapseNewlines', function ()
{
it('pre', function ()
{
const bool = TextMetrics.collapseNewlines('pre');
expect(bool).to.equal(false);
});
it('normal', function ()
{
const bool = TextMetrics.collapseNewlines('normal');
expect(bool).to.equal(true);
});
it('pre-line', function ()
{
const bool = TextMetrics.collapseNewlines('pre-line');
expect(bool).to.equal(false);
});
it('non matching string', function ()
{
const bool = TextMetrics.collapseNewlines('bull');
expect(bool).to.equal(false);
});
it('non string', function ()
{
const bool = TextMetrics.collapseNewlines({});
expect(bool).to.equal(false);
});
});
describe('canBreakWords', function ()
{
it('breakWords: true', function ()
{
const bool = TextMetrics.canBreakWords('text', true);
expect(bool).to.equal(true);
});
it('breakWords: false', function ()
{
const bool = TextMetrics.canBreakWords('text', false);
expect(bool).to.equal(false);
});
});
describe('canBreakChars', function ()
{
it('should always return true', function ()
{
const bool = TextMetrics.canBreakChars();
expect(bool).to.equal(true);
});
it('should prevent breaking for all numbers', function ()
{
const style = new TextStyle({
fontFamily: 'Arial',
fontSize: 26,
fontStyle: 'italic',
fontVariant: 'normal',
fontWeight: 900,
wordWrap: true,
wordWrapWidth: 300,
letterSpacing: 4,
padding: 10,
fill: 0xffffff,
breakWords: false,
whiteSpace: 'pre-line',
});
const str = '-------0000,1111,9999------';
const reg = /^\d+$/;
TextMetrics.canBreakWords = function ()
{
return true;
};
// override breakChars
TextMetrics.canBreakChars = function (char, nextChar)
{
return !(char.match(reg) && nextChar.match(reg));
};
const metrics = TextMetrics.measureText(str, style);
expect(metrics.lines[0]).to.equal('-------0000,1111,');
expect(metrics.lines[1]).to.equal('9999------');
});
});
}); | the_stack |
import * as path from "path";
import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";
//@ts-ignore
import { createInstallationZip } from "@webiny/api-page-builder/installation";
import { defineAppModule, PulumiApp, PulumiAppModule } from "@webiny/pulumi-sdk";
import { StorageOutput, VpcConfig } from "../common";
import { createLambdaRole, getCommonLambdaEnvVariables } from "../lambdaUtils";
import { getAwsAccountId, getAwsRegion } from "../awsUtils";
/**
 * Parameters accepted by the ApiPageBuilder module.
 * `env` holds environment variables injected into every Lambda function
 * created by this module.
 */
interface PageBuilderParams {
    env: Record<string, any>;
}
// Type alias so consumers can reference the module type by name.
export type ApiPageBuilder = PulumiAppModule<typeof ApiPageBuilder>;
/**
 * Page Builder API module: uploads the installation archive to the File
 * Manager S3 bucket and provisions the settings-update, page-export and
 * page-import Lambda resource groups.
 */
export const ApiPageBuilder = defineAppModule({
    name: "ApiPageBuilder",
    config(app: PulumiApp, params: PageBuilderParams) {
        const storage = app.getModule(StorageOutput);
        app.addHandler(() => {
            const pbInstallationZipPath = path.join(path.resolve(), ".tmp", "pbInstallation.zip");
            // Will create "pbInstallation.zip" and save it in the `pbInstallationZipPath` path.
            createInstallationZip(pbInstallationZipPath);
            // Upload the generated archive into the File Manager bucket so the
            // Page Builder installation can download it at runtime.
            new aws.s3.BucketObject("./pbInstallation.zip", {
                key: "pbInstallation.zip",
                acl: "public-read",
                bucket: storage.fileManagerBucketId,
                contentType: "application/octet-stream",
                source: new pulumi.asset.FileAsset(pbInstallationZipPath)
            });
        });
        const updateSettings = createUpdateSettingsResources(app, params);
        const exportPages = createExportPagesResources(app, params);
        const importPages = createImportPagesResources(app, params);
        return {
            updateSettings,
            exportPages,
            importPages
        };
    }
});
/**
 * Creates the "update settings" Lambda function together with its IAM role
 * and DynamoDB access policy.
 */
function createUpdateSettingsResources(app: PulumiApp, params: PageBuilderParams) {
    const policy = createUpdateSettingsLambdaPolicy(app);
    const role = createLambdaRole(app, {
        name: "pb-update-settings-lambda-role",
        policy: policy.output
    });
    const update = app.addResource(aws.lambda.Function, {
        name: "pb-update-settings",
        config: {
            role: role.output.arn,
            runtime: "nodejs14.x",
            handler: "handler.handler",
            timeout: 10,
            memorySize: 128,
            description:
                "Updates default Page Builder app's settings, e.g. website or prerendering URLs, default title, etc.",
            code: new pulumi.asset.AssetArchive({
                ".": new pulumi.asset.FileArchive(
                    path.join(app.ctx.appDir, "code/pageBuilder/updateSettings/build")
                )
            }),
            environment: {
                variables: {
                    // Common Webiny variables first; caller-provided `env` may override them.
                    ...getCommonLambdaEnvVariables(app),
                    ...params.env
                }
            },
            // Run inside the app VPC when one is configured.
            vpcConfig: app.getModule(VpcConfig).functionVpcConfig
        }
    });
    return {
        role,
        policy,
        functions: {
            update
        }
    };
}
/**
 * IAM policy for the "update settings" Lambda: read/write access to the
 * primary DynamoDB table (and its indexes) only.
 */
function createUpdateSettingsLambdaPolicy(app: PulumiApp) {
    const storage = app.getModule(StorageOutput);
    return app.addResource(aws.iam.Policy, {
        name: "PbUpdateSettingsLambdaPolicy",
        config: {
            description: "This policy enables access to Dynamodb",
            policy: {
                Version: "2012-10-17",
                Statement: [
                    {
                        Sid: "AllowDynamoDBAccess",
                        Effect: "Allow",
                        Action: [
                            "dynamodb:BatchGetItem",
                            "dynamodb:BatchWriteItem",
                            "dynamodb:PutItem",
                            "dynamodb:DeleteItem",
                            "dynamodb:GetItem",
                            "dynamodb:Query",
                            "dynamodb:UpdateItem"
                        ],
                        Resource: [
                            pulumi.interpolate`${storage.primaryDynamodbTableArn}`,
                            // The `/*` suffix covers the table's secondary indexes.
                            pulumi.interpolate`${storage.primaryDynamodbTableArn}/*`
                        ]
                    }
                ]
            }
        }
    });
}
/**
 * Creates the two Lambda functions driving the page-export workflow
 * ("process" produces the per-page artifacts, "combine" merges them),
 * plus their shared IAM role and policy.
 */
function createExportPagesResources(app: PulumiApp, params: PageBuilderParams) {
    const storage = app.getModule(StorageOutput);
    const policy = createExportPagesLambdaPolicy(app);
    const role = createLambdaRole(app, {
        name: "pb-export-pages-lambda-role",
        policy: policy.output
    });
    const combine = app.addResource(aws.lambda.Function, {
        name: "pb-export-pages-combine",
        config: {
            role: role.output.arn,
            runtime: "nodejs14.x",
            handler: "handler.handler",
            timeout: 60,
            memorySize: 128,
            description: "Handle page export's combine workflow",
            code: new pulumi.asset.AssetArchive({
                ".": new pulumi.asset.FileArchive(
                    path.join(app.ctx.appDir, "code/pageBuilder/exportPages/combine/build")
                )
            }),
            environment: {
                variables: {
                    ...getCommonLambdaEnvVariables(app),
                    ...params.env,
                    S3_BUCKET: storage.fileManagerBucketId
                }
            }
        }
    });
    const process = app.addResource(aws.lambda.Function, {
        name: "pb-export-pages-process",
        config: {
            role: role.output.arn,
            runtime: "nodejs14.x",
            handler: "handler.handler",
            timeout: 60,
            memorySize: 128,
            description: "Handle page export's process workflow",
            code: new pulumi.asset.AssetArchive({
                ".": new pulumi.asset.FileArchive(
                    path.join(app.ctx.appDir, "code/pageBuilder/exportPages/process/build")
                )
            }),
            environment: {
                variables: {
                    ...getCommonLambdaEnvVariables(app),
                    ...params.env,
                    S3_BUCKET: storage.fileManagerBucketId,
                    // The process function invokes the combine function by ARN.
                    EXPORT_PAGE_COMBINE_HANDLER: combine.output.arn
                }
            }
        }
    });
    return {
        role,
        policy,
        functions: {
            process,
            combine
        }
    };
}
/**
 * IAM policy for the page-export Lambdas: read/write on the primary DynamoDB
 * table, object access on the File Manager bucket, and permission to invoke
 * other Lambda functions (the process function calls combine).
 */
function createExportPagesLambdaPolicy(app: PulumiApp) {
    const storage = app.getModule(StorageOutput);
    const awsAccountId = getAwsAccountId(app);
    const awsRegion = getAwsRegion(app);
    return app.addResource(aws.iam.Policy, {
        name: "PbExportPageTaskLambdaPolicy",
        // BUG FIX: the previous description ("enables access to Dynamodb")
        // omitted the S3 and Lambda grants below.
        config: {
            description: "This policy enables access to DynamoDB, S3 and Lambda",
            policy: {
                Version: "2012-10-17",
                Statement: [
                    {
                        Sid: "AllowDynamoDBAccess",
                        Effect: "Allow",
                        Action: [
                            "dynamodb:BatchGetItem",
                            "dynamodb:BatchWriteItem",
                            "dynamodb:PutItem",
                            "dynamodb:DeleteItem",
                            "dynamodb:GetItem",
                            "dynamodb:Query",
                            "dynamodb:UpdateItem"
                        ],
                        Resource: [
                            pulumi.interpolate`${storage.primaryDynamodbTableArn}`,
                            // The `/*` suffix covers the table's secondary indexes.
                            pulumi.interpolate`${storage.primaryDynamodbTableArn}/*`
                        ]
                    },
                    {
                        Sid: "PermissionForS3",
                        Effect: "Allow",
                        Action: [
                            "s3:GetObjectAcl",
                            "s3:DeleteObject",
                            "s3:PutObjectAcl",
                            "s3:PutObject",
                            "s3:GetObject",
                            "s3:ListBucket"
                        ],
                        Resource: [
                            pulumi.interpolate`arn:aws:s3:::${storage.fileManagerBucketId}/*`,
                            // We need to explicitly add bucket ARN to "Resource" list for "s3:ListBucket" action.
                            pulumi.interpolate`arn:aws:s3:::${storage.fileManagerBucketId}`
                        ]
                    },
                    {
                        Sid: "PermissionForLambda",
                        Effect: "Allow",
                        Action: ["lambda:InvokeFunction"],
                        Resource: pulumi.interpolate`arn:aws:lambda:${awsRegion}:${awsAccountId}:function:*`
                    }
                ]
            }
        }
    });
}
/**
 * Creates the two Lambda functions driving the page-import workflow
 * ("create" queues import jobs, "process" executes them), plus their
 * shared IAM role and policy.
 */
function createImportPagesResources(app: PulumiApp, params: PageBuilderParams) {
    const storage = app.getModule(StorageOutput);
    const policy = createImportPagesLambdaPolicy(app);
    const role = createLambdaRole(app, {
        name: "pb-import-page-lambda-role",
        policy: policy.output
    });
    const process = app.addResource(aws.lambda.Function, {
        name: "pb-import-page-queue-process",
        config: {
            role: role.output.arn,
            runtime: "nodejs14.x",
            handler: "handler.handler",
            timeout: 60,
            memorySize: 512,
            description: "Handle import page queue process workflow",
            code: new pulumi.asset.AssetArchive({
                ".": new pulumi.asset.FileArchive(
                    path.join(app.ctx.appDir, "code/pageBuilder/importPages/process/build")
                )
            }),
            environment: {
                variables: {
                    ...getCommonLambdaEnvVariables(app),
                    ...params.env,
                    S3_BUCKET: storage.fileManagerBucketId
                }
            }
        }
    });
    const create = app.addResource(aws.lambda.Function, {
        name: "pb-import-page-queue-create",
        config: {
            role: role.output.arn,
            runtime: "nodejs14.x",
            handler: "handler.handler",
            timeout: 60,
            memorySize: 512,
            description: "Handle import page queue create workflow",
            code: new pulumi.asset.AssetArchive({
                ".": new pulumi.asset.FileArchive(
                    path.join(app.ctx.appDir, "code/pageBuilder/importPages/create/build")
                )
            }),
            environment: {
                variables: {
                    ...getCommonLambdaEnvVariables(app),
                    ...params.env,
                    S3_BUCKET: storage.fileManagerBucketId,
                    // The create function invokes the process function by ARN.
                    IMPORT_PAGE_QUEUE_PROCESS_HANDLER: process.output.arn
                }
            }
        }
    });
    return {
        role,
        policy,
        functions: {
            create,
            process
        }
    };
}
function createImportPagesLambdaPolicy(app: PulumiApp) {
const storageOutput = app.getModule(StorageOutput);
const awsAccountId = getAwsAccountId(app);
const awsRegion = getAwsRegion(app);
return app.addResource(aws.iam.Policy, {
name: "ImportPageLambdaPolicy",
config: {
description: "This policy enables access Dynamodb, S3, Lambda and Cognito IDP",
// Storage is pulumi.Output, so we need to run apply() to resolve policy based on it
policy: storageOutput.apply(storage => {
const policy: aws.iam.PolicyDocument = {
Version: "2012-10-17",
Statement: [
{
Sid: "PermissionForDynamodb",
Effect: "Allow",
Action: [
"dynamodb:BatchGetItem",
"dynamodb:BatchWriteItem",
"dynamodb:PutItem",
"dynamodb:DeleteItem",
"dynamodb:GetItem",
"dynamodb:Query",
"dynamodb:UpdateItem"
],
Resource: [
`${storage.primaryDynamodbTableArn}`,
`${storage.primaryDynamodbTableArn}/*`,
// Attach permissions for elastic search dynamo as well (if ES is enabled).
...(storage.elasticsearchDynamodbTableArn
? [
`${storage.elasticsearchDynamodbTableArn}`,
`${storage.elasticsearchDynamodbTableArn}/*`
]
: [])
]
},
{
Sid: "PermissionForS3",
Effect: "Allow",
Action: [
"s3:GetObjectAcl",
"s3:DeleteObject",
"s3:PutObjectAcl",
"s3:PutObject",
"s3:GetObject",
"s3:ListBucket"
],
Resource: [
`arn:aws:s3:::${storage.fileManagerBucketId}/*`,
// We need to explicitly add bucket ARN to "Resource" list for "s3:ListBucket" action.
`arn:aws:s3:::${storage.fileManagerBucketId}`
]
},
{
Sid: "PermissionForLambda",
Effect: "Allow",
Action: ["lambda:InvokeFunction"],
Resource: pulumi.interpolate`arn:aws:lambda:${awsRegion}:${awsAccountId}:function:*`
},
{
Sid: "PermissionForCognitoIdp",
Effect: "Allow",
Action: "cognito-idp:*",
Resource: `${storage.cognitoUserPoolArn}`
},
// Attach permissions for elastic search domain as well (if ES is enabled).
...(storage.elasticsearchDomainArn
? [
{
Sid: "PermissionForES",
Effect: "Allow" as const,
Action: "es:*",
Resource: [
`${storage.elasticsearchDomainArn}`,
`${storage.elasticsearchDomainArn}/*`
]
}
]
: [])
]
};
return policy;
})
}
});
} | the_stack |
import { Component, OnInit, ViewChild, ElementRef, NgZone, Input } from '@angular/core';
import { CommunicationService } from '../communication.service';
import { BehaviorSubject } from 'rxjs'
import {MatInputModule} from '@angular/material/input';
import {MatFormFieldModule} from '@angular/material/form-field';
import {MatButtonModule} from '@angular/material/button';
import {MatGridListModule} from '@angular/material/grid-list';
import {MatTableModule} from '@angular/material/table';
import {MatTableDataSource} from '@angular/material/table';
import {MatListModule} from '@angular/material/list';
import {MatTooltipModule} from '@angular/material/tooltip';
import {TooltipPosition} from '@angular/material/tooltip';
import * as ace from 'ace-builds'; // ace module ..
// language package, choose your own
import 'ace-builds/src-noconflict/mode-javascript';
// ui-theme package
import 'ace-builds/src-noconflict/theme-monokai';
import {NgxGraphModule} from '@swimlane/ngx-graph'
import * as shape from 'd3-shape';
import { ElementSchemaRegistry } from '@angular/compiler';
/** One entry in the debugger's store (heap) view. */
interface StoreElem {
  // Address of this store cell.
  addr: string;
  // Addresses this cell references, if any.
  refs?: string[];
  // Concrete value held at this address, if any.
  val?: string;
  // Whether this is the last element of a chain.
  lastElem: boolean;
  // Whether this cell represents a closure.
  closure: boolean;
  // Name of the variable bound to this cell, if known.
  variableName?: string;
}
/** One stack frame entry: a name bound to a store address. */
interface StackElem {
  name: string;
  addr: string;
}
/** One environment binding: a variable name mapped to a store address. */
interface EnvElem {
  var: string;
  addr: string;
}
/** An expression with a short display form and its full text. */
interface Expr {
  short: string;
  long: string;
}
/** An evaluated expression: display forms plus the resulting value. */
interface EvaluatedExpr {
  short: string;
  long: string;
  val: string;
}
/** Node of the resolved-store graph rendered by ngx-graph. */
interface StoreGraphNode {
  id: string;
  label: string;
  dimension: {width: number, height: number}
}
/** Edge of the resolved-store graph rendered by ngx-graph. */
interface StoreGraphEdge{
  source: string;
  target: string;
  label: string;
}
// Ace editor theme/language identifiers (note: ngOnInit currently sets the
// monokai theme directly rather than using these).
const THEME = 'ace/theme/github';
const LANG = 'ace/mode/javascript';
@Component({
  selector: 'app-editor',
  templateUrl: './editor.component.html',
  styleUrls: ['./editor.component.css']
})
export class EditorComponent implements OnInit {
  @Input() path: String;
  code: string; // text in editor
  stack; // stack
  latestStore:StoreElem [] ; // store as list of store elements
  latestStoreTable; // store as table object to allow filtering
  latestEnv: EnvElem []; // environment as list of env elements
  hierarchialGraph = {nodes: [], links: []} // CFG
  edges; // CFG edges
  nodes; // CFG nodes
  hierarchialGraphStore = {nodes: [], links: []} // graph of resolved store element
  storeGraphNodes: StoreGraphNode []; // nodes of resolved store element
  storeGraphEdges: StoreGraphEdge []; // edges of resolved store element
  derefStoreElem: string; // value of resolved store element
  derefStoreElemAddress: string; // address of resolved store element
  exprs: Expr [] = []; // list of processed expressions
  evaluatedExprs: EvaluatedExpr [] = []; // list of evaluated expressions and values
  curve = shape.curveBundle.beta(1); // CFG curves
// apply filter on store table
applyFilter(event: Event) {
const filterValue = (event.target as HTMLInputElement).value;
this.latestStoreTable.filter = filterValue.trim().toLowerCase();
}
constructor(private chatService: CommunicationService, private ngZone: NgZone) {
// calls corresponding handler if websocket message arrives
chatService.messages.subscribe(msg => {
if(msg.tag == "LoadSourceCodeResponse"){
this.loadSourceCodeResponseHandler(JSON.stringify(msg))
}
else if(msg.tag == "RefreshResponse"){
this.refreshResponseHandler(JSON.stringify(msg))
}
else if(msg.tag == "BreakpointResponse"){
this.breakpointResponseHandler(JSON.stringify(msg))
}
else if(msg.tag == "ExceptionResponse"){
this.exceptionResponseHandler(JSON.stringify(msg))
}
else if(msg.tag == "CurrentExpressionResponse"){
this.currentExpressionResponseHandler(JSON.stringify(msg))
}
else if(msg.tag == "EvaluatedExpressionResponse"){
this.evaluatedExpressionResponseHandler(JSON.stringify(msg))
}
})
}
// initialize text editor
ngOnInit(): void {
var editor = ace.edit( "editor1" );
editor.setTheme("ace/theme/monokai");
editor.setValue("Insert code here!");
}
// SEND REQUESTS
// extracts code in text editor and sends it as StartDebuggerRequest -> server starts debugging
sendStartRequest(){
var editor = ace.edit( "editor1" );
var code = editor.getValue();
let startDebuggerRequest = {
"tag":"StartDebuggerRequest",
"code":code
}
this.chatService.startDebuggerRequests.next(startDebuggerRequest);
}
// send ContinueRequest -> server evaluates expressions until a breakpoint is reached
sendContinueRequest(){
let continueRequest = {
"tag":"ContinueRequest"
}
this.chatService.continueRequests.next(continueRequest);
}
// send StepRequest -> server evaluates next expression
sendStepRequest(){
let stepRequest = {
"tag":"StepRequest"
}
this.chatService.stepRequests.next(stepRequest);
}
// extracts filename of form and sends LoadSourceCodeRequest -> server responds with source code of file
sendLoadSourceCodeRequest(){
var bla = this.path;
let loadSourceCodeRequestMessage = {
"tag":"LoadSourceCodeRequest",
"path":this.path
}
this.chatService.messages.next(loadSourceCodeRequestMessage);
}
// send RefreshRequest -> server refreshs debug state
sendRefreshRequest(){
let refreshRequestMessage = {
"tag":"RefreshRequest"
}
this.chatService.refreshRequests.next(refreshRequestMessage);
}
// RESPONSE HANDLER
// set value of editor corresponding to source code in websocket message
loadSourceCodeResponseHandler(msg){
var obj = JSON.parse(msg)
this.code = obj.code
var editor = ace.edit( "editor1" );
editor.setValue(this.code);
}
// process debug information of websocket message
breakpointResponseHandler(msg){
var obj = JSON.parse(msg)
this.nodes = obj.cfgNodes
this.edges = obj.cfgEdges
this.createCFG(this.nodes, this.edges)
var unparsedEnv = obj.latestEnv
this.latestEnv = this.parseEnv(unparsedEnv)
this.latestStore = this.createStore(obj.latestStore)
this.latestStoreTable = new MatTableDataSource(this.latestStore)
var unparsedStack = this.createStack(obj.stack)
console.log(unparsedStack)
this.stack = this.parseStack(unparsedStack)
}
// sets debug information and editor text value to default values
refreshResponseHandler(msg){
this.code = "Insert code here!"
this.nodes = []
this.edges = []
this.stack = []
var editor = ace.edit( "editor1" );
editor.setValue(this.code);
}
// logs exception to console
exceptionResponseHandler(msg){
var obj = JSON.parse(msg)
console.log(obj.exception)
}
// displays current expression
currentExpressionResponseHandler(msg){
var obj = JSON.parse(msg)
if(obj.expr.length < 60){
this.exprs.push({short:obj.expr, long:obj.expr})
}
else{
this.exprs.push({short:obj.expr.slice(0,50) + "...", long:obj.expr})
}
var objDiv = document.getElementById("exprsDiv");
objDiv.scrollIntoView(false)
}
// displays the evaluated expression and the value
evaluatedExpressionResponseHandler(msg){
var obj = JSON.parse(msg)
if(obj.expr.length < 60){
this.evaluatedExprs.push({short:obj.expr, long:obj.expr, val: obj.val})
}
else{
this.evaluatedExprs.push({short:obj.expr.slice(0,50) + "...", long:obj.expr, val: obj.val})
}
var objEvalDiv = document.getElementById("evaluatedExprsDiv");
objEvalDiv.scrollIntoView(false)
}
// updates CFG to given nodes and edges
changeGraph(nodes,edges) {
this.hierarchialGraph.nodes = nodes;
this.hierarchialGraph.links = edges;
}
// parses raw data of websocket message and calls changeGraph()
createCFG(nodes, edges) {
var parsedNodes = []
var parsedEdges = []
for (var node of nodes){
parsedNodes.push({
id: node[0],
label: node[1],
dimension: {width: (node[1].length + 1) * 7, height:30}})
}
for (var edge of edges){
parsedEdges.push({
source: edge[0],
target: edge[1],
label: ''
})
}
this.changeGraph(parsedNodes,parsedEdges)
}
// returns structured stack
createStack(stackElems){
var outputList = []
for (var stackElemC = 0; stackElemC < stackElems.length; stackElemC ++) {
var stackElem = stackElems[stackElemC]
var store:any = stackElem[0]
var functionBodys: any = stackElem[1]
for (var functionBodyC = 0; functionBodyC < functionBodys.length; functionBodyC ++){
var functionBody = functionBodys[functionBodyC]
for (var storeElemC = 0; storeElemC < store.length; storeElemC ++){
var storeElem = store[storeElemC]
var slicedFunctionBody = functionBody.slice(1,functionBody.length-1)
if (storeElem[1].includes(slicedFunctionBody)){
outputList.push(storeElem)
}
}
}
}
return outputList
}
// returnes stack as list of StackElem
parseStack(unparsedStack){
var stackElems: StackElem [] = []
for (var count = 0; count < unparsedStack.length; count ++){
var stackElem = unparsedStack[count]
var addr = stackElem[0]
var reAddr = /^(\S*)(#\d*)\[]$/
var match = reAddr.exec(addr)
if(match != null){
var name = match[1]
var number = match[2]
stackElems.push({name: name,addr: number})
}
}
return stackElems
}
// creates unparsed store from raw data
createStore(latestStore){
var store: StoreElem[] = [];
for (var count = 0; count < latestStore.length; count ++){
var storeElem = latestStore[count]
var addr = storeElem[0]
var val = storeElem[1]
var usualAddr = /^(\d*)\[\]/
var hashAddr = /^(\S*#\d*)\[\]/
var refs = /^Cons\({(\d*)\[]},\{(\d*)\[]}\)/
var matchUsualAddr = usualAddr.exec(addr)
if(matchUsualAddr != null){
var objAddr = matchUsualAddr[1]
var objVariableName = this.getVarNameFromEnv(objAddr)
var matchRefs = refs.exec(val)
if(matchRefs != null){
var objRefs = [matchRefs[1],matchRefs[2]]
store.push({addr : objAddr, refs: objRefs, lastElem: false, closure: false, val:val, variableName: objVariableName})
}
else{
var objVal = val
store.push({addr : objAddr, val: objVal, lastElem: true, closure: false, variableName: objVariableName})
}
}
var matchHashAddr = hashAddr.exec(addr)
if(matchHashAddr != null){
var objAddr = matchHashAddr[1]
var objVariableName = this.getVarNameFromEnv(objAddr)
var matchRefs = refs.exec(val)
if(matchRefs != null){
var objRefs = [matchRefs[1],matchRefs[2]]
store.push({addr : objAddr, refs: objRefs, lastElem: false, closure: true, val: val, variableName: objVariableName})
}
else{
var objVal = val
store.push({addr : objAddr, val: objVal, lastElem: true, closure: true, variableName: objVariableName})
}
}
}
return store
}
// returns variable name of environment element to corresponding address
getVarNameFromEnv(objAddr){
var env = this.latestEnv;
for (var envElem of env) {
if(objAddr == envElem.addr){
return (envElem.var)
}
}
return ""
}
// parses environment to list of EnvElem
parseEnv(unparsedEnv){
var envElems: EnvElem [] = []
for (var count = 0; count < unparsedEnv.length; count ++){
var envElem = unparsedEnv[count]
var name = envElem[0]
var addr = envElem[1]
name = name.slice(1,name.length-1)
addr = addr.slice(0,addr.length-2)
envElems.push({var: name, addr:addr})
}
return envElems
}
// updates graph and resolved value of chosen store element
generateDereferencedStoreElem(storeElem: StoreElem){
console.log(storeElem)
this.storeGraphNodes = []
this.storeGraphEdges = []
this.derefStoreElemAddress = storeElem.addr
this.generateDereferencedGraph(storeElem)
this.changeStoreGraph()
this.derefStoreElem = this.generateDereferencedValue(storeElem)
}
// create resolved graph of store element
generateDereferencedGraph(storeElem: StoreElem){
if (storeElem.lastElem){
if(!this.checkIfNodeExists(storeElem.addr)){
this.storeGraphNodes.push({
id: storeElem.addr,
label: storeElem.addr,
dimension: {width: (storeElem.addr.length + 1) * 7, height:30}})
this.storeGraphNodes.push({
id: storeElem.val + storeElem.addr,
label: storeElem.val,
dimension: {width: (storeElem.val.length + 1) * 7, height:30}})
this.storeGraphEdges.push({
source: storeElem.addr,
target: storeElem.val + storeElem.addr,
label: ''
})
}
else{
this.storeGraphNodes.push({
id: storeElem.val + storeElem.addr,
label: storeElem.val,
dimension: {width: (storeElem.val.length + 1) * 7, height:30}})
this.storeGraphEdges.push({
source: storeElem.addr,
target: storeElem.val + storeElem.addr,
label: ''})
}
}
if(!storeElem.lastElem){
if(!this.checkIfNodeExists(storeElem.addr)){
this.storeGraphNodes.push({
id: "160",
label: storeElem.addr,
dimension: {width: (storeElem.addr.length + 1) * 7, height:30}})
var firstRefAddr = storeElem.refs[0]
this.storeGraphNodes.push({
id: firstRefAddr,
label: firstRefAddr,
dimension: {width: (firstRefAddr.length + 1) * 7, height:30}})
var secondRefAddr = storeElem.refs[1]
this.storeGraphNodes.push({
id: secondRefAddr,
label: secondRefAddr,
dimension: {width: (secondRefAddr.length + 1) * 7, height:30}})
this.storeGraphEdges.push({
source: "160",
target: firstRefAddr,
label: 'Cons'
})
this.storeGraphEdges.push({
source: "160",
target: secondRefAddr,
label: 'Cons'
})
}
else{
var firstRefAddr = storeElem.refs[0]
this.storeGraphNodes.push({
id: firstRefAddr,
label: firstRefAddr,
dimension: {width: (firstRefAddr.length + 1) * 7, height:30}})
var secondRefAddr = storeElem.refs[1]
this.storeGraphNodes.push({
id: secondRefAddr,
label: secondRefAddr,
dimension: {width: (secondRefAddr.length + 1) * 7, height:30}})
this.storeGraphEdges.push({
source: storeElem.addr,
target: firstRefAddr,
label: 'Cons'
})
this.storeGraphEdges.push({
source: storeElem.addr,
target: secondRefAddr,
label: 'Cons'
})
}
var firstRefStoreElem = this.getObjectFromStore(storeElem.refs[0])
var secondRefStoreElem = this.getObjectFromStore(storeElem.refs[1])
this.generateDereferencedGraph(firstRefStoreElem)
this.generateDereferencedGraph(secondRefStoreElem)
}
}
// checks if addr exists in current nodes of resolved store element graph
checkIfNodeExists(addr){
for(var node of this.storeGraphNodes){
if(node.id == addr){
return true
}
}
return false
}
// updates resolved graph of store element
changeStoreGraph() {
this.hierarchialGraphStore.nodes = this.storeGraphNodes;
this.hierarchialGraphStore.links = this.storeGraphEdges;
}
// returns object from store by address
getObjectFromStore(addr){
for(var storeElem of this.latestStore){
if(storeElem.addr == addr){
return storeElem
}
}
}
// recursively generates resolved value of store element
generateDereferencedValue(storeElem: StoreElem){
if(storeElem.lastElem){
return storeElem.val
}
var firstRefStoreElem = this.getObjectFromStore(storeElem.refs[0])
var secondRefStoreElem = this.getObjectFromStore(storeElem.refs[1])
return (this.generateDereferencedValue(firstRefStoreElem) + "," + this.generateDereferencedValue(secondRefStoreElem))
}
}
import { ErrorConstant } from '@remirror/core-constants';
import {
assertGet,
entries,
invariant,
isFunction,
isPromise,
isString,
} from '@remirror/core-helpers';
import type {
AttributesProps,
CommandFunction,
CommandFunctionProps,
FromToProps,
MarkType,
MarkTypeProps,
PrimitiveSelection,
ProsemirrorAttributes,
ProsemirrorNode,
} from '@remirror/core-types';
import { convertCommand, getCursor, getTextSelection, isMarkActive } from '@remirror/core-utils';
import { toggleMark as originalToggleMark } from '@remirror/pm/commands';
import type { SelectionRange } from '@remirror/pm/state';
/**
 * The parameter that is passed into `DelayedCommand`s.
 *
 * Consumed by the deprecated {@link delayedCommand} helper below; the newer
 * `DelayedCommand` class configures the same hooks via its fluent API.
 */
interface DelayedCommandProps<Value> {
  /**
   * Runs as soon as the command is triggered. For most delayed commands within
   * the `remirror` codebase this is used to add a position tracker to the
   * document.
   */
  immediate?: CommandFunction;
  /**
   * The promise that provides the value that the `onDone` callback uses to
   * complete the delayed command.
   */
  promise: DelayedValue<Value>;
  /**
   * Called when the provided promise resolves.
   */
  onDone: CommandFunction<{ value: Value }>;
  /**
   * Called when the promise fails. This could be used to cleanup the active
   * position trackers when the delayed command fails.
   */
  onFail?: CommandFunction;
}
export type DelayedValue<Type> = Promise<Type> | (() => Promise<Type>);
/**
 * Returns `true` when the provided value is a delayed value.
 *
 * A delayed value is either a promise, or a zero-argument thunk that will
 * produce one when called.
 */
export function isDelayedValue<Type>(value: unknown): value is DelayedValue<Type> {
  if (isFunction(value)) {
    return true;
  }
  return isPromise(value);
}
/**
 * Add tentative support for delayed commands in the editor.
 *
 * Delayed commands are commands that run an immediate action, like adding a
 * tracker to a position in the document. Once the promise that is provided is
 * returned the `onDone` parameter is run with the document in the current
 * state. The tracker that was added can now be used to insert content, delete
 * content or replace content.
 *
 * @experimental This is still being worked on and the API is subject to changes
 * in structure going forward.
 *
 * @deprecated use [[`DelayedCommand`]] instead.
 *
 */
export function delayedCommand<Value>({
  immediate,
  promise,
  onDone,
  onFail,
}: DelayedCommandProps<Value>): CommandFunction {
  return (props) => {
    const { view } = props;
    // The immediate command can veto the whole delayed command by returning
    // `false` explicitly (`undefined` from an omitted handler is fine).
    if (immediate?.(props) === false) {
      return false;
    }
    // Without a view there is nothing to dispatch against later; report
    // success so that chained commands keep running.
    if (!view) {
      return true;
    }
    // Normalise the `DelayedValue` (thunk or promise) into a plain promise.
    const deferred = isFunction(promise) ? promise() : promise;
    deferred
      .then((value) => {
        // Run the command
        onDone({ state: view.state, tr: view.state.tr, dispatch: view.dispatch, view, value });
      })
      .catch(() => {
        // Run the failure command if it exists.
        onFail?.({ state: view.state, tr: view.state.tr, dispatch: view.dispatch, view });
      });
    return true;
  };
}
// Creates the promise for a delayed command from the command props.
export type DelayedPromiseCreator<Value> = (props: CommandFunctionProps) => Promise<Value>;
/**
 * Builder for delayed editor commands: register validation, success and
 * failure handlers, then produce a `CommandFunction` with `generateCommand`.
 */
export class DelayedCommand<Value> {
  private readonly failureHandlers: Array<CommandFunction<{ error: any }>> = [];
  private readonly successHandlers: Array<CommandFunction<{ value: Value }>> = [];
  private readonly validateHandlers: CommandFunction[] = [];
  constructor(private readonly promiseCreator: DelayedPromiseCreator<Value>) {}
  /**
   * The commands that will immediately be run and used to evaluate whether to
   * proceed.
   */
  validate(handler: CommandFunction, method: 'push' | 'unshift' = 'push'): this {
    // `method` controls whether the handler is appended or prepended.
    this.validateHandlers[method](handler);
    return this;
  }
  /**
   * Add a success callback to the handler.
   */
  success(handler: CommandFunction<{ value: Value }>, method: 'push' | 'unshift' = 'push'): this {
    this.successHandlers[method](handler);
    return this;
  }
  /**
   * Add a failure callback to the handler.
   */
  failure(handler: CommandFunction<{ error: any }>, method: 'push' | 'unshift' = 'push'): this {
    this.failureHandlers[method](handler);
    return this;
  }
  /**
   * Run the handlers in order with a no-op `dispatch` (so they only mutate the
   * shared `tr`), stopping at the first handler that returns `false`, then
   * dispatch the accumulated transaction once at the end.
   */
  private runHandlers<Param extends CommandFunctionProps>(
    handlers: Array<(params: Param) => boolean>,
    param: Param,
  ): void {
    for (const handler of handlers) {
      if (!handler({ ...param, dispatch: () => {} })) {
        break;
      }
    }
    param.dispatch?.(param.tr);
  }
  /**
   * Generate the `remirror` command.
   */
  readonly generateCommand = (): CommandFunction => {
    return (props) => {
      let isValid = true;
      const { view, tr, dispatch } = props;
      if (!view) {
        return false;
      }
      // Dry-run the validation handlers with a no-op dispatch; any `false`
      // result aborts the command.
      for (const handler of this.validateHandlers) {
        if (!handler({ ...props, dispatch: () => {} })) {
          isValid = false;
          break;
        }
      }
      // In "can this run?" mode (no dispatch) or on failed validation, stop
      // here without starting the promise.
      if (!dispatch || !isValid) {
        return isValid;
      }
      // Start the promise.
      const deferred = this.promiseCreator(props);
      deferred
        .then((value) => {
          this.runHandlers(this.successHandlers, {
            value,
            state: view.state,
            tr: view.state.tr,
            dispatch: view.dispatch,
            view,
          });
        })
        .catch((error) => {
          this.runHandlers(this.failureHandlers, {
            error,
            state: view.state,
            tr: view.state.tr,
            dispatch: view.dispatch,
            view,
          });
        });
      // Commit any changes the validation handlers made to `tr` immediately.
      dispatch(tr);
      return true;
    };
  };
}
export interface ToggleMarkProps extends MarkTypeProps, Partial<AttributesProps> {
  /**
   * @deprecated use `selection` property instead.
   */
  range?: FromToProps;
  /**
   * The selection point for toggling the chosen mark.
   */
  selection?: PrimitiveSelection;
}
/**
 * A custom `toggleMark` function that works for the `remirror` codebase.
 *
 * Create a command function that toggles the given mark with the given
 * attributes. Will return `false` when the current selection doesn't support
 * that mark. This will remove the mark if any marks of that type exist in the
 * selection, or add it otherwise. If the selection is empty, this applies to
 * the [stored marks](#state.EditorState.storedMarks) instead of a range of the
 * document.
 *
 * The differences from the `prosemirror-commands` version.
 * - Acts on the transaction rather than the state to allow for commands to be
 *   chained together.
 * - Uses the ONE parameter function signature for compatibility with remirror.
 * - Supports passing a custom range.
 */
export function toggleMark(props: ToggleMarkProps): CommandFunction {
  const { type, attrs, range, selection } = props;
  // Note: the inner `props` deliberately shadows the outer configuration
  // object — inside the returned command `props` is the CommandFunctionProps.
  return (props) => {
    const { dispatch, tr, state } = props;
    const markType = isString(type) ? state.schema.marks[type] : type;
    invariant(markType, {
      code: ErrorConstant.SCHEMA,
      message: `Mark type: ${type} does not exist on the current schema.`,
    });
    if (range || selection) {
      const { from, to } = getTextSelection(selection ?? range ?? tr.selection, tr.doc);
      // NOTE(review): the active check spreads `range` but not the resolved
      // `from`/`to`, so when only `selection` is supplied the check falls
      // back to the transaction's own selection — confirm this is intended.
      // It also passes the raw `type` (possibly a string name) rather than
      // the resolved `markType`; presumably `isMarkActive` accepts both.
      isMarkActive({ trState: tr, type, ...range })
        ? dispatch?.(tr.removeMark(from, to, markType))
        : dispatch?.(tr.addMark(from, to, markType.create(attrs)));
      return true;
    }
    // No explicit range/selection: defer to the stock prosemirror behaviour.
    return convertCommand(originalToggleMark(markType, attrs))(props);
  };
}
/**
 * Verifies that the mark type can be applied to the current document.
 *
 * Mirrors the check done by `prosemirror-commands`: a mark applies to a
 * selection range when the document itself allows it at the top level, or
 * when some node within the range has inline content that allows it.
 */
function markApplies(type: MarkType, doc: ProsemirrorNode, ranges: readonly SelectionRange[]) {
  for (const range of ranges) {
    const { $from, $to } = range;
    // At depth 0 the document node decides directly; otherwise start
    // pessimistic and search for a permitting inline parent below.
    let allowed = $from.depth === 0 && doc.type.allowsMarkType(type);
    doc.nodesBetween($from.pos, $to.pos, (node) => {
      if (allowed) {
        // Already satisfied — returning false stops descending into children.
        return false;
      }
      allowed = node.inlineContent && node.type.allowsMarkType(type);
      return;
    });
    if (allowed) {
      return true;
    }
  }
  return false;
}
/**
 * Apply the provided mark type and attributes.
 *
 * Behaves like a toggle: when any part of the selection already carries the
 * mark it is removed first, and re-added only when `attrs` is provided. With
 * an empty cursor the stored marks are updated instead.
 *
 * @param markType - the mark to apply.
 * @param attrs - the attributes to set on the applied mark.
 * @param selectionPoint - optionally specify where the mark should be applied.
 * Defaults to the current selection.
 */
export function applyMark(
  type: string | MarkType,
  attrs?: ProsemirrorAttributes,
  selectionPoint?: PrimitiveSelection,
): CommandFunction {
  return ({ tr, dispatch, state }) => {
    const selection = getTextSelection(selectionPoint ?? tr.selection, tr.doc);
    const $cursor = getCursor(selection);
    // Resolve a string name against the schema; fail loudly on a bad name.
    const markType = isString(type) ? state.schema.marks[type] : type;
    invariant(markType, {
      code: ErrorConstant.SCHEMA,
      message: `Mark type: ${type} does not exist on the current schema.`,
    });
    // An empty non-cursor selection, or a selection the schema rejects,
    // cannot take the mark.
    if ((selection.empty && !$cursor) || !markApplies(markType, tr.doc, selection.ranges)) {
      return false;
    }
    // Dry-run mode: report that the command would succeed.
    if (!dispatch) {
      return true;
    }
    // Empty cursor: toggle via the stored marks so the mark affects the next
    // typed text rather than a document range.
    if ($cursor) {
      tr.removeStoredMark(markType);
      if (attrs) {
        tr.addStoredMark(markType.create(attrs));
      }
      dispatch(tr);
      return true;
    }
    // Check whether any range already carries the mark.
    let containsMark = false;
    for (const { $from, $to } of selection.ranges) {
      if (containsMark) {
        break;
      }
      containsMark = tr.doc.rangeHasMark($from.pos, $to.pos, markType);
    }
    // Remove the existing mark (if present anywhere) before re-applying with
    // the new attributes.
    for (const { $from, $to } of selection.ranges) {
      if (containsMark) {
        tr.removeMark($from.pos, $to.pos, markType);
      }
      if (attrs) {
        tr.addMark($from.pos, $to.pos, markType.create(attrs));
      }
    }
    dispatch(tr);
    return true;
  };
}
export interface InsertTextOptions extends Partial<FromToProps> {
  /**
   * Marks can be added to the inserted text.
   * Keys are mark names in the schema; values are the attributes for each.
   */
  marks?: Record<string, ProsemirrorAttributes>;
}
/**
 * Insert text into the dom at the current location by default. If a promise is
 * provided then the text will be inserted at the tracked position when the
 * promise is resolved.
 */
export function insertText(text: string, options: InsertTextOptions = {}): CommandFunction {
  return ({ tr, dispatch, state }) => {
    const schema = state.schema;
    const selection = tr.selection;
    // NOTE(review): `from` always receives a default, so it is never nullish
    // and `to` effectively defaults to `from` — the `?? selection.to`
    // fallback is dead. Confirm whether `to` was meant to default to
    // `selection.to` instead.
    const { from = selection.from, to = from ?? selection.to, marks = {} } = options;
    if (!dispatch) {
      return true;
    }
    // Insert the text
    tr.insertText(text, from, to);
    // Map the end position after inserting the text to understand what needs to
    // be wrapped with a mark.
    const end = assertGet(tr.steps, tr.steps.length - 1)
      .getMap()
      .map(to);
    // Loop through the provided marks to add the mark to the selection. This
    // uses the order of the map you created. If any marks are exclusive, they
    // will override the previous.
    for (const [markName, attributes] of entries(marks)) {
      tr.addMark(from, end, assertGet(schema.marks, markName).create(attributes));
    }
    dispatch(tr);
    return true;
  };
}
}
import * as React from "react"
import * as ReactDOM from "react-dom"
import * as Immutable from "immutable"
import * as Option from "./option"
import {never} from './combinators'
import {Route, Url} from './router'
// Props shared by every process component: the continuation to feed results
// into, a lazy accessor for the application Context, a React key and a lazy
// debug label (logged when present).
export type CmdCommon<A> = { cont:Cont<A>, context:()=>Context, key:string, debug_info:() => string }
// Props for `unit`: immediately yields `value` to the continuation.
export type UnitProps<A> = { kind:"unit", value:A } & CmdCommon<A>
// Props for `bind`: runs `p`, then feeds each result through `k`.
export type BindProps<B,A> = { kind:"bind", once:boolean, p:C<B>, k:(_:B) => C<A>, className:string } & CmdCommon<A>
// Props for `map`: transforms each result of `p` with `f`.
export type MapProps<A,B> = { kind:"map", p:C<A>, f:(_:A)=>B } & CmdCommon<B>
// Props for `filter`: forwards only results of `p` satisfying `f`.
export type FilterProps<A> = { kind:"filter", p:C<A>, f:(_:A)=>boolean } & CmdCommon<A>
// Props for `should_component_update`: re-renders `p(v)` only when `f(v)` holds.
export type ShouldComponentUpdateProps<A,B> = { kind:"should component update", p:(_:A) => C<B>, f:(_:A)=>boolean, v:A } & CmdCommon<B>
// Editing mode flag used by consumers of this module.
export type Mode = "edit"|"view"
// Application-level services available to every process: a frame counter used
// to force remounts, plus routing/navigation operations.
export type Context = {
  logic_frame:number, // bumped by force_reload to remount the whole tree
  force_reload:(callback?:()=>void) => C<void>
  current_page:C<void>
  // pages:Immutable.Stack<C<void>>
  set_page:<T>(x:T, new_page:Route<T>, callback?:()=>void) => C<void>
  set_url:<T>(x:T, new_url:Url<T>, callback?:()=>void) => C<void>
  push_route:(new_route:Route<{}>, callback?:()=>void) => C<void>
  set_routes:(routes:Array<Route<{}>>, callback?:()=>void) => C<void>
  // push_page:(new_page:ApplicationPage, callback?:()=>void) => C<void>
  // pop_page:(callback?:()=>void) => C<void>
}
// A continuation: given a completion callback, returns the consumer of the
// produced value.
export type Cont<A> = (callback:() => void) => (_:A) => void
// A renderable process producing values of type A. `comp` materialises it as
// a JSX element wired to a continuation; the remaining members are fluent
// combinators (implemented in make_C below).
export type C<A> = {
  comp:(ctxt:() => Context) => (cont:Cont<A>) => JSX.Element
  then:<B>(key:string, k:(_:A)=>C<B>, className?:string, dbg?:()=>string)=>C<B>
  // bind_once:<B>(key:string, k:(_:A)=>C<B>, dbg?:()=>string)=>C<B>
  never:<B>(key?:string)=>C<B>
  ignore:(key?:string)=>C<void>
  ignore_with:<B>(x:B)=>C<B>
  map:<B>(f:(_:A)=>B, key?:string, dbg?:()=>string)=>C<B>,
  filter:(f:(_:A)=>boolean, key?:string, dbg?:()=>string)=>C<A>
}
// Builds a full C<A> process from its `comp` rendering function, attaching the
// fluent combinators — each simply delegates to the standalone combinator
// functions defined in this module (`bind`, `map`, `filter`, `never`, `unit`).
export function make_C<A>(comp:(ctxt:()=>Context) => (cont:Cont<A>) => JSX.Element) : C<A> {
  return {
    comp:comp,
    then:function<B>(this:C<A>, key:string, k:(_:A)=>C<B>, className?:string, dbg?:()=>string) : C<B> {
      return bind<A,B>(key, this, k, className, dbg)
    },
    map:function<B>(this:C<A>, f:(_:A)=>B, key?:string, dbg?:()=>string) : C<B> {
      return map<A,B>(key, dbg)(f)(this)
    },
    filter:function(this:C<A>, f:(_:A)=>boolean, key?:string, dbg?:()=>string) : C<A> {
      return filter<A>(key, dbg)(f)(this)
    },
    // bind_once:function<B>(this:C<A>, key:string, k:(_:A)=>C<B>, dbg?:()=>string) : C<B> {
    //   return bind_once<A,B>(key, this, k, dbg)
    // },
    never:function<B>(this:C<A>, key?:string) : C<B> {
      return never<A, B>(this, key)
    },
    ignore_with:function<B>(this:C<A>, x:B) : C<B> {
      return this.then<B>(``, _ => unit<B>(x))
    },
    ignore:function(this:C<A>, key?:string) : C<void> {
      return this.then(key, _ => unit<void>(null))
    }
  }
}
type UnitState<A> = {}
// Renders nothing; immediately feeds `props.value` to the continuation when
// mounting and again whenever new props arrive.
// NOTE(review): relies on the deprecated componentWillMount /
// componentWillReceiveProps lifecycle — confirm before upgrading React.
class Unit<A> extends React.Component<UnitProps<A>,UnitState<A>> {
  constructor(props:UnitProps<A>,context:any) {
    super(props, context)
    this.state = {}
  }
  componentWillReceiveProps(new_props:UnitProps<A>) {
    new_props.debug_info && console.log("New props:", new_props.debug_info(), new_props.value)
    new_props.cont(() => {})(new_props.value)
  }
  componentWillMount() {
    this.props.debug_info && console.log("Component will mount:", this.props.debug_info(), this.props.value)
    this.props.cont(() => {})(this.props.value)
  }
  render(): JSX.Element[] {
    this.props.debug_info && console.log("Render:", this.props.debug_info())
    return []
  }
}
// Lifts a plain value into a process that immediately yields it (monadic return).
export let unit = function<A>(x:A, key?:string, dbg?:() => string) : C<A> { return make_C<A>(ctxt => cont =>
  (React.createElement<UnitProps<A>>(Unit, { kind:"unit", debug_info:dbg, value:x, context:ctxt, cont:cont, key:key }))) }
// Props for `join`: flattens a process-of-processes.
export type JoinProps<A> = { p:C<C<A>> } & CmdCommon<A>
// `p_outer` is the rendered outer process; `p_inner` is "waiting" until the
// outer process yields an inner process, which is then rendered too.
export type JoinState<A> = { p_inner:"waiting"|JSX.Element, p_outer:JSX.Element }
// Renders the outer process and, once it produces an inner process, renders
// that as well, forwarding the inner results to the continuation.
class Join<A> extends React.Component<JoinProps<A>,JoinState<A>> {
  constructor(props:JoinProps<A>,context:any) {
    super(props, context)
    this.state = { p_inner:"waiting", p_outer:props.p.comp(props.context)(cont => p_inner =>
      this.setState({...this.state,
        p_inner:p_inner.comp(this.props.context)(cb => x => this.props.cont(cb)(x))})) }
  }
  componentWillReceiveProps(new_props:JoinProps<A>) {
    new_props.debug_info && console.log("New join props:", new_props.debug_info())
    // Re-render the outer process with the new props; the inner element is
    // replaced when the outer one yields again.
    this.setState({ p_outer:new_props.p.comp(new_props.context)(cont => p_inner =>
      this.setState({...this.state,
        p_inner:p_inner.comp(new_props.context)(cb => x => new_props.cont(cb)(x))})) })
  }
  render() {
    return <div>
      { this.state.p_outer }
      { this.state.p_inner == "waiting"
        ? []
        : this.state.p_inner }
    </div>
  }
}
// Monadic join: runs the outer process and then the process it produces.
let join = function<A>(p:C<C<A>>, key?:string, dbg?:() => string) : C<A> {
  return make_C<A>(ctxt => cont => React.createElement<JoinProps<A>>(Join, { p:p, context:ctxt, cont: cont, debug_info:dbg, key:key }))
}
// `k` holds the rendered continuation once `p` has produced a value.
type BindState<B,A> = { k:"waiting for p"|JSX.Element, p:"creating"|JSX.Element }
// NOTE(review): this component is not referenced by the visible code — the
// current `bind` below is implemented via map + join, and the createElement
// call that used `Bind` is commented out. Kept for reference.
class Bind<B,A> extends React.Component<BindProps<B,A>,BindState<B,A>> {
  constructor(props:BindProps<B,A>,context:any) {
    super(props, context)
    this.state = { k:"waiting for p", p:"creating" }
  }
  componentWillReceiveProps(new_props:BindProps<B,A>) {
    this.props.debug_info && console.log("New props:", this.props.debug_info())
    // In "once" mode the first process is not re-rendered on prop updates.
    if (this.props.once)
      this.setState({...this.state, p:"creating" })
    else
      this.setState({...this.state, p:new_props.p.comp(new_props.context)(callback => x =>
        this.setState({...this.state,
          k:new_props.k(x).comp(new_props.context)(callback => x =>
            new_props.cont(callback)(x))}, callback)
      )})
  }
  componentWillMount() {
    // Render `p`; once it yields a value, render `k(value)` and forward its
    // results to the continuation.
    this.setState({...this.state, p:this.props.p.comp(this.props.context)(callback => x =>
      this.setState({...this.state,
        k:this.props.k(x).comp(this.props.context)(callback => x =>
          this.props.cont(callback)(x))}, callback)
    )})
  }
  render() {
    this.props.debug_info && console.log("Render:", this.props.debug_info())
    return <div className={`bind ${this.props.className || ""}`}>
      {
        (this.state.k == "waiting for p" || !this.props.once) && this.state.p != "creating"
        ? this.state.p
        : []
      }
      {
        this.state.k != "waiting for p"
        ? this.state.k
        : []
      }
    </div>
  }
}
// Monadic bind: run `p`, then feed each of its results through `k`.
// Implemented as join(map(k)(p)); the older Bind-component version is kept
// below, commented out.
export let bind = function<A,B>(key:string, p:C<A>, k:((_:A)=>C<B>), className?:string, dbg?:() => string) : C<B> {
  let q = p.map(k, `${key}_map`, dbg);
  return join(q, `${key}_join`, dbg);
  // return make_C<B>(ctxt => cont =>
  //   (React.createElement<BindProps<A,B>>(Bind,
  //     { kind:"bind", debug_info:dbg, p:p, k:k, once:false, cont:cont, context:ctxt, key:key, className:className })))
}
type MapState<A,B> = { p:"creating"|JSX.Element }
// Renders `p` and forwards each of its results through `f` to the continuation.
class Map<A,B> extends React.Component<MapProps<A,B>,MapState<A,B>> {
  constructor(props:MapProps<A,B>,context:any) {
    super(props, context)
    this.state = { p:"creating" }
  }
  componentWillReceiveProps(new_props:MapProps<A,B>) {
    this.props.debug_info && console.log("New props:", this.props.debug_info())
    this.setState({...this.state, p:new_props.p.comp(new_props.context)(callback => x => new_props.cont(callback)(new_props.f(x)))})
  }
  componentWillMount() {
    this.setState({...this.state, p:this.props.p.comp(this.props.context)(callback => x => this.props.cont(callback)(this.props.f(x)))})
  }
  render() {
    this.props.debug_info && console.log("Render:", this.props.debug_info())
    return this.state.p != "creating"
      ? this.state.p
      : []
  }
}
// Functorial map over a process (curried: key/debug, then f, then the process).
export let map = function<A,B>(key?:string, dbg?:() => string) : ((_:(_:A) => B) => (_:C<A>) => C<B>) {
  return f => p =>
    make_C<B>(ctxt => cont =>
      React.createElement<MapProps<A,B>>(Map,
        { kind:"map", debug_info:dbg, p:p, f:f, context:ctxt, cont:cont, key:key }))
}
type FilterState<A> = { p:"creating"|JSX.Element }
// Renders `p` and forwards only the results that satisfy the predicate `f`.
class Filter<A> extends React.Component<FilterProps<A>,FilterState<A>> {
  constructor(props:FilterProps<A>,context:any) {
    super(props, context)
    this.state = { p:"creating" }
  }
  componentWillReceiveProps(new_props:FilterProps<A>) {
    this.props.debug_info && console.log("New props:", this.props.debug_info())
    this.setState({...this.state, p:new_props.p.comp(new_props.context)(callback => x => { if (new_props.f(x)) { new_props.cont(callback)(x) } })})
  }
  componentWillMount() {
    this.setState({...this.state, p:this.props.p.comp(this.props.context)(callback => x => { if (this.props.f(x)) { this.props.cont(callback)(x) } })})
  }
  render() {
    this.props.debug_info && console.log("Render:", this.props.debug_info())
    return this.state.p != "creating"
      ? this.state.p
      : []
  }
}
// Filter combinator over a process (curried: key/debug, then predicate, then process).
export let filter = function<A>(key?:string, dbg?:() => string) : ((_:(_:A) => boolean) => (_:C<A>) => C<A>) {
  return f => p =>
    make_C<A>(ctxt => cont =>
      React.createElement<FilterProps<A>>(Filter,
        { kind:"filter", debug_info:dbg, p:p, f:f, context:ctxt, cont:cont, key:key }))
}
type ShouldComponentUpdateState<A,B> = { }
// Render-gating wrapper: re-renders the wrapped process p(v) only when the
// predicate f approves the incoming value, via shouldComponentUpdate.
class ShouldComponentUpdate<A,B> extends React.Component<ShouldComponentUpdateProps<A,B>,ShouldComponentUpdateState<A,B>> {
  constructor(props:ShouldComponentUpdateProps<A,B>,context:any) {
    super(props, context)
  }
  shouldComponentUpdate(next_props:ShouldComponentUpdateProps<A,B>) : boolean {
    // Skip the re-render entirely when the predicate rejects the new value.
    return next_props.f(next_props.v)
  }
  componentWillReceiveProps(next_props:ShouldComponentUpdateProps<A,B>) {
    this.props.debug_info && console.log("New props:", this.props.debug_info())
  }
  render() {
    this.props.debug_info && console.log("Render:", this.props.debug_info())
    // let s = this.props.v as any
    // console.log(`rendering should component update with props ${this.props.debug_info()}`, JSON.stringify(s))
    return this.props.p(this.props.v).comp(this.props.context)(cbk => y => this.props.cont(cbk)(y))
  }
}
// Wraps a process factory so that updates only propagate when `f(v)` holds
// (curried: key/debug, then predicate, then factory, then value).
export let should_component_update = function<A,B>(key?:string, dbg?:() => string) : ((_:(_:A) => boolean) => (_:(_:A) => C<B>) => (_:A) => C<B>) {
  return f => p => v =>
    make_C<B>(ctxt => cont =>
      React.createElement<ShouldComponentUpdateProps<A,B>>(ShouldComponentUpdate,
        { kind:"should component update", debug_info:dbg, p:p, f:f, context:ctxt, cont:cont, key:key, v:v }))
}
// Props for the application root: the top-level process and the sink for its results.
export type SimpleApplicationProps<A> = { p:C<A>, cont:(_:A)=>void }
export type SimpleApplicationState<A> = { context:Context }
// Minimal application shell: builds a Context whose routing operations are
// no-ops and whose force_reload bumps `logic_frame` (changing the root key,
// which remounts the whole process tree), then renders the root process.
export class SimpleApplication<A> extends React.Component<SimpleApplicationProps<A>, SimpleApplicationState<A>> {
  constructor(props:SimpleApplicationProps<A>, context:any) {
    super(props, context)
    this.state = { context:this.context_from_props(this.props, unit<void>(null)) }
  }
  context_from_props(props:SimpleApplicationProps<A>, p:C<void>) : Context {
    // NOTE(review): `self` and the `props`/`p` parameters beyond `p` are
    // mostly unused here; the routing members are deliberate no-ops.
    let self = this
    return {
      current_page:p,
      logic_frame:0,
      // `|| null` coerces the void setState result into a null JSX element.
      force_reload:(callback) =>
        make_C<void>(ctxt => inner_callback => this.setState({...this.state, context:{...this.state.context, logic_frame:this.state.context.logic_frame+1}},
          () => inner_callback(callback)(null)) || null),
      set_page:function<T>(x:T, new_page:Route<T>, callback?:()=>void) {
        return unit<void>(null)
      },
      set_url:function<T>(x:T, new_url:Url<T>, callback?:()=>void) {
        return unit<void>(null)
      },
      push_route:function(route, callback?:()=>void) {
        return unit<void>(null)
      },
      set_routes:function(routes, callback?:()=>void) {
        return unit<void>(null)
      }
    }
  }
  render() {
    return <div className="monadic-application" key={`application@${this.state.context.logic_frame}`}>
      {
        this.props.p.comp(() => this.state.context)(callback => x => this.props.cont(x))
      }
    </div>
  }
}
export let simple_application = function<A>(p:C<A>, cont:(_:A)=>void) : JSX.Element {
return React.createElement<SimpleApplicationProps<A>>(SimpleApplication, { p:p, cont:cont })
} | the_stack |
import isChar from './is_char_in_unicode_block';
// A string allows ideographic line breaking only when every one of its
// characters does; the empty string trivially qualifies.
export function allowsIdeographicBreaking(chars: string) {
    return [...chars].every(character => charAllowsIdeographicBreaking(character.charCodeAt(0)));
}
// A string can be laid out in vertical writing mode as soon as it contains
// at least one character with upright vertical orientation.
export function allowsVerticalWritingMode(chars: string) {
    return [...chars].some(character => charHasUprightVerticalOrientation(character.charCodeAt(0)));
}
// Letter-spacing may be applied only when every character in the string
// tolerates it (cursive scripts do not — see charAllowsLetterSpacing).
export function allowsLetterSpacing(chars: string) {
    return [...chars].every(character => charAllowsLetterSpacing(character.charCodeAt(0)));
}
// Arabic script is cursive — glyphs join — so inserting extra space between
// letters is not allowed for any of its blocks.
export function charAllowsLetterSpacing(char: number) {
    return !(isChar['Arabic'](char) ||
        isChar['Arabic Supplement'](char) ||
        isChar['Arabic Extended-A'](char) ||
        isChar['Arabic Presentation Forms-A'](char) ||
        isChar['Arabic Presentation Forms-B'](char));
}
// True when the codepoint belongs to a block whose characters permit a line
// break at any position (CJK and related ideographic scripts).
export function charAllowsIdeographicBreaking(char: number) {
    // Everything below U+2E80 lies outside all ideographic ranges.
    if (char < 0x2E80) return false;
    return isChar['Bopomofo Extended'](char) ||
        isChar['Bopomofo'](char) ||
        isChar['CJK Compatibility Forms'](char) ||
        isChar['CJK Compatibility Ideographs'](char) ||
        isChar['CJK Compatibility'](char) ||
        isChar['CJK Radicals Supplement'](char) ||
        isChar['CJK Strokes'](char) ||
        isChar['CJK Symbols and Punctuation'](char) ||
        isChar['CJK Unified Ideographs Extension A'](char) ||
        isChar['CJK Unified Ideographs'](char) ||
        isChar['Enclosed CJK Letters and Months'](char) ||
        isChar['Halfwidth and Fullwidth Forms'](char) ||
        isChar['Hiragana'](char) ||
        isChar['Ideographic Description Characters'](char) ||
        isChar['Kangxi Radicals'](char) ||
        isChar['Katakana Phonetic Extensions'](char) ||
        isChar['Katakana'](char) ||
        isChar['Vertical Forms'](char) ||
        isChar['Yi Radicals'](char) ||
        isChar['Yi Syllables'](char);
}
// The following logic comes from
// <http://www.unicode.org/Public/12.0.0/ucd/VerticalOrientation.txt>.
// Keep it synchronized with
// <http://www.unicode.org/Public/UCD/latest/ucd/VerticalOrientation.txt>.
// The data file denotes with “U” or “Tu” any codepoint that may be drawn
// upright in vertical text but does not distinguish between upright and
// “neutral” characters.
// Blocks in the Unicode supplementary planes are excluded from this module due
// to <https://github.com/mapbox/mapbox-gl/issues/29>.
/**
* Returns true if the given Unicode codepoint identifies a character with
* upright orientation.
*
* A character has upright orientation if it is drawn upright (unrotated)
* whether the line is oriented horizontally or vertically, even if both
* adjacent characters can be rotated. For example, a Chinese character is
* always drawn upright. An uprightly oriented character causes an adjacent
* “neutral” character to be drawn upright as well.
* @private
*/
export function charHasUprightVerticalOrientation(char: number) {
    // Two tone marks below the main ranges are nevertheless drawn upright.
    if (char === 0x02EA /* modifier letter yin departing tone mark */ ||
        char === 0x02EB /* modifier letter yang departing tone mark */) {
        return true;
    }
    // Return early for characters outside all ranges whose characters remain
    // upright in vertical writing mode.
    if (char < 0x1100) return false;
    if (isChar['Bopomofo Extended'](char)) return true;
    if (isChar['Bopomofo'](char)) return true;
    // The nested conditions below carve exceptions (mostly brackets, dashes,
    // overlines/underlines) out of otherwise-upright blocks.
    if (isChar['CJK Compatibility Forms'](char)) {
        if (!((char >= 0xFE49 /* dashed overline */ && char <= 0xFE4F) /* wavy low line */)) {
            return true;
        }
    }
    if (isChar['CJK Compatibility Ideographs'](char)) return true;
    if (isChar['CJK Compatibility'](char)) return true;
    if (isChar['CJK Radicals Supplement'](char)) return true;
    if (isChar['CJK Strokes'](char)) return true;
    if (isChar['CJK Symbols and Punctuation'](char)) {
        if (!((char >= 0x3008 /* left angle bracket */ && char <= 0x3011) /* right black lenticular bracket */) &&
            !((char >= 0x3014 /* left tortoise shell bracket */ && char <= 0x301F) /* low double prime quotation mark */) &&
            char !== 0x3030 /* wavy dash */) {
            return true;
        }
    }
    if (isChar['CJK Unified Ideographs Extension A'](char)) return true;
    if (isChar['CJK Unified Ideographs'](char)) return true;
    if (isChar['Enclosed CJK Letters and Months'](char)) return true;
    if (isChar['Hangul Compatibility Jamo'](char)) return true;
    if (isChar['Hangul Jamo Extended-A'](char)) return true;
    if (isChar['Hangul Jamo Extended-B'](char)) return true;
    if (isChar['Hangul Jamo'](char)) return true;
    if (isChar['Hangul Syllables'](char)) return true;
    if (isChar['Hiragana'](char)) return true;
    if (isChar['Ideographic Description Characters'](char)) return true;
    if (isChar['Kanbun'](char)) return true;
    if (isChar['Kangxi Radicals'](char)) return true;
    if (isChar['Katakana Phonetic Extensions'](char)) return true;
    if (isChar['Katakana'](char)) {
        if (char !== 0x30FC /* katakana-hiragana prolonged sound mark */) {
            return true;
        }
    }
    if (isChar['Halfwidth and Fullwidth Forms'](char)) {
        if (char !== 0xFF08 /* fullwidth left parenthesis */ &&
            char !== 0xFF09 /* fullwidth right parenthesis */ &&
            char !== 0xFF0D /* fullwidth hyphen-minus */ &&
            !((char >= 0xFF1A /* fullwidth colon */ && char <= 0xFF1E) /* fullwidth greater-than sign */) &&
            char !== 0xFF3B /* fullwidth left square bracket */ &&
            char !== 0xFF3D /* fullwidth right square bracket */ &&
            char !== 0xFF3F /* fullwidth low line */ &&
            !(char >= 0xFF5B /* fullwidth left curly bracket */ && char <= 0xFFDF) &&
            char !== 0xFFE3 /* fullwidth macron */ &&
            !(char >= 0xFFE8 /* halfwidth forms light vertical */ && char <= 0xFFEF)) {
            return true;
        }
    }
    if (isChar['Small Form Variants'](char)) {
        if (!((char >= 0xFE58 /* small em dash */ && char <= 0xFE5E) /* small right tortoise shell bracket */) &&
            !((char >= 0xFE63 /* small hyphen-minus */ && char <= 0xFE66) /* small equals sign */)) {
            return true;
        }
    }
    if (isChar['Unified Canadian Aboriginal Syllabics'](char)) return true;
    if (isChar['Unified Canadian Aboriginal Syllabics Extended'](char)) return true;
    if (isChar['Vertical Forms'](char)) return true;
    if (isChar['Yijing Hexagram Symbols'](char)) return true;
    if (isChar['Yi Syllables'](char)) return true;
    if (isChar['Yi Radicals'](char)) return true;
    return false;
}
/**
* Returns true if the given Unicode codepoint identifies a character with
* neutral orientation.
*
* A character has neutral orientation if it may be drawn rotated or unrotated
* when the line is oriented vertically, depending on the orientation of the
* adjacent characters. For example, along a vertically oriented line, the vulgar
* fraction ½ is drawn upright among Chinese characters but rotated among Latin
* letters. A neutrally oriented character does not influence whether an
* adjacent character is drawn upright or rotated.
* @private
*/
export function charHasNeutralVerticalOrientation(char: number) {
    // Unlike the "upright" check, most of the blocks below contribute only a
    // small whitelist of neutral codepoints, so the conditions are inclusive
    // lists rather than carve-outs.
    if (isChar['Latin-1 Supplement'](char)) {
        if (char === 0x00A7 /* section sign */ ||
            char === 0x00A9 /* copyright sign */ ||
            char === 0x00AE /* registered sign */ ||
            char === 0x00B1 /* plus-minus sign */ ||
            char === 0x00BC /* vulgar fraction one quarter */ ||
            char === 0x00BD /* vulgar fraction one half */ ||
            char === 0x00BE /* vulgar fraction three quarters */ ||
            char === 0x00D7 /* multiplication sign */ ||
            char === 0x00F7 /* division sign */) {
            return true;
        }
    }
    if (isChar['General Punctuation'](char)) {
        if (char === 0x2016 /* double vertical line */ ||
            char === 0x2020 /* dagger */ ||
            char === 0x2021 /* double dagger */ ||
            char === 0x2030 /* per mille sign */ ||
            char === 0x2031 /* per ten thousand sign */ ||
            char === 0x203B /* reference mark */ ||
            char === 0x203C /* double exclamation mark */ ||
            char === 0x2042 /* asterism */ ||
            char === 0x2047 /* double question mark */ ||
            char === 0x2048 /* question exclamation mark */ ||
            char === 0x2049 /* exclamation question mark */ ||
            char === 0x2051 /* two asterisks aligned vertically */) {
            return true;
        }
    }
    if (isChar['Letterlike Symbols'](char)) return true;
    if (isChar['Number Forms'](char)) return true;
    if (isChar['Miscellaneous Technical'](char)) {
        if ((char >= 0x2300 /* diameter sign */ && char <= 0x2307 /* wavy line */) ||
            (char >= 0x230C /* bottom right crop */ && char <= 0x231F /* bottom right corner */) ||
            (char >= 0x2324 /* up arrowhead between two horizontal bars */ && char <= 0x2328 /* keyboard */) ||
            char === 0x232B /* erase to the left */ ||
            (char >= 0x237D /* shouldered open box */ && char <= 0x239A /* clear screen symbol */) ||
            (char >= 0x23BE /* dentistry symbol light vertical and top right */ && char <= 0x23CD /* square foot */) ||
            char === 0x23CF /* eject symbol */ ||
            (char >= 0x23D1 /* metrical breve */ && char <= 0x23DB /* fuse */) ||
            (char >= 0x23E2 /* white trapezium */ && char <= 0x23FF)) {
            return true;
        }
    }
    if (isChar['Control Pictures'](char) && char !== 0x2423 /* open box */) return true;
    if (isChar['Optical Character Recognition'](char)) return true;
    if (isChar['Enclosed Alphanumerics'](char)) return true;
    if (isChar['Geometric Shapes'](char)) return true;
    if (isChar['Miscellaneous Symbols'](char)) {
        if (!((char >= 0x261A /* black left pointing index */ && char <= 0x261F) /* white down pointing index */)) {
            return true;
        }
    }
    if (isChar['Miscellaneous Symbols and Arrows'](char)) {
        if ((char >= 0x2B12 /* square with top half black */ && char <= 0x2B2F /* white vertical ellipse */) ||
            (char >= 0x2B50 /* white medium star */ && char <= 0x2B59 /* heavy circled saltire */) ||
            (char >= 0x2BB8 /* upwards white arrow from bar with horizontal bar */ && char <= 0x2BEB)) {
            return true;
        }
    }
    // These blocks are neutral in their entirety.
    if (isChar['CJK Symbols and Punctuation'](char)) return true;
    if (isChar['Katakana'](char)) return true;
    if (isChar['Private Use Area'](char)) return true;
    if (isChar['CJK Compatibility Forms'](char)) return true;
    if (isChar['Small Form Variants'](char)) return true;
    if (isChar['Halfwidth and Fullwidth Forms'](char)) return true;
    // Finally, a handful of scattered codepoints outside any handled block.
    if (char === 0x221E /* infinity */ ||
        char === 0x2234 /* therefore */ ||
        char === 0x2235 /* because */ ||
        (char >= 0x2700 /* black safety scissors */ && char <= 0x2767 /* rotated floral heart bullet */) ||
        (char >= 0x2776 /* dingbat negative circled digit one */ && char <= 0x2793 /* dingbat negative circled sans-serif number ten */) ||
        char === 0xFFFC /* object replacement character */ ||
        char === 0xFFFD /* replacement character */) {
        return true;
    }
    return false;
}
/**
* Returns true if the given Unicode codepoint identifies a character with
* rotated orientation.
*
* A character has rotated orientation if it is drawn rotated when the line is
* oriented vertically, even if both adjacent characters are upright. For
* example, a Latin letter is drawn rotated along a vertical line. A rotated
* character causes an adjacent “neutral” character to be drawn rotated as well.
* @private
*/
// A codepoint is drawn rotated in vertical text exactly when it is neither
// upright nor orientation-neutral.
export function charHasRotatedVerticalOrientation(char: number) {
    return !charHasUprightVerticalOrientation(char) &&
        !charHasNeutralVerticalOrientation(char);
}
// All Arabic-script blocks require complex (joining) text shaping.
export function charInComplexShapingScript(char: number) {
    if (isChar['Arabic'](char)) return true;
    if (isChar['Arabic Supplement'](char)) return true;
    if (isChar['Arabic Extended-A'](char)) return true;
    if (isChar['Arabic Presentation Forms-A'](char)) return true;
    return isChar['Arabic Presentation Forms-B'](char);
}
// Hebrew, Arabic, Thaana and the other right-to-left scripts occupy
// U+0590–U+08FF, plus the two Arabic presentation-form blocks.
export function charInRTLScript(char: number) {
    if (char >= 0x0590 && char <= 0x08FF) return true;
    if (isChar['Arabic Presentation Forms-A'](char)) return true;
    return isChar['Arabic Presentation Forms-B'](char);
}
export function charInSupportedScript(char: number, canRenderRTL: boolean) {
    // Rough heuristic only: whether we "can render" a script really depends on
    // the font in use and on whether deviations from ideal rendering are
    // semantically significant. Even in Latin we "can't render" e.g. the fi
    // ligature, but that loss is not considered meaningful.
    if (!canRenderRTL && charInRTLScript(char)) {
        return false;
    }
    // Scripts needing complex shaping that we do not support, per the Unicode
    // script metadata ("Web Rank <= 32" and "Shaping Required = YES"):
    // http://www.unicode.org/repos/cldr/trunk/common/properties/scriptMetadata.txt
    const requiresComplexShaping =
        (char >= 0x0900 && char <= 0x0DFF) || // Indic scripts and Sinhala
        (char >= 0x0F00 && char <= 0x109F) || // Tibetan and Myanmar
        isChar['Khmer'](char);
    return !requiresComplexShaping;
}
// True when at least one character of the string belongs to an RTL script.
export function stringContainsRTLText(chars: string): boolean {
    return [...chars].some(character => charInRTLScript(character.charCodeAt(0)));
}
export function isStringInSupportedScript(chars: string, canRenderRTL: boolean) {
for (const char of chars) {
if (!charInSupportedScript(char.charCodeAt(0), canRenderRTL)) {
return false;
}
}
return true;
} | the_stack |
import * as msRest from "@azure/ms-rest-js";
import * as msRestAzure from "@azure/ms-rest-azure-js";
import * as Models from "../models";
import * as Mappers from "../models/networkExperimentProfilesMappers";
import * as Parameters from "../models/parameters";
import { FrontDoorManagementClientContext } from "../frontDoorManagementClientContext";
/** Class representing a NetworkExperimentProfiles. */
export class NetworkExperimentProfiles {
  // NOTE(review): this operation group follows the autorest-style pattern
  // (overload triplets + operation specs below); it appears auto-generated,
  // so fixes should go into the generator/swagger, not be hand-edited here.
  private readonly client: FrontDoorManagementClientContext;
  /**
   * Create a NetworkExperimentProfiles.
   * @param {FrontDoorManagementClientContext} client Reference to the service client.
   */
  constructor(client: FrontDoorManagementClientContext) {
    this.client = client;
  }
  /**
   * @summary Gets a list of Network Experiment Profiles under a subscription
   * @param [options] The optional parameters
   * @returns Promise<Models.NetworkExperimentProfilesListResponse>
   */
  list(options?: msRest.RequestOptionsBase): Promise<Models.NetworkExperimentProfilesListResponse>;
  /**
   * @param callback The callback
   */
  list(callback: msRest.ServiceCallback<Models.ProfileList>): void;
  /**
   * @param options The optional parameters
   * @param callback The callback
   */
  list(options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.ProfileList>): void;
  list(options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.ProfileList>, callback?: msRest.ServiceCallback<Models.ProfileList>): Promise<Models.NetworkExperimentProfilesListResponse> {
    return this.client.sendOperationRequest(
      {
        options
      },
      listOperationSpec,
      callback) as Promise<Models.NetworkExperimentProfilesListResponse>;
  }
  /**
   * @summary Gets a list of Network Experiment Profiles within a resource group under a subscription
   * @param resourceGroupName Name of the Resource group within the Azure subscription.
   * @param [options] The optional parameters
   * @returns Promise<Models.NetworkExperimentProfilesListByResourceGroupResponse>
   */
  listByResourceGroup(resourceGroupName: string, options?: msRest.RequestOptionsBase): Promise<Models.NetworkExperimentProfilesListByResourceGroupResponse>;
  /**
   * @param resourceGroupName Name of the Resource group within the Azure subscription.
   * @param callback The callback
   */
  listByResourceGroup(resourceGroupName: string, callback: msRest.ServiceCallback<Models.ProfileList>): void;
  /**
   * @param resourceGroupName Name of the Resource group within the Azure subscription.
   * @param options The optional parameters
   * @param callback The callback
   */
  listByResourceGroup(resourceGroupName: string, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.ProfileList>): void;
  listByResourceGroup(resourceGroupName: string, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.ProfileList>, callback?: msRest.ServiceCallback<Models.ProfileList>): Promise<Models.NetworkExperimentProfilesListByResourceGroupResponse> {
    return this.client.sendOperationRequest(
      {
        resourceGroupName,
        options
      },
      listByResourceGroupOperationSpec,
      callback) as Promise<Models.NetworkExperimentProfilesListByResourceGroupResponse>;
  }
  /**
   * @summary Gets an NetworkExperiment Profile by ProfileName
   * @param resourceGroupName Name of the Resource group within the Azure subscription.
   * @param profileName The Profile identifier associated with the Tenant and Partner
   * @param [options] The optional parameters
   * @returns Promise<Models.NetworkExperimentProfilesGetResponse>
   */
  get(resourceGroupName: string, profileName: string, options?: msRest.RequestOptionsBase): Promise<Models.NetworkExperimentProfilesGetResponse>;
  /**
   * @param resourceGroupName Name of the Resource group within the Azure subscription.
   * @param profileName The Profile identifier associated with the Tenant and Partner
   * @param callback The callback
   */
  get(resourceGroupName: string, profileName: string, callback: msRest.ServiceCallback<Models.Profile>): void;
  /**
   * @param resourceGroupName Name of the Resource group within the Azure subscription.
   * @param profileName The Profile identifier associated with the Tenant and Partner
   * @param options The optional parameters
   * @param callback The callback
   */
  get(resourceGroupName: string, profileName: string, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.Profile>): void;
  get(resourceGroupName: string, profileName: string, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.Profile>, callback?: msRest.ServiceCallback<Models.Profile>): Promise<Models.NetworkExperimentProfilesGetResponse> {
    return this.client.sendOperationRequest(
      {
        resourceGroupName,
        profileName,
        options
      },
      getOperationSpec,
      callback) as Promise<Models.NetworkExperimentProfilesGetResponse>;
  }
  /**
   * @summary Creates an NetworkExperiment Profile
   * @param profileName The Profile identifier associated with the Tenant and Partner
   * @param resourceGroupName Name of the Resource group within the Azure subscription.
   * @param parameters An Network Experiment Profile
   * @param [options] The optional parameters
   * @returns Promise<Models.NetworkExperimentProfilesCreateOrUpdateResponse>
   */
  // NOTE(review): parameter order here is (profileName, resourceGroupName) —
  // the reverse of every other method in this class. Generated as-is; callers
  // depend on this positional order, so it must not be "fixed" locally.
  createOrUpdate(profileName: string, resourceGroupName: string, parameters: Models.Profile, options?: msRest.RequestOptionsBase): Promise<Models.NetworkExperimentProfilesCreateOrUpdateResponse> {
    return this.beginCreateOrUpdate(profileName,resourceGroupName,parameters,options)
      .then(lroPoller => lroPoller.pollUntilFinished()) as Promise<Models.NetworkExperimentProfilesCreateOrUpdateResponse>;
  }
  /**
   * Updates an NetworkExperimentProfiles
   * @summary Updates an NetworkExperimentProfiles by NetworkExperimentProfile name
   * @param resourceGroupName Name of the Resource group within the Azure subscription.
   * @param profileName The Profile identifier associated with the Tenant and Partner
   * @param parameters The Profile Update Model
   * @param [options] The optional parameters
   * @returns Promise<Models.NetworkExperimentProfilesUpdateResponse>
   */
  update(resourceGroupName: string, profileName: string, parameters: Models.ProfileUpdateModel, options?: msRest.RequestOptionsBase): Promise<Models.NetworkExperimentProfilesUpdateResponse> {
    return this.beginUpdate(resourceGroupName,profileName,parameters,options)
      .then(lroPoller => lroPoller.pollUntilFinished()) as Promise<Models.NetworkExperimentProfilesUpdateResponse>;
  }
  /**
   * @summary Deletes an NetworkExperiment Profile by ProfileName
   * @param resourceGroupName Name of the Resource group within the Azure subscription.
   * @param profileName The Profile identifier associated with the Tenant and Partner
   * @param [options] The optional parameters
   * @returns Promise<msRest.RestResponse>
   */
  deleteMethod(resourceGroupName: string, profileName: string, options?: msRest.RequestOptionsBase): Promise<msRest.RestResponse> {
    return this.beginDeleteMethod(resourceGroupName,profileName,options)
      .then(lroPoller => lroPoller.pollUntilFinished());
  }
  /**
   * @summary Creates an NetworkExperiment Profile
   * @param profileName The Profile identifier associated with the Tenant and Partner
   * @param resourceGroupName Name of the Resource group within the Azure subscription.
   * @param parameters An Network Experiment Profile
   * @param [options] The optional parameters
   * @returns Promise<msRestAzure.LROPoller>
   */
  beginCreateOrUpdate(profileName: string, resourceGroupName: string, parameters: Models.Profile, options?: msRest.RequestOptionsBase): Promise<msRestAzure.LROPoller> {
    return this.client.sendLRORequest(
      {
        profileName,
        resourceGroupName,
        parameters,
        options
      },
      beginCreateOrUpdateOperationSpec,
      options);
  }
  /**
   * Updates an NetworkExperimentProfiles
   * @summary Updates an NetworkExperimentProfiles by NetworkExperimentProfile name
   * @param resourceGroupName Name of the Resource group within the Azure subscription.
   * @param profileName The Profile identifier associated with the Tenant and Partner
   * @param parameters The Profile Update Model
   * @param [options] The optional parameters
   * @returns Promise<msRestAzure.LROPoller>
   */
  beginUpdate(resourceGroupName: string, profileName: string, parameters: Models.ProfileUpdateModel, options?: msRest.RequestOptionsBase): Promise<msRestAzure.LROPoller> {
    return this.client.sendLRORequest(
      {
        resourceGroupName,
        profileName,
        parameters,
        options
      },
      beginUpdateOperationSpec,
      options);
  }
  /**
   * @summary Deletes an NetworkExperiment Profile by ProfileName
   * @param resourceGroupName Name of the Resource group within the Azure subscription.
   * @param profileName The Profile identifier associated with the Tenant and Partner
   * @param [options] The optional parameters
   * @returns Promise<msRestAzure.LROPoller>
   */
  beginDeleteMethod(resourceGroupName: string, profileName: string, options?: msRest.RequestOptionsBase): Promise<msRestAzure.LROPoller> {
    return this.client.sendLRORequest(
      {
        resourceGroupName,
        profileName,
        options
      },
      beginDeleteMethodOperationSpec,
      options);
  }
  /**
   * @summary Gets a list of Network Experiment Profiles under a subscription
   * @param nextPageLink The NextLink from the previous successful call to List operation.
   * @param [options] The optional parameters
   * @returns Promise<Models.NetworkExperimentProfilesListNextResponse>
   */
  listNext(nextPageLink: string, options?: msRest.RequestOptionsBase): Promise<Models.NetworkExperimentProfilesListNextResponse>;
  /**
   * @param nextPageLink The NextLink from the previous successful call to List operation.
   * @param callback The callback
   */
  listNext(nextPageLink: string, callback: msRest.ServiceCallback<Models.ProfileList>): void;
  /**
   * @param nextPageLink The NextLink from the previous successful call to List operation.
   * @param options The optional parameters
   * @param callback The callback
   */
  listNext(nextPageLink: string, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.ProfileList>): void;
  listNext(nextPageLink: string, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.ProfileList>, callback?: msRest.ServiceCallback<Models.ProfileList>): Promise<Models.NetworkExperimentProfilesListNextResponse> {
    return this.client.sendOperationRequest(
      {
        nextPageLink,
        options
      },
      listNextOperationSpec,
      callback) as Promise<Models.NetworkExperimentProfilesListNextResponse>;
  }
  /**
   * @summary Gets a list of Network Experiment Profiles within a resource group under a subscription
   * @param nextPageLink The NextLink from the previous successful call to List operation.
   * @param [options] The optional parameters
   * @returns Promise<Models.NetworkExperimentProfilesListByResourceGroupNextResponse>
   */
  listByResourceGroupNext(nextPageLink: string, options?: msRest.RequestOptionsBase): Promise<Models.NetworkExperimentProfilesListByResourceGroupNextResponse>;
  /**
   * @param nextPageLink The NextLink from the previous successful call to List operation.
   * @param callback The callback
   */
  listByResourceGroupNext(nextPageLink: string, callback: msRest.ServiceCallback<Models.ProfileList>): void;
  /**
   * @param nextPageLink The NextLink from the previous successful call to List operation.
   * @param options The optional parameters
   * @param callback The callback
   */
  listByResourceGroupNext(nextPageLink: string, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.ProfileList>): void;
  listByResourceGroupNext(nextPageLink: string, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.ProfileList>, callback?: msRest.ServiceCallback<Models.ProfileList>): Promise<Models.NetworkExperimentProfilesListByResourceGroupNextResponse> {
    return this.client.sendOperationRequest(
      {
        nextPageLink,
        options
      },
      listByResourceGroupNextOperationSpec,
      callback) as Promise<Models.NetworkExperimentProfilesListByResourceGroupNextResponse>;
  }
}
// Operation Specifications
// Shared serializer bound to this operation group's mapper definitions.
const serializer = new msRest.Serializer(Mappers);
const listOperationSpec: msRest.OperationSpec = {
  httpMethod: "GET",
  path: "subscriptions/{subscriptionId}/providers/Microsoft.Network/NetworkExperimentProfiles",
  urlParameters: [
    Parameters.subscriptionId
  ],
  queryParameters: [
    Parameters.apiVersion0
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {
      bodyMapper: Mappers.ProfileList
    },
    default: {
      bodyMapper: Mappers.ErrorResponse
    }
  },
  serializer
};
const listByResourceGroupOperationSpec: msRest.OperationSpec = {
  httpMethod: "GET",
  path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/NetworkExperimentProfiles",
  urlParameters: [
    Parameters.subscriptionId,
    Parameters.resourceGroupName
  ],
  queryParameters: [
    Parameters.apiVersion0
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {
      bodyMapper: Mappers.ProfileList
    },
    default: {
      bodyMapper: Mappers.ErrorResponse
    }
  },
  serializer
};
const getOperationSpec: msRest.OperationSpec = {
  httpMethod: "GET",
  path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/NetworkExperimentProfiles/{profileName}",
  urlParameters: [
    Parameters.subscriptionId,
    Parameters.resourceGroupName,
    Parameters.profileName
  ],
  queryParameters: [
    Parameters.apiVersion0
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {
      bodyMapper: Mappers.Profile
    },
    default: {
      bodyMapper: Mappers.ErrorResponse
    }
  },
  serializer
};
// Long-running operation: 200/201/202 all carry a Profile body.
const beginCreateOrUpdateOperationSpec: msRest.OperationSpec = {
  httpMethod: "PUT",
  path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/NetworkExperimentProfiles/{profileName}",
  urlParameters: [
    Parameters.profileName,
    Parameters.subscriptionId,
    Parameters.resourceGroupName
  ],
  queryParameters: [
    Parameters.apiVersion0
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  requestBody: {
    parameterPath: "parameters",
    mapper: {
      ...Mappers.Profile,
      required: true
    }
  },
  responses: {
    200: {
      bodyMapper: Mappers.Profile
    },
    201: {
      bodyMapper: Mappers.Profile
    },
    202: {
      bodyMapper: Mappers.Profile
    },
    default: {
      bodyMapper: Mappers.ErrorResponse
    }
  },
  serializer
};
const beginUpdateOperationSpec: msRest.OperationSpec = {
  httpMethod: "PATCH",
  path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/NetworkExperimentProfiles/{profileName}",
  urlParameters: [
    Parameters.subscriptionId,
    Parameters.resourceGroupName,
    Parameters.profileName
  ],
  queryParameters: [
    Parameters.apiVersion0
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  requestBody: {
    parameterPath: "parameters",
    mapper: {
      ...Mappers.ProfileUpdateModel,
      required: true
    }
  },
  responses: {
    200: {
      bodyMapper: Mappers.Profile
    },
    202: {
      bodyMapper: Mappers.Profile
    },
    default: {
      bodyMapper: Mappers.ErrorResponse
    }
  },
  serializer
};
const beginDeleteMethodOperationSpec: msRest.OperationSpec = {
  httpMethod: "DELETE",
  path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/NetworkExperimentProfiles/{profileName}",
  urlParameters: [
    Parameters.subscriptionId,
    Parameters.resourceGroupName,
    Parameters.profileName
  ],
  queryParameters: [
    Parameters.apiVersion0
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {},
    202: {},
    204: {},
    default: {
      bodyMapper: Mappers.ErrorResponse
    }
  },
  serializer
};
// Paging specs: follow the absolute nextLink URL returned by the previous page.
const listNextOperationSpec: msRest.OperationSpec = {
  httpMethod: "GET",
  baseUrl: "https://management.azure.com",
  path: "{nextLink}",
  urlParameters: [
    Parameters.nextPageLink
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {
      bodyMapper: Mappers.ProfileList
    },
    default: {
      bodyMapper: Mappers.ErrorResponse
    }
  },
  serializer
};
const listByResourceGroupNextOperationSpec: msRest.OperationSpec = {
  httpMethod: "GET",
  baseUrl: "https://management.azure.com",
  path: "{nextLink}",
  urlParameters: [
    Parameters.nextPageLink
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {
      bodyMapper: Mappers.ProfileList
    },
    default: {
      bodyMapper: Mappers.ErrorResponse
    }
  },
  serializer
};
import { DataColumnDescriptor, DataColumnList } from './data_column';
import { DataRow } from './data_row';
import { DataLoader } from './data_loader';
import { DataType } from '../types/data_type';
import { utils } from '../utils/utils';
// Construction options for EasyDataTable.
export interface EasyDataTableOptions {
    // Rows fetched and cached per chunk (defaults to 1000 in the class).
    chunkSize?: number;
    // When true, the total row count is unknown up front and discovered lazily.
    elasticChunks?: boolean;
    // Loader used to fetch chunks that are not cached yet.
    loader?: DataLoader;
    // Initial column definitions.
    columns?: DataColumnDescriptor[];
    // Initial inline row data.
    rows?: any[];
}
// Row-window request: either page-based or offset-based.
type GetRowsPageParams = { page: number, pageSize: number };
type GetRowsOffsetParams = { offset: number, limit?: number };
export type GetRowsParams = GetRowsPageParams | GetRowsOffsetParams;
// A cached block of rows plus its absolute start offset within the result set.
interface CachedChunk {
    offset: number;
    rows: Array<DataRow>;
}
// Chunk cache keyed by chunk index.
interface ChunkMap {
    [index: number]: CachedChunk;
}
export class EasyDataTable {
// Identifier of this table instance (assigned externally).
public id: string;
// Rows are fetched and cached in chunks of this size.
private _chunkSize: number = 1000;
constructor(options?: EasyDataTableOptions) {
    options = options || {};
    this._chunkSize = options.chunkSize || this._chunkSize;
    this._elasticChunks = options.elasticChunks || this._elasticChunks;
    this.loader = options.loader;
    this._columns = new DataColumnList();
    // Register any column descriptors passed in the options.
    if (options.columns) {
        for(const colDesc of options.columns) {
            this._columns.add(colDesc);
        }
    }
    // Seed the table with any inline row data.
    if (options.rows) {
        for (const rowData of options.rows) {
            const row = this.createRow(rowData);
            this.addRow(row);
        }
    }
    // In elastic mode the total is discovered lazily, so don't request it.
    this.needTotal = !this._elasticChunks;
}
private _columns: DataColumnList;
// Column metadata for this table.
public get columns() : DataColumnList {
    return this._columns;
}
// Size (in rows) of one cached/loaded chunk.
public get chunkSize(): number {
    return this._chunkSize;
}
public set chunkSize(value: number) {
    // Changing the chunk size invalidates the cache and the known total.
    this._chunkSize = value;
    this.total = 0;
    this.needTotal = !this.elasticChunks;
    this.chunkMap = {};
}
private _elasticChunks = false;
// "Elastic" mode: the total size is unknown up front; getRows over-fetches
// one extra row per request to detect the end of the data.
public get elasticChunks(): boolean {
    return this._elasticChunks;
}
public set elasticChunks(value: boolean) {
    // Switching modes invalidates the cache and the known total.
    this._elasticChunks = value;
    this.total = 0;
    this.needTotal = !this.elasticChunks;
    this.chunkMap = {};
}
// object keeps number keys sorted
private chunkMap: ChunkMap = {};
// Total row count of the underlying result set (0 until determined or set).
private total: number = 0;
// Optional loader used to fetch chunks that are not cached yet.
private loader?: DataLoader | null = null;
// Whether the next loader request must also ask for the total row count.
private needTotal = true;
/**
 * Returns the rows in the requested window, serving from the chunk cache
 * when possible and otherwise delegating to the loader.
 * Accepts either page-based ({page, pageSize}) or offset-based
 * ({offset, limit?}) parameters; with no params, returns the first chunk.
 * Throws (a plain string, pre-existing contract) when uncached rows are
 * requested and no loader is configured.
 */
public getRows(params?: GetRowsParams): Promise<Array<DataRow>> {
    // Resolve the requested half-open window [fromIndex, endIndex).
    let fromIndex = 0, count = this._chunkSize;
    if (params) {
        if ('page' in params) {
            fromIndex = params.pageSize * (params.page - 1);
            count = params.pageSize
        }
        else {
            fromIndex = params.offset;
            // BUG FIX: `limit` is optional; previously an omitted limit made
            // `count` undefined, so endIndex became NaN and the chunk math
            // below broke. Keep the one-chunk default instead.
            count = params.limit !== undefined ? params.limit : count;
        }
    }
    let endIndex = fromIndex + count; //the first index of the next page
    //if we don't calculate total on this request
    if (!this.needTotal && !this.elasticChunks) {
        if (fromIndex >= this.total) {
            return Promise.resolve([]);
        }
        if (endIndex > this.total) {
            endIndex = this.total;
        }
    }
    // Chunk indices covering the requested window.
    const lbChunk = Math.trunc(fromIndex / this._chunkSize);
    const ubChunk = Math.trunc((endIndex - 1) / this._chunkSize);
    let allChunksCached = true;
    for (let i = lbChunk; i <= ubChunk; i++) {
        if (!this.chunkMap[i]) {
            allChunksCached = false;
            break;
        }
    }
    // Fast path: serve the window entirely from the cache.
    if (allChunksCached) {
        let resultArr: DataRow[] = [];
        for (let i = lbChunk; i <= ubChunk; i++) {
            resultArr = resultArr.concat(this.chunkMap[i].rows)
        }
        const firstChunkOffset = this.chunkMap[lbChunk].offset;
        return Promise.resolve(
            resultArr.slice(fromIndex - firstChunkOffset, endIndex - firstChunkOffset)
        );
    }
    //if loader is not defined
    if (!this.loader) {
        // NOTE(review): throws a plain string — kept for backward
        // compatibility with any caller that matches on it.
        throw `Loader is not defined. Can't get the rows from ${fromIndex} to ${endIndex}`;
    }
    // The total is only requested on the very first load.
    const needTotal = this.needTotal;
    this.needTotal = false;
    let limit = this._chunkSize * (ubChunk - lbChunk + 1);
    // In elastic mode fetch one probe row beyond the request to detect
    // whether more data exists.
    if (this.elasticChunks)
        limit++;
    return this.loader.loadChunk({
        offset: lbChunk * this._chunkSize,
        limit: limit,
        needTotal: needTotal
    })
    .then(result => {
        const chunks = result.table.getCachedChunks();
        if (needTotal) {
            this.total = result.total;
            if (endIndex > this.total) {
                endIndex = this.total;
            }
        }
        // Merge the freshly loaded chunks into the cache.
        let index = lbChunk;
        for (const chunk of chunks) {
            this.chunkMap[index] = {
                offset: index * this._chunkSize,
                rows: chunk.rows
            }
            index++;
        }
        if (this.elasticChunks) {
            const count = result.table.getCachedCount();
            if (count > 0) {
                const chunk = this.chunkMap[index - 1];
                if (count === limit) {
                    // Drop the probe row fetched beyond the requested limit.
                    chunk.rows.splice(chunk.rows.length - 1, 1);
                    if (chunk.rows.length == 0) {
                        delete this.chunkMap[index - 1];
                    }
                }
                if (count < limit) {
                    // Short read: we have reached the end of the data.
                    this.total = this.getCachedCount();
                }
            }
        }
        let resultArr: DataRow[] = [];
        for (let i = lbChunk; i <= ubChunk; i++) {
            resultArr = resultArr.concat(this.chunkMap[i].rows)
        }
        const firstChunkOffset = this.chunkMap[lbChunk].offset;
        return resultArr.slice(fromIndex - firstChunkOffset, endIndex - firstChunkOffset);
    });
}
/** Fetches a single row by absolute index, or null when out of range. */
public getRow(index: number): Promise<DataRow | null> {
    return this.getRows({ offset: index, limit: 1 }).then((rows) => {
        if (rows.length === 0) {
            return null;
        }
        return rows[0];
    });
}
/** Last known total row count (0 when unknown; see totalIsKnown()). */
public getTotal(): number {
return this.total;
}
/**
 * Explicitly sets the total row count and clears the chunk cache, so
 * no extra "count" request will be made to the loader.
 */
public setTotal(total : number) : void {
this.total = total;
this.needTotal = false;
this.chunkMap = {};
}
/** Number of rows currently held across all cached chunks. */
public getCachedCount(): number {
return this.getCachedRows().length;
}
/** Drops all cached rows, columns and totals; chunk settings survive. */
public clear() {
    this.total = 0;
    this.chunkMap = {};
    this.needTotal = !this._elasticChunks;
    this.columns.clear();
}
/**
 * Builds a DataRow from either another DataRow or a plain object keyed
 * by column id. Values in date-typed columns are normalized via
 * mapDate; missing columns stay undefined.
 */
protected createRow(dataOrRow?: DataRow | any): DataRow {
    const dateIndexes = this._columns.getDateColumnIndexes();
    const values: any[] = new Array(this._columns.count);
    if (dataOrRow) {
        // Read through DataRow's accessor or plain property access.
        const readValue = dataOrRow instanceof DataRow
            ? (colId) => dataOrRow.getValue(colId)
            : (colId) => dataOrRow[colId];
        for (const column of this.columns.getItems()) {
            const index = this.columns.getIndex(column.id);
            const raw = readValue(column.id);
            values[index] = dateIndexes.indexOf(index) >= 0
                ? this.mapDate(raw, column.type)
                : raw;
        }
    }
    return new DataRow(this._columns, values);
}
/**
 * Converts a raw cell value to a Date.
 * Falls back to utils.strToTime for time-typed columns whose value the
 * Date constructor cannot parse. Returns null for empty/absent values —
 * the return type now says so (the original declared `Date` yet
 * returned null, which lies under strictNullChecks).
 */
private mapDate(value: any, dtype: DataType): Date | null {
    if (!value) {
        return null;
    }
    let result = new Date(value);
    // Date() fails on bare time strings; let the utils parser try.
    if (isNaN(result.getTime()) && dtype === DataType.Time) {
        result = utils.strToTime(value);
    }
    return result;
}
/**
 * Appends a row (given either as a plain value array or as row-like
 * data) to the last cached chunk, creating a new chunk when the last
 * one is full. Date-typed cells are normalized via mapDate.
 * Fix: a caller-supplied value array is no longer mutated in place —
 * date conversion happens on a shallow copy.
 * @returns the DataRow that was stored.
 */
public addRow(rowOrValue: any[] | DataRow) : DataRow {
    let newRow: DataRow;
    if (Array.isArray(rowOrValue)) {
        // Copy so the caller's array keeps its original (raw) values.
        const values = rowOrValue.slice();
        for (const idx of this._columns.getDateColumnIndexes()) {
            if (values[idx]) {
                values[idx] = this.mapDate(values[idx], this._columns.get(idx).type);
            }
        }
        newRow = new DataRow(this._columns, values);
    }
    else {
        newRow = this.createRow(rowOrValue);
    }
    let lastChunk = this.getLastChunk();
    if (!lastChunk || lastChunk.rows.length >= this._chunkSize) {
        lastChunk = this.createChunk();
    }
    lastChunk.rows.push(newRow);
    // Appending past the known end grows the total.
    const cachedTotal = this.getCachedCount();
    if (cachedTotal > this.total) {
        this.total = cachedTotal;
    }
    return newRow;
}
/**
 * Registers an empty chunk at the given index (or right after the
 * current last chunk) and returns it.
 */
protected createChunk(index?: number): CachedChunk {
    const chunkIndex = typeof index === 'undefined'
        ? this.getLastChunkIndex() + 1
        : index;
    const newChunk: CachedChunk = {
        offset: chunkIndex * this._chunkSize,
        rows: []
    };
    this.chunkMap[chunkIndex] = newChunk;
    return newChunk;
}
/**
 * Index of the highest cached chunk, or -1 when the cache is empty.
 * Relies on JS objects keeping integer-like keys in ascending order.
 */
private getLastChunkIndex(): number {
    const indexes = Object.keys(this.chunkMap);
    if (indexes.length === 0) {
        return -1;
    }
    return parseInt(indexes[indexes.length - 1], 10);
}
/** The highest-index cached chunk, or null when nothing is cached. */
private getLastChunk(): CachedChunk | null {
    const lastIndex = this.getLastChunkIndex();
    if (lastIndex < 0) {
        return null;
    }
    return this.chunkMap[lastIndex];
}
/** All cached rows, concatenated in chunk order, as a new array. */
public getCachedRows(): DataRow[] {
    const rows: DataRow[] = [];
    for (const chunk of this.getCachedChunks()) {
        rows.push(...chunk.rows);
    }
    return rows;
}
/** All cached chunks in ascending index order (object key order). */
public getCachedChunks(): CachedChunk[] {
return Object.values(this.chunkMap);
}
/**
 * Whether the total row count is definitive: for elastic chunks, only
 * when everything has been cached; otherwise, once the loader (or
 * setTotal) has reported it.
 */
public totalIsKnown() : boolean {
    if (this.elasticChunks) {
        return this.getCachedCount() === this.total;
    }
    return !this.needTotal;
}
} | the_stack |
import Action from "@tasit/action";
import GnosisSafe from "./GnosisSafe";
import Account from "@tasit/account";
import TasitContracts from "@tasit/contracts";
import helpers from "@tasit/test-helpers";
// Token-standard contract wrappers (ERC20/ERC721) from the Action package.
const { standards } = Action;
const { ERC20, ERC721 } = standards;
// Test helpers: balance assertions, faucets, block mining and a shared
// provider factory.
const {
constants,
mineBlocks,
accounts,
bigNumberify,
expectMinimumEtherBalances,
expectExactTokenBalances,
expectExactEtherBalances,
ProviderFactory,
erc20Faucet,
erc721Faucet,
etherFaucet,
} = helpers;
// Addresses of the locally-deployed contracts under test.
const { local } = TasitContracts;
const { GnosisSafe: GnosisSafeInfo, MyERC20, MyERC721 } = local;
const { address: GNOSIS_SAFE_ADDRESS } = GnosisSafeInfo;
const { address: ERC20_ADDRESS } = MyERC20;
const { address: NFT_ADDRESS } = MyERC721;
const { ZERO, ONE } = constants;
const SMALL_AMOUNT = bigNumberify(`${1e17}`); // 0.1 ethers
const provider = ProviderFactory.getProvider();
// Integration tests for the GnosisSafe contract-based account wrapper:
// ETH / ERC20 / NFT transfers, multi-signer thresholds and allowances.
// NOTE(review): `sinon` is referenced below but no import is visible in
// this file — presumably provided globally by the test setup; confirm.
describe("GnosisSafe", () => {
let minter;
let gnosisSafeOwner;
let ephemeralAccount;
let someone;
let gnosisSafe;
let erc20;
let nft;
beforeAll(async () => {
// That account is funded with ethers and is owner of all token contracts deployed
[minter] = accounts;
gnosisSafeOwner = accounts[9];
ephemeralAccount = Account.create();
someone = Account.create();
// Contract deployment setup with john (accounts[9]) as the only owner
// To change that, edit the file "contract/3rd-parties/gnosis/scripts/2_deploy_contracts.ts"
gnosisSafe = new GnosisSafe(GNOSIS_SAFE_ADDRESS);
erc20 = new ERC20(ERC20_ADDRESS);
nft = new ERC721(NFT_ADDRESS);
});
// Every test starts from a known balance/account state.
beforeEach(async () => {
await provider.ready;
const { address: someoneAddress } = someone;
// Expect an already-funded Gnosis Safe wallet
await expectMinimumEtherBalances(provider, [GNOSIS_SAFE_ADDRESS], [ONE]);
await expectExactTokenBalances(erc20, [GNOSIS_SAFE_ADDRESS], [ZERO]);
await expectExactTokenBalances(nft, [GNOSIS_SAFE_ADDRESS], [ZERO]);
await expectExactEtherBalances(provider, [someoneAddress], [ZERO]);
await expectExactTokenBalances(erc20, [someoneAddress], [ZERO]);
await expectExactTokenBalances(nft, [someoneAddress], [ZERO]);
// Reset signers/accounts so no test leaks credentials into the next.
gnosisSafe.setSigners([]);
gnosisSafe.removeAccount();
erc20.removeAccount();
nft.removeAccount();
});
// Tests must unsubscribe every listener they register.
afterEach(async () => {
expect(erc20.subscribedEventNames()).toHaveLength(0);
expect(gnosisSafe.subscribedEventNames()).toHaveLength(0);
expect(provider._events).toHaveLength(0);
});
describe("test cases that need ETH deposit to the contract-based account", () => {
it("contract-based account should send ETHs to someone", async () => {
const { address: toAddress } = someone;
gnosisSafe.setSigners([gnosisSafeOwner]);
gnosisSafe.setAccount(gnosisSafeOwner);
const action = gnosisSafe.transferEther(toAddress, ONE);
await action.send();
await action.waitForOneConfirmation();
await expectExactEtherBalances(provider, [toAddress], [ONE]);
});
describe("test cases with more than one signer", () => {
// contract-based accounts' owner should add an account as signer
beforeEach(async () => {
const { address: ownerAddress } = gnosisSafeOwner;
const ownersBefore = await gnosisSafe.getOwners();
expect(ownersBefore).toEqual([ownerAddress]);
const thresholdBefore = await gnosisSafe.getThreshold();
expect(`${thresholdBefore}`).toBe(`1`);
const { address: newSignerAddress } = ephemeralAccount;
const newThreshold = `2`;
gnosisSafe.setSigners([gnosisSafeOwner]);
gnosisSafe.setAccount(gnosisSafeOwner);
const action = gnosisSafe.addSignerWithThreshold(
newSignerAddress,
newThreshold
);
await action.send();
await action.waitForOneConfirmation();
const ownersAfter = await gnosisSafe.getOwners();
expect(ownersAfter).toEqual([newSignerAddress, ownerAddress]);
const thresholdAfter = await gnosisSafe.getThreshold();
expect(`${thresholdAfter}`).toBe(newThreshold);
}
);
it(
"shouldn't be able to execute transfer with insufficient signers",
async () => {
const balanceBefore = await provider.getBalance(GNOSIS_SAFE_ADDRESS);
const { address: toAddress } = someone;
// Only one signer set, but threshold is 2 — the action must fail.
gnosisSafe.setSigners([gnosisSafeOwner]);
gnosisSafe.setAccount(gnosisSafeOwner);
const action = gnosisSafe.transferEther(toAddress, ONE);
const errorListener = sinon.fake((error) => {
const { message } = error;
console.info(message);
action.unsubscribe();
});
// Note: Some error events are being triggered only from the confirmationListener
// See more: https://github.com/tasitlabs/tasit-sdk/issues/253
const confirmationListener = () => {
// do nothing
};
action.on("error", errorListener);
action.on("confirmation", confirmationListener);
await action.send();
await action.waitForOneConfirmation();
await mineBlocks(provider, 1);
expect(errorListener.callCount).toBe(1);
// The safe's balance must be untouched.
await expectExactEtherBalances(
provider,
[GNOSIS_SAFE_ADDRESS],
[balanceBefore]
);
}
);
});
});
describe("test cases that need ERC20 deposit to the contract-based account", () => {
// faucet
beforeEach(async () => {
await erc20Faucet(erc20, minter, GNOSIS_SAFE_ADDRESS, ONE);
await expectExactTokenBalances(erc20, [GNOSIS_SAFE_ADDRESS], [ONE]);
});
it(
"contract-based account should send ERC20 tokens to someone",
async () => {
const tokenAddress = ERC20_ADDRESS;
const { address: toAddress } = someone;
gnosisSafe.setSigners([gnosisSafeOwner]);
gnosisSafe.setAccount(gnosisSafeOwner);
const action = gnosisSafe.transferERC20(tokenAddress, toAddress, ONE);
await action.send();
await action.waitForOneConfirmation();
await expectExactTokenBalances(erc20, [GNOSIS_SAFE_ADDRESS], [ZERO]);
await expectExactTokenBalances(erc20, [toAddress], [ONE]);
}
);
describe("spending by an ephemeral account", () => {
// ethers to the ephemeral account pay for gas
beforeEach(async () => {
const { address: ephemeralAddress } = ephemeralAccount;
await etherFaucet(provider, minter, ephemeralAddress, SMALL_AMOUNT);
await expectExactEtherBalances(
provider,
[ephemeralAddress],
[SMALL_AMOUNT]
);
});
// Uses the done-callback style because the assertion happens inside
// the error listener, after send() fails asynchronously.
it(
"ephemeral account shouldn't be able to transfer funds from contract-based account without allowance",
(done) => {
(async () => {
const balanceBefore = await erc20.balanceOf(GNOSIS_SAFE_ADDRESS);
const { address: toAddress } = someone;
gnosisSafe.setAccount(ephemeralAccount);
const action = erc20.transferFrom(
GNOSIS_SAFE_ADDRESS,
toAddress,
ONE
);
const errorListener = sinon.fake(async (error) => {
const { message } = error;
console.info(message);
action.unsubscribe();
await expectExactTokenBalances(
erc20,
[GNOSIS_SAFE_ADDRESS],
[balanceBefore]
);
done();
});
const confirmationListener = () => {
done(new Error());
};
action.on("error", errorListener);
action.on("confirmation", confirmationListener);
action.send();
})();
}
);
describe("test cases that need ERC20 spending approval for ephemeral account", () => {
// TODO: Move to @tasit/link-wallet
// contract-based accounts' owner should approve an ephemeral account to spend funds
beforeEach(async () => {
const tokenAddress = ERC20_ADDRESS;
const { address: spenderAddress } = ephemeralAccount;
gnosisSafe.setSigners([gnosisSafeOwner]);
gnosisSafe.setAccount(gnosisSafeOwner);
const action = gnosisSafe.approveERC20(
tokenAddress,
spenderAddress,
SMALL_AMOUNT
);
await action.send();
await action.waitForOneConfirmation();
await mineBlocks(provider, 1);
const amountAllowed = await erc20.allowance(
GNOSIS_SAFE_ADDRESS,
spenderAddress
);
expect(`${amountAllowed}`).toBe(`${SMALL_AMOUNT}`);
}
);
it(
"ephemeral account should transfer allowed funds from the contract-based account",
async () => {
const balanceBefore = await erc20.balanceOf(GNOSIS_SAFE_ADDRESS);
const { address: toAddress } = someone;
erc20.setAccount(ephemeralAccount);
const action = erc20.transferFrom(
GNOSIS_SAFE_ADDRESS,
toAddress,
SMALL_AMOUNT
);
await action.send();
await action.waitForOneConfirmation();
await mineBlocks(provider, 1);
const expectedBalance = balanceBefore.sub(SMALL_AMOUNT);
await expectExactTokenBalances(
erc20,
[GNOSIS_SAFE_ADDRESS],
[expectedBalance]
);
await expectExactTokenBalances(erc20, [toAddress], [SMALL_AMOUNT]);
}
);
});
});
});
describe("test cases that need NFT deposit to the contract-based account", () => {
const tokenId = 1;
// faucet
beforeEach(async () => {
await erc721Faucet(nft, minter, GNOSIS_SAFE_ADDRESS, tokenId);
await expectExactTokenBalances(nft, [GNOSIS_SAFE_ADDRESS], [1]);
});
it("contract-based account should send NFT tokens to someone", async () => {
const tokenAddress = NFT_ADDRESS;
const { address: toAddress } = someone;
gnosisSafe.setSigners([gnosisSafeOwner]);
gnosisSafe.setAccount(gnosisSafeOwner);
const action = gnosisSafe.transferNFT(tokenAddress, toAddress, tokenId);
await action.send();
await action.waitForOneConfirmation();
await expectExactTokenBalances(nft, [GNOSIS_SAFE_ADDRESS], [ZERO]);
await expectExactTokenBalances(nft, [toAddress], [1]);
});
});
});
// Key code constants.
// Legacy `KeyboardEvent.keyCode` values (deprecated but still widely
// used for cross-browser keyboard handling).
export const KEY_CANCEL = 3;
export const KEY_HELP = 6;
export const KEY_BACK_SPACE = 8;
export const KEY_TAB = 9;
export const KEY_CLEAR = 12;
export const KEY_RETURN = 13;
export const KEY_SHIFT = 16;
export const KEY_CONTROL = 17;
export const KEY_ALT = 18;
export const KEY_PAUSE = 19;
export const KEY_CAPS_LOCK = 20;
export const KEY_ESCAPE = 27;
export const KEY_SPACE = 32;
export const KEY_PAGE_UP = 33;
export const KEY_PAGE_DOWN = 34;
export const KEY_END = 35;
export const KEY_HOME = 36;
export const KEY_LEFT = 37;
export const KEY_UP = 38;
export const KEY_RIGHT = 39;
export const KEY_DOWN = 40;
export const KEY_PRINTSCREEN = 44;
export const KEY_INSERT = 45;
export const KEY_DELETE = 46;
export const KEY_0 = 48;
export const KEY_1 = 49;
export const KEY_2 = 50;
export const KEY_3 = 51;
export const KEY_4 = 52;
export const KEY_5 = 53;
export const KEY_6 = 54;
export const KEY_7 = 55;
export const KEY_8 = 56;
export const KEY_9 = 57;
export const KEY_A = 65;
export const KEY_B = 66;
export const KEY_C = 67;
export const KEY_D = 68;
export const KEY_E = 69;
export const KEY_F = 70;
export const KEY_G = 71;
export const KEY_H = 72;
export const KEY_I = 73;
export const KEY_J = 74;
export const KEY_K = 75;
export const KEY_L = 76;
export const KEY_M = 77;
export const KEY_N = 78;
export const KEY_O = 79;
export const KEY_P = 80;
export const KEY_Q = 81;
export const KEY_R = 82;
export const KEY_S = 83;
export const KEY_T = 84;
export const KEY_U = 85;
export const KEY_V = 86;
export const KEY_W = 87;
export const KEY_X = 88;
export const KEY_Y = 89;
export const KEY_Z = 90;
export const KEY_LEFT_CMD = 91;
export const KEY_RIGHT_CMD = 92;
export const KEY_CONTEXT_MENU = 93;
export const KEY_NUMPAD0 = 96;
export const KEY_NUMPAD1 = 97;
export const KEY_NUMPAD2 = 98;
export const KEY_NUMPAD3 = 99;
export const KEY_NUMPAD4 = 100;
export const KEY_NUMPAD5 = 101;
export const KEY_NUMPAD6 = 102;
export const KEY_NUMPAD7 = 103;
export const KEY_NUMPAD8 = 104;
export const KEY_NUMPAD9 = 105;
export const KEY_MULTIPLY = 106;
export const KEY_ADD = 107;
export const KEY_SUBTRACT = 109;
export const KEY_DECIMAL = 110;
export const KEY_DIVIDE = 111;
export const KEY_F1 = 112;
export const KEY_F2 = 113;
export const KEY_F3 = 114;
export const KEY_F4 = 115;
export const KEY_F5 = 116;
export const KEY_F6 = 117;
export const KEY_F7 = 118;
export const KEY_F8 = 119;
export const KEY_F9 = 120;
export const KEY_F10 = 121;
export const KEY_F11 = 122;
export const KEY_F12 = 123;
export const KEY_F13 = 124;
export const KEY_F14 = 125;
export const KEY_F15 = 126;
export const KEY_F16 = 127;
export const KEY_F17 = 128;
export const KEY_F18 = 129;
export const KEY_F19 = 130;
export const KEY_F20 = 131;
export const KEY_F21 = 132;
export const KEY_F22 = 133;
export const KEY_F23 = 134;
export const KEY_F24 = 135;
export const KEY_NUM_LOCK = 144;
export const KEY_SCROLL_LOCK = 145;
export const KEY_SEMICOLON = 186;
export const KEY_EQUALS = 187;
export const KEY_COMMA = 188;
export const KEY_DASH = 189;
export const KEY_PERIOD = 190;
export const KEY_SLASH = 191;
export const KEY_BACK_QUOTE = 192;
export const KEY_OPEN_BRACKET = 219;
export const KEY_BACK_SLASH = 220;
export const KEY_CLOSE_BRACKET = 221;
export const KEY_QUOTE = 222;
// Key code constants specific to firefox only.
export const KEY_FIREFOX_ENTER = 14;
export const KEY_FIREFOX_SEMICOLON = 59;
export const KEY_FIREFOX_EQUALS = 61;
export const KEY_FIREFOX_SEPARATOR = 108;
export const KEY_FIREFOX_META = 224;
// Key values constants
// Available values for `KeyboardEvent.key` attribute.
// Note several aliases map to the same string (e.g. RETURN/ENTER,
// the NUMPAD digits, DASH/SUBTRACT) — `key` does not distinguish them.
export const VALUE_CANCEL = 'Cancel';
export const VALUE_HELP = 'Help';
export const VALUE_BACK_SPACE = 'Backspace';
export const VALUE_TAB = 'Tab';
export const VALUE_CLEAR = 'Clear';
export const VALUE_ENTER = 'Enter';
export const VALUE_RETURN = 'Enter';
export const VALUE_SHIFT = 'Shift';
export const VALUE_CONTROL = 'Control';
export const VALUE_ALT = 'Alt';
export const VALUE_PAUSE = 'Pause';
export const VALUE_CAPS_LOCK = 'CapsLock';
export const VALUE_ESCAPE = 'Escape';
export const VALUE_SPACE = ' ';
export const VALUE_PAGE_UP = 'PageUp';
export const VALUE_PAGE_DOWN = 'PageDown';
export const VALUE_END = 'End';
export const VALUE_HOME = 'Home';
export const VALUE_LEFT = 'ArrowLeft';
export const VALUE_UP = 'ArrowUp';
export const VALUE_RIGHT = 'ArrowRight';
export const VALUE_DOWN = 'ArrowDown';
export const VALUE_PRINTSCREEN = 'PrintScreen';
export const VALUE_INSERT = 'Insert';
export const VALUE_DELETE = 'Delete';
export const VALUE_0 = '0';
export const VALUE_1 = '1';
export const VALUE_2 = '2';
export const VALUE_3 = '3';
export const VALUE_4 = '4';
export const VALUE_5 = '5';
export const VALUE_6 = '6';
export const VALUE_7 = '7';
export const VALUE_8 = '8';
export const VALUE_9 = '9';
export const VALUE_A = 'a';
export const VALUE_B = 'b';
export const VALUE_C = 'c';
export const VALUE_D = 'd';
export const VALUE_E = 'e';
export const VALUE_F = 'f';
export const VALUE_G = 'g';
export const VALUE_H = 'h';
export const VALUE_I = 'i';
export const VALUE_J = 'j';
export const VALUE_K = 'k';
export const VALUE_L = 'l';
export const VALUE_M = 'm';
export const VALUE_N = 'n';
export const VALUE_O = 'o';
export const VALUE_P = 'p';
export const VALUE_Q = 'q';
export const VALUE_R = 'r';
export const VALUE_S = 's';
export const VALUE_T = 't';
export const VALUE_U = 'u';
export const VALUE_V = 'v';
export const VALUE_W = 'w';
export const VALUE_X = 'x';
export const VALUE_Y = 'y';
export const VALUE_Z = 'z';
export const VALUE_META = 'Meta';
export const VALUE_LEFT_CMD = 'Meta';
export const VALUE_RIGHT_CMD = 'Meta';
export const VALUE_CONTEXT_MENU = 'ContextMenu';
export const VALUE_NUMPAD0 = '0';
export const VALUE_NUMPAD1 = '1';
export const VALUE_NUMPAD2 = '2';
export const VALUE_NUMPAD3 = '3';
export const VALUE_NUMPAD4 = '4';
export const VALUE_NUMPAD5 = '5';
export const VALUE_NUMPAD6 = '6';
export const VALUE_NUMPAD7 = '7';
export const VALUE_NUMPAD8 = '8';
export const VALUE_NUMPAD9 = '9';
export const VALUE_MULTIPLY = '*';
export const VALUE_ADD = '+';
export const VALUE_SUBTRACT = '-';
export const VALUE_DECIMAL = '.';
export const VALUE_DIVIDE = '/';
export const VALUE_F1 = 'F1';
export const VALUE_F2 = 'F2';
export const VALUE_F3 = 'F3';
export const VALUE_F4 = 'F4';
export const VALUE_F5 = 'F5';
export const VALUE_F6 = 'F6';
export const VALUE_F7 = 'F7';
export const VALUE_F8 = 'F8';
export const VALUE_F9 = 'F9';
export const VALUE_F10 = 'F10';
export const VALUE_F11 = 'F11';
export const VALUE_F12 = 'F12';
export const VALUE_F13 = 'F13';
export const VALUE_F14 = 'F14';
export const VALUE_F15 = 'F15';
export const VALUE_F16 = 'F16';
export const VALUE_F17 = 'F17';
export const VALUE_F18 = 'F18';
export const VALUE_F19 = 'F19';
export const VALUE_F20 = 'F20';
export const VALUE_F21 = 'F21';
export const VALUE_F22 = 'F22';
export const VALUE_F23 = 'F23';
export const VALUE_F24 = 'F24';
export const VALUE_NUM_LOCK = 'NumLock';
export const VALUE_SCROLL_LOCK = 'ScrollLock';
export const VALUE_SEMICOLON = ';';
export const VALUE_EQUALS = '=';
export const VALUE_COMMA = ',';
export const VALUE_DASH = '-';
export const VALUE_PERIOD = '.';
export const VALUE_SLASH = '/';
export const VALUE_BACK_QUOTE = '`';
export const VALUE_OPEN_BRACKET = '[';
export const VALUE_BACK_SLASH = '\\';
export const VALUE_CLOSE_BRACKET = ']';
export const VALUE_QUOTE = "'";
// Key code constants
// Available values for `KeyboardEvent.code` attribute.
// `code` identifies the physical key position, so left/right variants
// (Shift, Control, Alt, Meta) are distinct here.
export const CODE_UNIDENTIFIED = 'Unidentified';
export const CODE_ESCAPE = 'Escape';
export const CODE_MINUS = 'Minus';
export const CODE_DASH = 'Minus';
export const CODE_EQUALS = 'Equal';
export const CODE_BACK_SPACE = 'Backspace';
export const CODE_TAB = 'Tab';
export const CODE_ENTER = 'Enter';
export const CODE_RETURN = 'Enter';
export const CODE_SHIFT_LEFT = 'ShiftLeft';
export const CODE_SHIFT_RIGHT = 'ShiftRight';
export const CODE_CONTROL_LEFT = 'ControlLeft';
export const CODE_CONTROL_RIGHT = 'ControlRight';
export const CODE_ALT_LEFT = 'AltLeft';
export const CODE_ALT_RIGHT = 'AltRight';
export const CODE_PAUSE = 'Pause';
export const CODE_CAPS_LOCK = 'CapsLock';
export const CODE_SPACE = 'Space';
export const CODE_PAGE_UP = 'PageUp';
export const CODE_PAGE_DOWN = 'PageDown';
export const CODE_END = 'End';
export const CODE_HOME = 'Home';
export const CODE_LEFT = 'ArrowLeft';
export const CODE_UP = 'ArrowUp';
export const CODE_RIGHT = 'ArrowRight';
export const CODE_DOWN = 'ArrowDown';
export const CODE_PRINTSCREEN = 'PrintScreen';
export const CODE_INSERT = 'Insert';
export const CODE_DELETE = 'Delete';
export const CODE_0 = 'Digit0';
export const CODE_1 = 'Digit1';
export const CODE_2 = 'Digit2';
export const CODE_3 = 'Digit3';
export const CODE_4 = 'Digit4';
export const CODE_5 = 'Digit5';
export const CODE_6 = 'Digit6';
export const CODE_7 = 'Digit7';
export const CODE_8 = 'Digit8';
export const CODE_9 = 'Digit9';
export const CODE_A = 'KeyA';
export const CODE_B = 'KeyB';
export const CODE_C = 'KeyC';
export const CODE_D = 'KeyD';
export const CODE_E = 'KeyE';
export const CODE_F = 'KeyF';
export const CODE_G = 'KeyG';
export const CODE_H = 'KeyH';
export const CODE_I = 'KeyI';
export const CODE_J = 'KeyJ';
export const CODE_K = 'KeyK';
export const CODE_L = 'KeyL';
export const CODE_M = 'KeyM';
export const CODE_N = 'KeyN';
export const CODE_O = 'KeyO';
export const CODE_P = 'KeyP';
export const CODE_Q = 'KeyQ';
export const CODE_R = 'KeyR';
export const CODE_S = 'KeyS';
export const CODE_T = 'KeyT';
export const CODE_U = 'KeyU';
export const CODE_V = 'KeyV';
export const CODE_W = 'KeyW';
export const CODE_X = 'KeyX';
export const CODE_Y = 'KeyY';
export const CODE_Z = 'KeyZ';
// OSLeft/OSRight are the legacy (pre-spec) names some browsers report
// for MetaLeft/MetaRight.
export const CODE_META_LEFT = 'MetaLeft';
export const CODE_OS_LEFT = 'OSLeft';
export const CODE_META_RIGHT = 'MetaRight';
export const CODE_OS_RIGHT = 'OSRight';
export const CODE_CONTEXT_MENU = 'ContextMenu';
export const CODE_NUMPAD0 = 'Numpad0';
export const CODE_NUMPAD1 = 'Numpad1';
export const CODE_NUMPAD2 = 'Numpad2';
export const CODE_NUMPAD3 = 'Numpad3';
export const CODE_NUMPAD4 = 'Numpad4';
export const CODE_NUMPAD5 = 'Numpad5';
export const CODE_NUMPAD6 = 'Numpad6';
export const CODE_NUMPAD7 = 'Numpad7';
export const CODE_NUMPAD8 = 'Numpad8';
export const CODE_NUMPAD9 = 'Numpad9';
export const CODE_NUMPAD_MULTIPLY = 'NumpadMultiply';
export const CODE_NUMPAD_ADD = 'NumpadAdd';
export const CODE_NUMPAD_SUBTRACT = 'NumpadSubtract';
export const CODE_NUMPAD_DECIMAL = 'NumpadDecimal';
export const CODE_NUMPAD_DIVIDE = 'NumpadDivide';
export const CODE_NUMPAD_ENTER = 'NumpadEnter';
export const CODE_F1 = 'F1';
export const CODE_F2 = 'F2';
export const CODE_F3 = 'F3';
export const CODE_F4 = 'F4';
export const CODE_F5 = 'F5';
export const CODE_F6 = 'F6';
export const CODE_F7 = 'F7';
export const CODE_F8 = 'F8';
export const CODE_F9 = 'F9';
export const CODE_F10 = 'F10';
export const CODE_F11 = 'F11';
export const CODE_F12 = 'F12';
export const CODE_F13 = 'F13';
export const CODE_F14 = 'F14';
export const CODE_F15 = 'F15';
export const CODE_F16 = 'F16';
export const CODE_F17 = 'F17';
export const CODE_F18 = 'F18';
export const CODE_F19 = 'F19';
export const CODE_F20 = 'F20';
export const CODE_F21 = 'F21';
export const CODE_F22 = 'F22';
export const CODE_F23 = 'F23';
export const CODE_F24 = 'F24';
export const CODE_NUM_LOCK = 'NumLock';
export const CODE_SCROLL_LOCK = 'ScrollLock';
export const CODE_SEMICOLON = 'Semicolon';
export const CODE_COMMA = 'Comma';
export const CODE_PERIOD = 'Period';
export const CODE_SLASH = 'Slash';
export const CODE_BACK_QUOTE = 'Backquote';
export const CODE_OPEN_BRACKET = 'BracketLeft';
export const CODE_BACK_SLASH = 'Backslash';
export const CODE_CLOSE_BRACKET = 'BracketRight';
export const CODE_QUOTE = 'Quote';
// Install a one-slot "on CRM loaded" registration hook unless the host
// page already provided one.
window.CRMLoaded = window.CRMLoaded || {
listener: null,
register: (fn) => {
window.CRMLoaded.listener = fn;
}
}
// Writable CSS property names (string keys of CSSStyleDeclaration).
type StyleString = Extract<keyof CSSStyleDeclaration, string>;
window.CRMLoaded.register(() => {
//Because of a chrome bug that causes it to freeze
// when handling at least this regex string
// /((.+,(\s+))*(.+)+){(((\s+)(.*)(\s+))+)}/
// the regex engine cannot be applied, so this
// will have to do
// Naive CSS splitter: breaks a stylesheet into `selector { body }`
// pairs by cutting on '}' (a regex-based parse froze Chrome, per the
// note above, so this stays string-based).
function findStylesheetBlocks(text: string) {
    const parsedRules: {
        elements: string;
        ruleText: string;
    }[] = [];
    const blocks = text.split('}');
    for (const block of blocks.slice(0, -1)) {
        const braceAt = block.indexOf('{');
        parsedRules.push({
            elements: block.slice(0, braceAt).trim(),
            ruleText: block.slice(braceAt + 1).trim()
        });
    }
    return parsedRules;
}
/**
 * Coerces a calc term to a number.
 * Fix: the old hand-rolled parse (`~~int + ~~frac / 10`) only handled a
 * single fractional digit — "1.25" evaluated to 3.5 — and broke on
 * negatives. parseFloat handles both; unparseable input (e.g. the empty
 * string produced by splitTerms for a leading sign) still yields 0,
 * matching the old behavior.
 */
function makeNum(str: string|number): number {
    if (typeof str === 'number') {
        return str;
    }
    const parsed = parseFloat(str);
    return isNaN(parsed) ? 0 : parsed;
}
// Evaluates a tokenized arithmetic expression (numbers and '+','-','*',
// '/' tokens). Precedence is enforced by reduction order: each '*' or
// '/' is collapsed recursively before addition/subtraction is handled.
function calc(terms: (string|number)[]): number {
let index;
if ((index = terms.indexOf('*')) > -1) {
// Collapse `a * b` into a single number and recurse on the rest.
return calc(terms.slice(0, index - 1).concat([
makeNum(terms[index - 1]) * makeNum(terms[index + 1])
]).concat(terms.slice(index + 2)));
} else if ((index = terms.indexOf('/')) > -1) {
return calc(terms.slice(0, index - 1).concat([
makeNum(terms[index - 1]) / makeNum(terms[index + 1])
]).concat(terms.slice(index + 2)));
} else if (terms.indexOf('-') > -1 || terms.indexOf('+') > -1) {
// Normalize subtraction to addition of a negated term, then sum.
terms.forEach((term, index) => {
if (term === '-') {
terms[index + 1] = -1 * makeNum(terms[index + 1]);
terms[index] = '+';
}
});
if (terms.length === 0) {
return 0;
} else if (terms.length === 1) {
return makeNum(terms[0]);
} else {
// '+' tokens coerce to 0 via makeNum, so a plain sum works.
return terms.reduce((prevValue, currentValue) => {
return makeNum(prevValue) + makeNum(currentValue);
}) as number;
}
} else {
return makeNum(terms[0]);
}
}
// Recursively tokenizes an arithmetic string into operand/operator
// pieces, splitting on the first '+', then '-', '*', '/' (same search
// order as before). Operands keep their surrounding whitespace.
function splitTerms(str: string): string[] {
    for (const op of ['+', '-', '*', '/']) {
        const at = str.indexOf(op);
        if (at > -1) {
            return splitTerms(str.slice(0, at))
                .concat([op], splitTerms(str.slice(at + 1)));
        }
    }
    return [str];
}
// Evaluates an arithmetic expression string, resolving innermost-found
// parenthesized groups first (greedy match), then tokenizing and
// folding the flat expression through calc().
function calculate(str: string): number {
const parenthesesRegex = /\((.*)\)/;
let match = null;
if ((match = parenthesesRegex.exec(str))) {
// Replace the parenthesized group with its computed value and retry.
return calculate(str.replace(match[0], calculate(match[1]) + ''));
} else {
return calc(splitTerms(str).map((term) => {
return term.trim();
}));
}
}
/**
 * Lazy cache of every queryable root: the document element plus all
 * open shadow roots found beneath it (built once per page load).
 */
let allRoots: (HTMLElement|ShadowRoot)[] = null;
function getAllRoots() {
if (allRoots) {
return allRoots;
}
const docEl = document.documentElement;
allRoots = [docEl as HTMLElement|ShadowRoot].concat(getRoots(docEl.children as any));
return allRoots;
}
// Runs querySelectorAll against every root (document + shadow roots)
// and flattens the resulting NodeLists into one array.
function querySelectorEverything(selector: string) {
return flatten<HTMLElement>(getAllRoots().map((root: HTMLElement) => {
return root.querySelectorAll(selector);
}) as any);
}
// Depth-first walk: invokes fn on the node itself, then recurses into
// each child (pre-order).
function traverseDom(node: HTMLElement, fn: (node: HTMLElement) => void) {
    fn(node);
    const kids = node.children;
    if (!kids) {
        return;
    }
    for (let idx = 0; idx < kids.length; idx++) {
        traverseDom(kids[idx] as HTMLElement, fn);
    }
}
// Walks every given subtree and collects each open shadow root found.
function getRoots(children: HTMLElement[]) {
    const collected: ShadowRoot[] = [];
    for (let idx = 0; idx < children.length; idx++) {
        traverseDom(children[idx], (visited) => {
            if (visited.shadowRoot) {
                collected.push(visited.shadowRoot);
            }
        });
    }
    return collected;
}
// Flattens one level of nesting. Inner groups are indexed by .length so
// array-likes (e.g. NodeLists) work too, not just true arrays.
function flatten<T>(arr: T[][]): T[] {
    const flat: T[] = [];
    for (const group of arr) {
        for (let k = 0; k < group.length; k++) {
            flat.push(group[k]);
        }
    }
    return flat;
}
/**
 * Resolves a space-separated, shadow-piercing selector: matches the
 * first part everywhere, then matches each subsequent part inside the
 * previous matches' shadow roots (or the elements themselves).
 * Fix: both branches of the shadowRoot check returned
 * `node.shadowRoot`, so shadow-less matches became null and blew up in
 * the following querySelectorAll; the else branch now falls back to
 * the element itself.
 */
function breakdownSelector(selector: string) {
    const parts = selector.split(' ');
    let current = querySelectorEverything(parts[0]);
    for (let i = 1; i < parts.length; i++) {
        current = flatten<HTMLElement>(current.map((node: HTMLElement) => {
            if (node.shadowRoot) {
                return node.shadowRoot;
            }
            return node;
        }).map((node: HTMLElement|ShadowRoot) => {
            return node.querySelectorAll(parts[i]);
        }) as any);
    }
    return current;
}
// Snapshots a live HTMLCollection into a plain array.
function toArr(els: HTMLCollection): HTMLElement[] {
    return Array.prototype.slice.call(els);
}
// Whether the browser supports CSS calc() natively (detected below).
let calcSupported: boolean;
// Registry of expressions to re-evaluate on resize when calc() is
// polyfilled.
const toUpdate: {
elements: HTMLElement[];
calculation:string;
key: StyleString;
}[] = [];
// Public hook: apply `calc(calcValue)` to element.style[prop], using
// native calc() when available and the JS fallback otherwise.
// `disable` removes a previously registered fallback entry.
window.addCalcFn = (element: HTMLElement, prop: StyleString, calcValue: string, disable: boolean) => {
if (calcSupported && !disable && prop !== 'length' && prop !== 'parentRule') {
(element.style as any)[prop] = 'calc(' + calcValue + ')';
return;
}
//Check if it already has an object defined on it
for (let i = 0; i < toUpdate.length; i++) {
if (toUpdate[i].elements.indexOf(element) > -1 &&
toUpdate[i].key === prop) {
toUpdate.splice(i, 1);
break;
}
}
if (disable) {
return;
}
const calcObject = {
elements: [element],
calculation: calcValue,
key: prop
};
toUpdate.push(calcObject);
// Apply immediately; resize events re-run updateCalcs() later.
updateCalcs();
};
/**
 * Re-evaluates every registered calc() expression against the current
 * viewport size and writes the result back as a pixel value.
 * Fix: String#replace with a string pattern only rewrites the FIRST
 * occurrence, so expressions using a unit more than once (e.g.
 * "50vw + 10vw") were miscomputed — global regexes rewrite them all.
 */
function updateCalcs() {
    toUpdate.forEach((calcToUpdate) => {
        const resolved = calculate(calcToUpdate.calculation
            .replace(/vw/g, ' * ' + window.innerWidth / 100)
            .replace(/vh/g, ' * ' + window.innerHeight / 100)
            .replace(/em/g, ' * 16')
            .replace(/px/g, ''));
        calcToUpdate.elements.forEach((element) => {
            if (calcToUpdate.key !== 'length' && calcToUpdate.key !== 'parentRule') {
                (element.style as any)[calcToUpdate.key] = resolved + 'px';
            }
        });
    });
}
// Feature-detect native calc() support; when absent, run a pipeline of
// nested IIFEs that scrapes the page's stylesheets, finds vh/vw-based
// calc() rules, evaluates them in JS and applies pixel values.
(() => {
// Detection: if the browser understood calc(), the style has length.
const el = document.createElement('div');
el.style.cssText = 'width: calc(100vw - 100px)';
calcSupported = !!el.style.length;
if (!calcSupported) {
// Stage 5 (outermost): apply each extracted calc rule to its
// matched elements and register viewport-relative ones for resize.
(((calcs) => {
calcs.forEach((calc) => {
// Convert the dashed CSS property name to camelCase.
let key = calc.key;
let dashIndex;
while ((dashIndex = key.indexOf('-')) > -1) {
key = key.slice(0, dashIndex) +
key[dashIndex + 1].toUpperCase() +
key.slice(dashIndex + 2) as StyleString;
}
calc.elements = breakdownSelector(calc.elements as string);
if (!calc.elements) {
return null;
}
const elements = toArr(calc.elements as any);
if (calc.calculation.indexOf('vh') > -1 || calc.calculation.indexOf('vw') > -1) {
toUpdate.push({
calculation: calc.calculation,
elements: elements,
key: key
});
}
// NOTE(review): string-pattern replace only rewrites the first
// unit occurrence — same limitation updateCalcs() had.
const calcString = calc.calculation
.replace('vw', ' * ' + window.innerWidth / 100)
.replace('vh', ' * ' + window.innerHeight / 100)
.replace('em', ' * 16')
.replace('px', '');
const result = calculate(calcString);
elements.forEach((el) => {
if (key !== 'length' && key !== 'parentRule') {
(el.style as any)[key] = result + 'px';
}
});
});
window.onresize = () => {
updateCalcs();
}
// Stage 4: pick out the viewport-relative calc() declarations.
})(((stylesheetBlocks) => {
const calculations: {
elements: string|HTMLElement[];
key: StyleString;
calculation: string;
}[] = [];
const calcRegex = /calc\((.+)\)/;
stylesheetBlocks.forEach((stylesheetBlock) => {
stylesheetBlock && stylesheetBlock.rules && stylesheetBlock.rules.forEach((stylesheetRule) => {
let match = null;
if ((match = calcRegex.exec(stylesheetRule.value))) {
if (match[1].indexOf('vh') > -1 || match[1].indexOf('vw') > -1) {
calculations.push({
elements: stylesheetBlock.elements,
key: stylesheetRule.key,
calculation: match[1]
});
}
}
});
});
return calculations;
// Stage 3: parse each relevant block's body into key/value rules.
})(((stylesheetBlocks) => {
const cssRuleRegex = /(\s*)((\w|-)+)(\s*):(\s*)((\w|%|\/|\*|\+|\(|\)|-|,|\s)+);(\s*)/;
return stylesheetBlocks.map((stylesheetBlock) => {
// Skip blocks that can't contain a viewport-relative calc().
if (stylesheetBlock.ruleText.indexOf('calc') === -1 ||
(stylesheetBlock.ruleText.indexOf('vh') === -1 && stylesheetBlock.ruleText.indexOf('vw') === -1)) {
return null;
}
const rules: {
key: StyleString;
value: string;
}[] = [];
const blockMatch = stylesheetBlock.ruleText.match(/(\s*)((\w|-)+)(\s*):(\s*)((\w|%|\/|\*|\+|\(|\)|-|,|\s)+);(\s*)/g);
if (blockMatch) {
blockMatch.forEach((matchedString) => {
const match = matchedString.match(cssRuleRegex)
rules.push({
key: match[2] as StyleString,
value: match[6]
});
});
return {
rules: rules,
elements: stylesheetBlock.elements
};
}
return null;
}).filter((block) => block !== null);
// Stage 2: split each stylesheet's text into selector/body blocks.
})(((stylesheets) => {
let stylesheetBlocks: {
elements: string;
ruleText: string;
}[] = [];
stylesheets.forEach((stylesheetContent) => {
if (stylesheetContent.indexOf('calc(') === -1 ||
(stylesheetContent.indexOf('vh') === -1 && stylesheetContent.indexOf('vw') === -1)) {
return null;
}
stylesheetBlocks = stylesheetBlocks.concat(findStylesheetBlocks(stylesheetContent));
});
return stylesheetBlocks;
// Stage 1: collect stylesheet text — inline <style> contents, plus
// linked sheets fetched synchronously (this path only runs in old
// browsers without calc() support).
})(((linkElements: (HTMLLinkElement|HTMLStyleElement)[]) => {
return linkElements.map((linkElement) => {
if (linkElement.tagName === 'STYLE') {
return linkElement.textContent;
} else {
const xhr = new XMLHttpRequest;
xhr.open('GET', linkElement.sheet.href, false);
xhr.send();
if (xhr.status === 200) {
return xhr.responseText;
} else {
return '';
}
}
});
})(querySelectorEverything('style, link[rel="stylesheet"]') as (HTMLLinkElement|HTMLStyleElement)[]))))))
}
el.remove();
})();
}); | the_stack |
import { Electronic } from '../base';
import { LinePath } from './path';
import { LineData, LinePin, LinePinStatus, LineStructuredData } from './types';
import { ElectronicKind, Context } from '../types';
import { debug } from '@circuit/debug';
import { MarkNodeKind, MarkMapNode, Label } from '@circuit/map';
import { PointLike, Point } from '@circuit/math';
import { isLine } from '@circuit/shared';
import { isBoolean, isUndef, delay, ConstructorParameters } from '@xiao-ai/utils';
import type { Part } from '../part';
export * from './types';
export * from './path';
/**
 * Wire element of the circuit diagram. A wire is a polyline (`LinePath`)
 * whose two endpoints (pins 0 and 1) carry connection records to parts
 * or other wires, and whose points are mirrored onto the schematic
 * mark-map for hit-testing and connectivity queries.
 */
export class Line extends Electronic {
constructor(paths: PointLike[] = [], context?: Context) {
super(ElectronicKind.Line, context);
this.path = LinePath.from(paths);
}
private _path!: LinePath;
private _points: LinePinStatus[] = [];
/** Wire path. */
get path() {
return this._path;
}
set path(val: LinePath) {
this._path = val;
// Keep the endpoint records in sync whenever the path changes.
this.updatePoints();
}
/** Status of the wire's two endpoint pins. */
get points() {
return this._points;
}
// Rebuild the two endpoint records (index 0 = start, 1 = end) from the
// current path and connection state, then refresh the view.
protected updatePoints() {
if (this.path.length === 0) {
this._points = [];
}
// NOTE(review): when the path is empty the loop below still runs and
// writes two entries with undefined positions — looks like a missing
// early return after clearing _points; confirm against callers.
for (let i = 0; i < 2; i++) {
// i * (length - 1) selects path[0] for the start and the last
// point for the end.
const position = this.path[i * (this.path.length - 1)];
const oldData = this._points[i];
this._points[i] = {
...oldData,
index: i,
position,
status: this.connections[i].getStatus(),
};
}
this.updateView();
}
/** Create a new wire. */
create(...args: ConstructorParameters<typeof Line>) {
return new Line(...args);
}
/**
 * Split another wire at one of this wire's endpoints.
 * @param {string} id id of the wire being split
 * @param {LinePin} pin which endpoint of the current wire is the split point
 */
split(id: string, pin: LinePin.Start | LinePin.End) {
/** The wire being split. */
const splitLine = this.find<Line>(id)!;
/** Coordinate of the split point. */
const crossPoint = Point.from(this.path.get(-1 * pin));
/** Index of the segment of the split wire that contains the split point. */
const crossSub = (() => {
for (let i = 0; i < splitLine.path.length - 1; i++) {
if (crossPoint.isInLine([splitLine.path[i], splitLine.path[i + 1]])) {
return i;
}
}
})();
if (isUndef(crossSub)) {
throw new Error(`分割导线失败`);
}
// Remove all map marks of the wire being split
splitLine.deleteMark();
// Create the new wire (placeholder path, replaced below)
const newLine = this.create([[1e6, 1e6]]);
// The new wire's end takes over the split wire's old end connection
newLine.setDeepConnection(1, splitLine.connections[1]);
// The new wire's start connects to the split wire's end and to the
// wire that caused the split
newLine.setDeepConnection(0, [
{
id: splitLine.id,
mark: 1,
},
{
id: this.id,
mark: pin,
},
]);
// Update the split wire's end connection accordingly
splitLine.setDeepConnection(1, [
{
id: newLine.id,
mark: 0,
},
{
id: this.id,
mark: pin,
},
]);
// New wire path runs from the cross point to the split wire's old end
newLine.path = LinePath.from(splitLine.path.slice(crossSub + 1));
newLine.path.unshift(crossPoint);
newLine.path.removeRepeat();
// Split wire path becomes the part from its start to the cross point
splitLine.path = LinePath.from(splitLine.path.slice(0, crossSub + 1));
splitLine.path.push(crossPoint);
splitLine.path.removeRepeat();
// Refresh endpoint records
this.updatePoints();
splitLine.updatePoints();
newLine.updatePoints();
// Re-mark the schematic map
this.setMark();
splitLine.setMark();
newLine.setMark();
// Refresh views
this.updateView();
splitLine.updateView();
newLine.updateView();
}
/** Merge the wire with the given id into this one (paths share an endpoint). */
concat(id: string) {
const line = this.find<Line>(id);
if (!line) {
throw new Error(`导线合并失败,未发现编号:'${id}'的导线。`);
}
/** Which endpoint of this wire touches the other wire (0 = start, 1 = end). */
let crossIndex: 0 | 1 = 0;
// Orient the other wire so the shared endpoint lines up before joining
if (this.path[0].isEqual(line.path[0])) {
line.reverse();
crossIndex = 0;
}
else if (this.path[0].isEqual(line.path.get(-1))) {
crossIndex = 0;
}
else if (this.path.get(-1).isEqual(line.path.get(-1))) {
line.reverse();
crossIndex = 1;
}
else {
crossIndex = 1;
}
this.setConnection(crossIndex, line.connections[crossIndex]);
this.path = LinePath.from(
crossIndex === 0
? line.path.concat(this.path)
: this.path.concat(line.path)
).removeRepeat();
line.delete();
this.updateView();
this.setMark();
}
/** Mark every point along this wire's path on the schematic map. */
setMark() {
const { path, map } = this;
let last: MarkMapNode | undefined;
let current: MarkMapNode | undefined;
for (const point of path.forEachPoint()) {
current = map.get(point);
// -1 for interior points; 0/1 when the point is one of the endpoints.
const index = this.points.findIndex((item) => item.position.isEqual(point));
if (!current) {
current = map.set({
position: point,
id: this.id,
mark: index,
});
}
else if (current.kind === MarkNodeKind.PartPin) {
// Strip wire labels, keeping only part connections
current.labels = current.labels.filter((item) => !isLine(item.id)) as Label;
current.labels.add(this.id, index);
}
else if (current.isLine) {
// Strip labels already recorded for this wire before re-adding
current.labels = current.labels.filter((item) => item.id !== this.id) as Label;
current.labels.add(this.id, index);
}
else {
const info = `Illegal point(${point.join(',')}): ${MarkNodeKind[current.kind]}`;
if (process.env.NODE_ENV === 'development') {
debug.point(point, 'red');
throw new Error(info);
}
else {
console.error(info);
return;
}
}
// Link consecutive path points so the map records adjacency.
if (last && current) {
last.connections.add(current.position);
current.connections.add(last.position);
}
last = current;
}
}
/** Remove this wire's marks from the schematic map. */
deleteMark() {
const { id, path, map } = this;
let lastPoint: Point | undefined;
let lastNode: MarkMapNode | undefined;
let current: MarkMapNode | undefined;
for (const point of path.forEachPoint()) {
current = map.get(point);
const index = this.points.findIndex((item) => item.position.isEqual(point));
// Sever adjacency between this point and the previous one.
if (lastNode) {
lastNode.connections.delete(point);
}
if (current && lastPoint) {
current.connections.delete(lastPoint);
}
if (!current) {
continue;
}
// Plain wire point: remove it from the map entirely
if (
current.kind === MarkNodeKind.Line ||
current.kind === MarkNodeKind.LineSpacePoint
) {
map.delete(point);
}
// Cross / cover / part-pin node: remove only this wire's label,
// deleting the node when no labels remain
else if (
current.kind === MarkNodeKind.LineCoverPoint ||
current.kind === MarkNodeKind.LineCrossPoint ||
current.kind === MarkNodeKind.PartPin
) {
current.labels.delete(id, index);
if (!current.labels.value) {
map.delete(point);
}
}
lastPoint = point;
lastNode = current;
}
}
/**
 * Set both end connections from the wire's path.
 * @param {boolean} [concat=true] whether to merge floating wires
 */
setConnectionByPath(concat?: boolean): void;
/**
 * Set one end connection from the wire's path.
 * @param {LinePin} [pin] the pin to set
 * @param {boolean} [concat=true] whether to merge floating wires
 */
setConnectionByPath(pin?: LinePin.Start | LinePin.End, concat?: boolean): void;
setConnectionByPath(pin?: LinePin.Start | LinePin.End | boolean, concat = true) {
// Boolean / missing first argument means "both pins".
if (isBoolean(pin)) {
this.setConnectionByPath(LinePin.Start, pin);
this.setConnectionByPath(LinePin.End, pin);
return;
}
else if (isUndef(pin)) {
this.setConnectionByPath(0);
this.setConnectionByPath(1);
return;
}
const index = pin === LinePin.Start ? 0 : 1;
const node = this.path.get(-1 * index).round();
const status = this.map.get(node);
// Endpoint is empty: clear the connection
if (!status) {
this.setConnection(index);
}
// Endpoint is a part pin: connect directly to the part
else if (status.kind === MarkNodeKind.PartPin) {
const label = status.labels.value!;
const part = this.find<Part>(label.id)!;
const mark = label.mark!;
this.setDeepConnection(index, {
id: part.id,
mark: mark,
});
}
// Endpoint lies on another wire's body
else if (status.kind === MarkNodeKind.Line) {
if (this.hasConnection(status.labels.value!.id, status.labels.value!.mark)) {
/**
 * `setConnectByPin` may be followed by further actions,
 * so deletion is deferred by one update cycle.
 */
delay().then(() => this.delete());
}
else {
this.split(status.labels.value!.id, pin);
}
}
// Endpoint is another wire's free pin
else if (status.kind === MarkNodeKind.LineSpacePoint) {
const { id, mark } = status.labels.value!;
// Merging allowed: absorb the other wire
if (concat) {
this.concat(id);
}
// Merging not allowed: the point becomes a cross node
else {
const line = this.find<Line>(id)!;
this.setDeepConnection(index, {
id: line.id,
mark: mark,
});
line.updateView();
this.updateView();
}
}
// Endpoint is at a cross node
else if (status.kind === MarkNodeKind.LineCrossPoint) {
// Exclude this wire's own label from the candidates
const restLabels = status.labels.filter((label) => {
return label.id !== this.id || label.mark !== index;
});
// Only one other wire remains: merge with it
if (concat && restLabels.length === 1) {
this.concat(restLabels[0].id);
}
else {
// Otherwise every wire at the node connects to all the others.
const allLabels = status.labels.toData().concat({
id: this.id,
mark: pin,
});
status.labels.forEach(({ id, mark }) => {
const line = this.find<Line>(id);
if (!line) {
throw new Error(`导线不存在:${id}`);
}
const lineConnection = allLabels.filter((item) => item.id !== id || item.mark !== mark);
line.setDeepConnection(mark, lineConnection);
});
}
}
}
/** Reverse the wire: swap start/end connections, path order, and map marks. */
reverse() {
const oldConnections = this.connections.map((item) => item.toData());
this.setDeepConnection(0, oldConnections[1]);
this.setDeepConnection(1, oldConnections[0]);
this.path.reverse();
// [old end, old start]
const points = [this.path[0], this.path.get(-1)];
// Swap the endpoint marks recorded on the map
for (let i = 0; i < 2; i++) {
const data = this.map.get(points[i]);
if (data) {
data.labels.delete(this.id, 1 - i);
data.labels.add(this.id, i);
}
}
this.updatePoints();
}
/** Export serializable data. */
toData(): Required<LineData> {
return {
kind: 'Line',
path: this.path.toData(),
};
}
/** Export structured data including id and connections. */
toStructuredData(): LineStructuredData {
return {
id: this.id,
kind: ElectronicKind.Line,
path: this.path.toData(),
connections: this.connections.map((item) => item.toData()),
};
}
}
import { Seq } from "../src/Seq";
import { LinkedList } from "../src/LinkedList";
import { Vector } from "../src/Vector";
import { HashMap } from "../src/HashMap";
import { HashSet } from "../src/HashSet";
import { Stream } from "../src/Stream";
import { Option } from "../src/Option";
import { Predicate } from "../src/Predicate";
import { instanceOf, typeOf } from "../src/Comparison";
import { MyClass, MySubclass } from "./SampleData";
import { assertFailCompile } from "./TestHelpers";
import * as CollectionTest from './Collection';
require("mocha-testcheck").install();
const { gen, check} = require("mocha-testcheck");
import * as assert from 'assert'
/**
 * Shared contract test-suite for Seq implementations (Vector,
 * LinkedList, Stream). Each concrete collection's spec file calls this
 * with its own factories so every implementation is checked against the
 * same expectations.
 *
 * @param seqName display name used in describe() titles and in the
 *     expected toString() output
 * @param ofIterable factory building a Seq from any iterable
 * @param of variadic factory building a Seq from its arguments
 * @param empty factory producing an empty Seq
 * @param unfoldRight anamorphism constructor under test
 * @param nonEmptySeqName_ optional name of the non-empty variant, used
 *     in expected compile-error messages (defaults to seqName)
 * @hidden
 */
export function runTests(seqName: string,
ofIterable: <T>(i:Iterable<T>)=>Seq<T>,
of: <T>(...i:Array<T>)=>Seq<T>,
empty: <T>()=>Seq<T>,
unfoldRight: <T,U>(seed: T, fn: (x:T)=>Option<[U,T]>)=>Seq<U>,
nonEmptySeqName_?:string) {
const nonEmptySeqName = nonEmptySeqName_ || seqName;
// Generic Collection contract tests also apply to every Seq.
CollectionTest.runTests(seqName, of, empty);
describe(seqName + " creation", () => {
it("handles of() without parameters ok", () => assert.deepEqual(
[], of().toArray()));
it("creates from a JS array", () => assert.deepEqual(
["a","b", "c"],
ofIterable<string>(["a","b","c"]).toArray()));
it("creates from a spread", () => assert.deepEqual(
["a","b", "c"],
of("a","b","c").toArray()));
it("creates also with nulls", () => assert.deepEqual(
[1, null, 2], of(1, null, 2).toArray()));
it("supports ofIterable", () => assert.deepEqual(
[1,2,3], ofIterable([1,2,3]).toArray()));
// need to test ofIterable with a real iterable as well
// as an array -- vector for instance has an optimized codepath
// for arrays so we need to give a real iterable too for full coverage.
it("supports ofIterable from real iterable", () => assert.deepEqual(
[1,2,3], ofIterable(Stream.of(1,2,3)).toArray()));
it("supports of", () => assert.deepEqual(
[1,2,3], of(1,2,3).toArray()));
it("supports unfoldRight", () => assert.deepEqual(
[10,9,8,7,6,5,4,3,2,1], unfoldRight(
10, x=>Option.of(x)
.filter(x => x!==0)
.map<[number,number]>(x => [x,x-1])).toArray()));
});
describe(seqName + " prepend", () => {
const basic = of(1,2,3,4);
const prepended = of(2,3,4).prepend(1);
it("prepends correctly", () => assert.ok(basic.equals(prepended)));
it("converts to array correctly", () => assert.deepEqual(
basic.toArray(), prepended.toArray()));
it("appends correctly after prepend", () => assert.ok(
basic.append(5).equals(prepended.append(5))));
it("appendsAll correctly after prepend", () => assert.ok(
basic.appendAll([5,6]).equals(prepended.appendAll([5,6]))));
it("converts to string correctly after prepend", () => assert.equal(
basic.toString(), prepended.toString()));
it("prependsAll correctly", () => assert.deepEqual(
[1,2,3,4,5], of(4,5).prependAll([1,2,3]).toArray()));
});
describe(seqName + " Value tests", () => {
it("has non-obviously-broken equals", () => assert.ok(
of("a","b","c").equals(of("a", "b", "c"))));
it("doesn't throw when given another type on equals", () => assert.equal(
false, of(1).equals(<any>[1,2])));
it("doesn't throw when given null on equals", () => assert.equal(
false, of(1).equals(<any>null)));
it("is strict with equality", () => assert.ok(
!of(1,2).equals(of(1, <any>undefined))));
it("supports contain", () => assert.ok(
of(1,2,3).contains(2)));
it("should throw when using contains without true equality", () => assert.throws(
() => of(Option.of([1])).contains(Option.of([1]))));
it("rejects contain", () => assert.ok(
!of(1,2,3).contains(4)));
it("rejects contain, empty stream", () => assert.ok(
!empty().contains(4)));
it("supports contains, custom equality", () => assert.ok(
of(new MyClass("hi", 3)).contains(new MyClass("hi", 3))));
it("supports allMatch, positive case", () => assert.ok(
of(2,4,8).allMatch(x => x%2 === 0)));
it("supports allMatch, negative case", () => assert.ok(
!of(2,5,8).allMatch(x => x%2 === 0)));
it("supports allMatch, empty stream", () => assert.ok(
empty<number>().allMatch(x => x%2 === 0)));
it("supports anyMatch, positive case", () => assert.ok(
of(3,5,8).anyMatch(x => x%2 === 0)));
it("supports anyMatch, negative case", () => assert.ok(
!of(3,5,9).anyMatch(x => x%2 === 0)));
it("supports anyMatch, empty stream", () => assert.ok(
!empty<number>().anyMatch(x => x%2 === 0)));
// The next three checks compile a snippet and assert the compiler
// rejects it — they verify equality-related type guards, not runtime
// behavior.
it("should fail compilation on an obviously bad equality test", () =>
assertFailCompile(
seqName + ".of([1]).equals(" + seqName + ".of([1]))", "Argument of type \'" +
nonEmptySeqName + "<number[]>\' is not assignable to parameter"));
it("should fail compilation on an obviously bad contains test", () =>
assertFailCompile(
seqName + ".of([1]).contains([1])",
"Argument of type \'number[]\' is not assignable to parameter"));
it("should fail compilation trying to call removeAll without equality", () =>
assertFailCompile(
seqName + ".of([1]).removeAll([1])",
"Argument of type \'number[]\' is not assignable to parameter"));
});
describe(seqName + " iteration", () => {
// can get for..of in tests by changing the target to es6,
// or enabling downlevelIteration in the tsconfig.json,
// not doing that for now.
// it("supports for of", () => {
//     let total = 0;
//     for (const x of of(1,2,3)) {
//         total += x;
//     }
//     assert.equal(6, total);
// })
it("finds items", () =>
of(1,2,3).find(x => x >= 2).contains(2));
it("doesn't find if the predicate doesn't match", () =>
of(1,2,3).find(x => x >= 4).isNone());
it("folds correctly", () => assert.equal(
6, of(1,2,3).fold(0, (a,b)=>a+b)));
it("foldsLeft correctly", () => assert.equal(
"cba!",
of("a", "b", "c").foldLeft("!", (xs,x) => x+xs)));
it("foldsRight correctly", () => assert.equal(
"!cba",
of("a", "b", "c").foldRight("!", (x,xs) => xs+x)));
it("handles scanLeft correctly on an empty seq", () => assert.deepEqual(
[0], empty<number>().scanLeft(0, (i,j)=>i+j).toArray()));
it("handles scanRight correctly on an empty seq", () => assert.deepEqual(
[0], empty<number>().scanRight(0, (j,i)=>i+j).toArray()));
it("calls forEach correctly", () => {
let ar: number[] = [];
of(1,2,3).forEach((v:number) => ar.push(v));
assert.deepEqual([1,2,3], ar);
});
it("supports iterator", () => {
let total = 0;
const iterator = of(1,2,3)[Symbol.iterator]();
let curItem = iterator.next();
while (!curItem.done) {
total += curItem.value;
curItem = iterator.next();
}
assert.equal(6, total);
})
});
describe(seqName + " conversions", () => {
it("mkString ok for null too", () => assert.equal(
"null", of(null).mkString(", ")));
it("mkString works", () => assert.equal(
"1, 2, 3", of(1,2,3).mkString(", ")));
it("mkString works for strings too", () => assert.equal(
"a, b, c", of("a","b","c").mkString(", ")));
const nullSeq = of(null);
nullSeq.last(); // needed to force the stream to get loaded
it("toString ok for null too", () => assert.equal(
seqName + "(null)", nullSeq.toString()));
const onetwothree = of(1,2,3);
onetwothree.last(); // needed to force the stream to get loaded
it("toString works", () => assert.equal(
seqName + "(1, 2, 3)", onetwothree.toString()));
const abc = of("a","b","c");
abc.last(); // needed to force the stream to get loaded
it("toString works for strings too", () => assert.equal(
seqName + "('a', 'b', 'c')", abc.toString()));
it("handles circular structures on toString too", () => {
// a self-referencing object must not make toString loop forever
const a: any = {};
a.b = a;
assert.equal(
of({}).toString(), of(a).toString());
})
it("transforms to map", () => {
assert.ok(HashMap.empty<number,string>().put(1,"ok").put(2, "bad")
.equals(<HashMap<number,string>>of<[number,string]>([1,"ok"],[2,"bad"]).toMap(x => x)));
});
it("empty seq transforms to empty set", () => {
assert.ok(HashSet.empty<number>()
.equals(empty<number>().toSet(x => x)));
});
});
describe(seqName + " manipulation", () => {
it("computes the length correctly", () => assert.equal(
3, of(1,2,3).length()));
it("computes the length of the empty seq correctly", () => assert.equal(
0, empty().length()));
it("correctly drops right n items", () => assert.deepEqual(
[1,2,3,4], of(1,2,3,4,5,6).dropRight(2).toArray()));
it("returns an empty seq when dropping right too much", () => assert.deepEqual(
[], of(1,2).dropRight(3).toArray()));
it("doesn't modify the receiver upon drop", () => {
const x = of(1,2,3);
x.drop(2);
assert.deepEqual([1,2,3], x.toArray())
});
it("sortBy sorting works", () => assert.ok(
of(4,3,2,1)
.equals(of(1,2,3,4).sortBy((x,y) => y-x))));
it("sortOn sorting works", () => assert.ok(
of(1,2,3,4)
.equals(of(4,2,3,1).sortOn(x => x))));
it("sortOn sorting works2", () => assert.ok(
of(new MyClass("zz",1),
new MyClass("test", 2),
new MyClass("b",3))
.equals(of(new MyClass("test", 2),
new MyClass("zz", 1),
new MyClass("b",3)).sortOn(x => x.getField2()))));
it("sortOn sorting works with strings too", () => assert.ok(
of(1,2,3,4)
.equals(of(4,2,3,1).sortOn(x => x+""))));
it("sortOn sorting works with strings too 2", () => assert.ok(
of(new MyClass("b",3),
new MyClass("test", 2),
new MyClass("zz",1))
.equals(of(new MyClass("test", 2),
new MyClass("zz", 1),
new MyClass("b",3)).sortOn(x => x.getField1()))));
it("correctly reverses", () => assert.deepEqual(
[3,2,1], of(1,2,3).reverse().toArray()));
it("correctly reverses the empty vector", () => assert.deepEqual(
[], empty().reverse().toArray()));
it("correctly reverses also after prepend", () => assert.deepEqual(
[3,2,1], of(2,3).prepend(1).reverse().toArray()));
it("correctly partitions also after prepend", () => assert.deepEqual(
[[1,3,5,7],[2,4,6,8]],
of(2,3,4,5,6,7,8).prepend(1).partition(x => x%2!==0)
.map(v => v.toArray())));
it("correctly infers the more precise left and right type on partition in case of typeguard", () => {
// just checking that this compiles. 'charAt' is available on strings not numbers.
// 'toExponential' is available on numbers not strings.
const [a,b] = of<string|number>(1,"a")
.partition(typeOf("number"))
a.single().getOrThrow().toExponential(2);
b.single().getOrThrow().charAt(0);
});
it("correctly infers the more precise right type on partition in case of typeguard", () => {
// just checking that this compiles. 'charAt' is available on strings not numbers.
// 'toExponential' is available on numbers not strings.
const [a,b] = of<string|number>(1,"a")
.partition(typeOf("string"))
a.single().getOrThrow().charAt(0);
b.single().getOrThrow().toExponential(2);
});
it("zips with an array", () => assert.deepEqual(
[[1,"a"], [2,"b"]], of(1,2,3).zip(["a","b"]).toArray()));
it("zips with a stream", () => assert.deepEqual(
[["a",0], ["b",1]], of("a","b").zip(Stream.iterate(0,x=>x+1)).toArray()));
it("richer example", () => assert.deepEqual(
[[1,"a"],[2,"b"]], of(1,2,3)
.zip(["a", "b", "c"]).takeWhile(([k,v]) => k<3).toArray()));
it("flatMap works", () => assert.ok(
of(1,2,2,3,3,3,4,4,4,4)
.equals(of(1,2,3,4).flatMap(
x => ofIterable(Array.from(Array(x), ()=>x))))));
it("map works", () => assert.ok(
of(5,6,7).equals(of(1,2,3).map(x=>x+4))));
it("mapOption works", () => assert.deepEqual(
[3,5,7],of(1,2,3,4,5,6).mapOption(
x => x%2==0 ? Option.of(x+1):Option.none<number>()).toArray()));
it("supports append", () => assert.deepEqual(
[1,2,3,4], of(1,2,3).append(4).toArray()));
it("doesn't modify the receiver upon append", () => {
const x = of(1,2,3);
x.append(4);
assert.deepEqual([1,2,3], x.toArray())
});
it("supports appendAll", () => assert.deepEqual(
[1,2,3,4,5], of(1,2,3).appendAll([4,5]).toArray()));
it("doesn't modify the receiver upon appendAll", () => {
const x = of(1,2,3);
x.appendAll([4,5]);
assert.deepEqual([1,2,3], x.toArray())
});
it("supports appendAll of a linkedlist iterable", () => assert.deepEqual(
[1,2,3,4,5], of(1,2,3).appendAll(LinkedList.of(4,5)).toArray()));
it("doesn't modify the receiver upon appendAll of a linkedlist iterable", () => {
const x = of(1,2,3);
x.appendAll(LinkedList.of(4,5));
assert.deepEqual([1,2,3], x.toArray())
});
it("supports appendAll of a vector iterable", () => assert.deepEqual(
[1,2,3,4,5], of(1,2,3).appendAll(Vector.of(4,5)).toArray()));
it("doesn't modify the receiver upon appendAll of a vector iterable", () => {
const x = of(1,2,3);
x.appendAll(Vector.of(4,5));
assert.deepEqual([1,2,3], x.toArray())
});
it("supports take", () => assert.deepEqual(
[1,2,3], of(1,2,3,4,5,6).take(3).toArray()));
it("supports take with an index higher than the length", () =>
assert.deepEqual([1,2,3], of(1,2,3).take(6).toArray()));
it("supports take with a negative index", () =>
assert.deepEqual([], of(1,2,3).take(-1).toArray()));
it("doesn't modify the receiver upon take", () => {
const x = of(1,2,3);
x.take(2);
assert.deepEqual([1,2,3], x.toArray())
});
// property-based tests: check.it generates random inputs via testcheck
check.it("applying reverse twice is the identity", gen.array(gen.string), (ar:string[]) => {
assert.deepEqual(ofIterable(ar).reverse().reverse().toArray(),
ofIterable(ar).toArray());
});
check.it("calling take() on the seq is the same as on the array",
gen.array(gen.string), gen.int.suchThat((n:number) => !Number.isNaN(n)), (ar:number[],n:number) => {
assert.deepEqual(ofIterable(ar).take(n).toArray(),
ar.slice(0,Math.max(0,n)));
});
it("correctly implements sliding on a non-empty seq", () => {
assert.deepEqual([[1,2],[3,4],[5,6]], of(1,2,3,4,5,6).sliding(2).map(x => x.toArray()).toArray())
});
it("correctly implements sliding on a non-empty seq with length non-multiple of count", () => {
assert.deepEqual([[1,2,3],[4,5,6],[7]], of(1,2,3,4,5,6,7).sliding(3).map(x => x.toArray()).toArray())
});
it("correctly implements sliding on an empty seq", () => {
assert.deepEqual([], empty().sliding(2).map(x => x.toArray()).toArray())
});
});
describe(seqName + " filtering", () => {
it("filter works", () => assert.ok(
of(2,4)
.equals(of(1,2,3,4).filter(x => x%2 === 0))));
it("filter works with prepend", () => assert.ok(
of(2,4)
.equals(of(3,4).prepend(2).prepend(1).filter(x => x%2 === 0))));
it("filter upcasts if possible - instanceOf", () => {
// here we only want to check that the code does compile,
// that 'x' is correctly seen as MySubclass in the 3rd line
of<MyClass>(new MySubclass("a",1,"b"))
.filter(instanceOf(MySubclass))
.filter(x => x.getField3().length>1);
});
it("filter upcasts if possible - typeOf", () => {
// here we only want to check that the code does compile,
// that 'x' is correctly seen as string in the 3rd line
of<any>(1,3,"a","b")
.filter(typeOf("string"))
.filter(x => x.replace("x","").length > 0);
});
it("distinctBy", () => assert.deepEqual(
[1,2,3], of(1,1,2,3,2,3,1).distinctBy(x => x).toArray()));
it("distinctBy for the empty seq", () => assert.deepEqual(
[], empty<number>().distinctBy(x => x).toArray()));
it("distinctBy for a single value", () => assert.deepEqual(
[1], of(1).distinctBy(x => x).toArray()));
it("distinctBy, custom equality", () => assert.deepEqual(
[1,0,2], of(1,0,1,2,3,2,3,1).distinctBy(x => new MyClass("hi", x%3)).toArray()));
it("distinctBy with prepend", () => assert.deepEqual(
[1,2,3], of(2,3,2,3,1).prepend(1).distinctBy(x => x).toArray()));
it("correctly dropsWhile", () => assert.deepEqual(
[4,3,5,6], of(1,2,3,4,3,5,6).dropWhile(x=>x<4).toArray()));
it("correctly dropsWhile, only the first one is removed", () => assert.deepEqual(
[2,3,4,3,5,6], of(1,2,3,4,3,5,6).dropWhile(x=>x<2).toArray()));
it("correctly dropsWhile, nothing matches", () => assert.deepEqual(
[], of(1,2,3,4,3,5,6).dropWhile(x=>x>=0).toArray()));
it("correctly dropsWhile, everything matches", () => assert.deepEqual(
[1,2,3,4,3,5,6], of(1,2,3,4,3,5,6).dropWhile(x=>x<0).toArray()));
it("correctly dropsRightWhile", () => assert.deepEqual(
[1,4,2,3], of(1,4,2,3,4,5,6).dropRightWhile(x=>x>=4).toArray()));
it("correctly dropsRightWhile, only the last one is removed", () => assert.deepEqual(
[1,4,2,3,4,5], of(1,4,2,3,4,5,6).dropRightWhile(x=>x>5).toArray()));
it("correctly dropsRightWhile, nothing matches", () => assert.deepEqual(
[], of(1,4,2,3,4,5,6).dropRightWhile(x=>x>=0).toArray()));
it("correctly dropsRightWhile, everything matches", () => assert.deepEqual(
[1,4,2,3,4,5,6], of(1,4,2,3,4,5,6).dropRightWhile(x=>x<0).toArray()));
it("correctly drops n items", () => assert.deepEqual(
[4,5,6], of(1,2,3,4,5,6).drop(3).toArray()));
it("returns an empty stream when dropping too much", () => assert.deepEqual(
[], of(1,2).drop(3).toArray()));
it("calculates removeFirst well", () => assert.deepEqual(
[0,1,3,4], of(0,1,2,3,4).removeFirst(Predicate.isIn([3,2])).toArray()));
it("calculates removeFirst well event if item not present", () => assert.deepEqual(
[0,1,2,3,4], of(0,1,2,3,4).removeFirst(Predicate.equals(5)).toArray()));
it("calculates removeFirst from empty well", () => assert.ok(
empty<number>().equals(empty<number>().removeFirst(x => x === 3))));
it("calculates removeAll from empty well", () => assert.ok(
empty<number>().equals(empty<number>().removeAll([2,3]))));
it("handles a simple splitAt", () => assert.deepEqual(
[[1,2,3],[4,5]], of(1,2,3,4,5).splitAt(3).map(x=>x.toArray())));
it("handles a simple splitAt 2", () => assert.deepEqual(
[[1],[2,3]], of(1,2,3).splitAt(1).map(x=>x.toArray())));
it("handles splitAt with empty second list", () => assert.deepEqual(
[[1,2,3],[]], of(1,2,3).splitAt(3).map(x=>x.toArray())));
it("handles splitAt with empty first list", () => assert.deepEqual(
[[],[1,2,3]], of(1,2,3).splitAt(0).map(x=>x.toArray())));
it("handles splitAt with out of bounds 1", () => assert.deepEqual(
[[],[1,2,3]], of(1,2,3).splitAt(-1).map(x=>x.toArray())));
it("handles splitAt with out of bounds 2", () => assert.deepEqual(
[[1,2,3],[]], of(1,2,3).splitAt(4).map(x=>x.toArray())));
it("handles a regular span", () => assert.deepEqual(
[[1,2],[3,4,5]], of(1,2,3,4,5).span(i=>i<3).map(x=>x.toArray())));
it("handles a span matching everything", () => assert.deepEqual(
[[1,2,3,4,5],[]], of(1,2,3,4,5).span(i=>i<9).map(x=>x.toArray())));
it("handles a span matching nothing", () => assert.deepEqual(
[[],[1,2,3,4,5]], of(1,2,3,4,5).span(i=>i<0).map(x=>x.toArray())));
});
describe(seqName + " value extraction", () => {
it("get finds when present", () => assert.ok(
Option.of(5).equals(of(1,2,3,4,5,6).get(4))));
it("get finds when present after prepend", () => assert.ok(
Option.of(5).equals(of(2,3,4,5,6).prepend(1).get(4))));
it("get doesn't find when stream too short", () => assert.ok(
Option.none<number>().equals(of(1,2,3).get(4))));
it("get doesn't find when negative index", () => assert.ok(
Option.none<number>().equals(of(1,2,3).get(-1))));
it("correctly gets the tail of the empty vector", () => assert.ok(
empty().tail().isNone()));
it("correctly gets the tail of a simple vector", () => assert.ok(
of(2,3,4).equals(of(1,2,3,4).tail().getOrThrow())));
it("correctly gets the tail of a vector after prepend", () => assert.ok(
of(2,3,4).equals(of(2,3,4).prepend(1).tail().getOrThrow())));
it("gets the last value correctly", () => assert.equal(
3, of(1,2,3).last().getOrThrow()));
it("gets the last value correctly for an empty seq", () => assert.ok(
empty().last().isNone()));
it("correctly gets the last element also after prepend", () => assert.equal(
5, of(4,5).prependAll(of(1,2,3)).last().getOrUndefined()));
it("correctly gets the first element", () => assert.equal(
1, of(1,2,3,4,5).head().getOrUndefined()));
it("correctly gets the first element of an empty vector", () => assert.ok(
empty().head().isNone()));
it("correctly gets the first element also after prepend", () => assert.equal(
1, of(4,5).prependAll(of(1,2,3)).head().getOrUndefined()));
it("correct returns single positive case", () => assert.equal(
5, of(5).single().getOrThrow()));
it("correct returns single negative case", () => assert.ok(
of(5,6).single().isNone()));
it("correct returns single empty seq", () => assert.ok(
empty().single().isNone()));
});
describe(seqName + " supports shuffle", () => {
const original = of(1,2,3,4,5,6,7,2,9,10);
const shuffle1 = original.shuffle();
const shuffle2 = original.shuffle();
const shuffle3 = original.shuffle();
const shuffle4 = original.shuffle();
it("should conserve all the items1", () => assert.deepEqual(
original.sortOn(x=>x).toArray(), shuffle1.sortOn(x=>x).toArray()));
it("should conserve all the items2", () => assert.deepEqual(
original.sortOn(x=>x).toArray(), shuffle2.sortOn(x=>x).toArray()));
it("should conserve all the items3", () => assert.deepEqual(
original.sortOn(x=>x).toArray(), shuffle3.sortOn(x=>x).toArray()));
it("should conserve all the items4", () => assert.deepEqual(
original.sortOn(x=>x).toArray(), shuffle4.sortOn(x=>x).toArray()));
// four independent shuffles all matching the original is astronomically
// unlikely, so this is a (probabilistic) check that shuffle shuffles
it("should change the order of elements", () => assert.equal(false,
original.equals(shuffle1) &&
original.equals(shuffle2) &&
original.equals(shuffle3) &&
original.equals(shuffle4)))
});
const list = of({a:1, b:"aa"}, {a:2, b:"aa"}, {a:1, b:"ba"});
describe(seqName + " multiple criteria sorting", () => {
it ("sorts normally, descending on b", () => {
assert.deepEqual([{a:1,b:"ba"},{a:1,b:"aa"},{a:2,b:"aa"}], list.sortOn(x=>x.a,{desc:x=>x.b}).toArray());
});
it ("sorts normally ascending on b", () => {
assert.deepEqual([{a:1,b:"aa"},{a:1,b:"ba"},{a:2,b:"aa"}], list.sortOn(x=>x.a, x=>x.b).toArray());
});
it ("sorts normally descending on a", () => {
assert.deepEqual([{a:2,b:"aa"},{a:1,b:"aa"},{a:1,b:"ba"}], list.sortOn({desc:x=>x.a},x=>x.b).toArray());
});
it ("sorts normally descending on both", () => {
assert.deepEqual([{a:2,b:"aa"},{a:1,b:"ba"},{a:1,b:"aa"}], list.sortOn({desc:x=>x.a},{desc:x=>x.b}).toArray());
});
});
const list1 = of({a:1, b:true}, {a:2, b:false}, {a:1, b:false});
describe(seqName + " multiple criteria sorting (including booleans)", () => {
it ("sorts normally, descending on b", () => {
assert.deepEqual([{a:1,b:true},{a:1,b:false},{a:2,b:false}], list1.sortOn(x=>x.a,{desc:x=>x.b}).toArray());
});
it ("sorts normally ascending on b", () => {
assert.deepEqual([{a:1,b:false},{a:2,b:false},{a:1,b:true}], list1.sortOn(x=>x.b, x=>x.a).toArray());
});
});
}
describe("Seq fuzzer", () => {
const testsToRun = 2000;
const opsToRun = 64;
const randomArrayMaxLength = 256;
const getRandomArray = () => Stream.iterate(0,i=>i+1)
.take(Math.random()*randomArrayMaxLength)
.toArray();
const getRandomVal = () => Math.round(Math.random()*randomArrayMaxLength);
const fuzOps: (()=>[string,(x:Seq<number>)=>Seq<number>])[] = [
() => {
const arr = getRandomArray();
return ["prepend " + arr.length, s => s.prependAll(arr)];
},
() => ["reverse", s => s.reverse()],
() => {
const arr = getRandomArray();
return ["append " + arr.length, s => s.appendAll(arr)];
},
() => {
const count = getRandomVal();
return ["take " + count, s => s.take(count)];
},
() => {
const count = getRandomVal();
return ["drop " + count, s => s.drop(count)];
},
() => ["map *2", s => s.map(x=>x*2)],
() => {
const val = getRandomVal();
return ["filter >=" + val, s => <any>s.filter(x=>x>=val)];
}
];
it("should pass the fuzzer", () => {
for (let testIdx=0;testIdx<testsToRun;testIdx++) {
if (testIdx % 1000 === 0) {
console.log(`fuzzer: ran ${testIdx} tests`);
}
let vec: Seq<number> = Vector.empty<number>();
let llist: Seq<number> = LinkedList.empty<number>();
let stream: Seq<number> = Stream.empty<number>();
const ops:string[] = [];
for (let opIdx=0;opIdx<opsToRun;opIdx++) {
const opIdx = Math.round(Math.random()*(fuzOps.length-1));
const [opDesc, opFn] = fuzOps[opIdx]();
ops.push(opDesc);
const checkSeq = (desc: string, seq:Seq<number>, updateSeq:(val:Seq<number>)=>void) => {
const previousSeqAr = seq.toArray();
const previousSeq = seq;
try {
updateSeq(opFn(seq));
} catch (ex) {
console.error(`*** ${desc}: got exception`);
console.error(ops);
throw ex;
}
if (previousSeq.toArray().toString() !== previousSeqAr.toString()) {
console.error(`*** ${desc} BUG previous ${desc} was modified`);
console.error(ops);
assert.deepEqual(previousSeqAr, previousSeq.toArray());
}
};
checkSeq("Vector", vec, n=>{vec=n});
checkSeq("LinkedList", llist, n=>{llist=n});
checkSeq("Stream", stream, n=>{stream=n});
if (vec.toArray().toString() !== llist.toArray().toString()) {
console.error("*** vector/llist BUG");
console.error(ops);
assert.deepEqual(vec.toArray(), llist.toArray());
}
if (stream.toArray().toString() !== llist.toArray().toString()) {
console.error("*** stream/llist BUG");
console.error(ops);
assert.deepEqual(stream.toArray(), llist.toArray());
}
}
}
});
});
import * as msRest from "@azure/ms-rest-js";
import * as msRestAzure from "@azure/ms-rest-azure-js";
import * as Models from "../models";
import * as Mappers from "../models/privateEndpointConnectionsMappers";
import * as Parameters from "../models/parameters";
import { KeyVaultManagementClientContext } from "../keyVaultManagementClientContext";
/**
 * Class representing a PrivateEndpointConnections operation group.
 * NOTE(review): this follows the AutoRest-generated @azure/ms-rest-js pattern
 * (promise + callback overloads backed by operation specs) — prefer
 * regenerating over hand-editing.
 */
export class PrivateEndpointConnections {
  private readonly client: KeyVaultManagementClientContext;
  /**
   * Create a PrivateEndpointConnections.
   * @param {KeyVaultManagementClientContext} client Reference to the service client.
   */
  constructor(client: KeyVaultManagementClientContext) {
    this.client = client;
  }
  /**
   * Gets the specified private endpoint connection associated with the key vault.
   * @param resourceGroupName Name of the resource group that contains the key vault.
   * @param vaultName The name of the key vault.
   * @param privateEndpointConnectionName Name of the private endpoint connection associated with the
   * key vault.
   * @param [options] The optional parameters
   * @returns Promise<Models.PrivateEndpointConnectionsGetResponse>
   */
  get(resourceGroupName: string, vaultName: string, privateEndpointConnectionName: string, options?: msRest.RequestOptionsBase): Promise<Models.PrivateEndpointConnectionsGetResponse>;
  /**
   * @param resourceGroupName Name of the resource group that contains the key vault.
   * @param vaultName The name of the key vault.
   * @param privateEndpointConnectionName Name of the private endpoint connection associated with the
   * key vault.
   * @param callback The callback
   */
  get(resourceGroupName: string, vaultName: string, privateEndpointConnectionName: string, callback: msRest.ServiceCallback<Models.PrivateEndpointConnection>): void;
  /**
   * @param resourceGroupName Name of the resource group that contains the key vault.
   * @param vaultName The name of the key vault.
   * @param privateEndpointConnectionName Name of the private endpoint connection associated with the
   * key vault.
   * @param options The optional parameters
   * @param callback The callback
   */
  get(resourceGroupName: string, vaultName: string, privateEndpointConnectionName: string, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.PrivateEndpointConnection>): void;
  // Implementation signature: `options` may actually be the callback when the
  // two-callback overload is used; sendOperationRequest disambiguates.
  get(resourceGroupName: string, vaultName: string, privateEndpointConnectionName: string, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.PrivateEndpointConnection>, callback?: msRest.ServiceCallback<Models.PrivateEndpointConnection>): Promise<Models.PrivateEndpointConnectionsGetResponse> {
    return this.client.sendOperationRequest(
      {
        resourceGroupName,
        vaultName,
        privateEndpointConnectionName,
        options
      },
      getOperationSpec,
      callback) as Promise<Models.PrivateEndpointConnectionsGetResponse>;
  }
  /**
   * Updates the specified private endpoint connection associated with the key vault.
   * @param resourceGroupName Name of the resource group that contains the key vault.
   * @param vaultName The name of the key vault.
   * @param privateEndpointConnectionName Name of the private endpoint connection associated with the
   * key vault.
   * @param properties The intended state of private endpoint connection.
   * @param [options] The optional parameters
   * @returns Promise<Models.PrivateEndpointConnectionsPutResponse>
   */
  put(resourceGroupName: string, vaultName: string, privateEndpointConnectionName: string, properties: Models.PrivateEndpointConnection, options?: msRest.RequestOptionsBase): Promise<Models.PrivateEndpointConnectionsPutResponse>;
  /**
   * @param resourceGroupName Name of the resource group that contains the key vault.
   * @param vaultName The name of the key vault.
   * @param privateEndpointConnectionName Name of the private endpoint connection associated with the
   * key vault.
   * @param properties The intended state of private endpoint connection.
   * @param callback The callback
   */
  put(resourceGroupName: string, vaultName: string, privateEndpointConnectionName: string, properties: Models.PrivateEndpointConnection, callback: msRest.ServiceCallback<Models.PrivateEndpointConnection>): void;
  /**
   * @param resourceGroupName Name of the resource group that contains the key vault.
   * @param vaultName The name of the key vault.
   * @param privateEndpointConnectionName Name of the private endpoint connection associated with the
   * key vault.
   * @param properties The intended state of private endpoint connection.
   * @param options The optional parameters
   * @param callback The callback
   */
  put(resourceGroupName: string, vaultName: string, privateEndpointConnectionName: string, properties: Models.PrivateEndpointConnection, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.PrivateEndpointConnection>): void;
  put(resourceGroupName: string, vaultName: string, privateEndpointConnectionName: string, properties: Models.PrivateEndpointConnection, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.PrivateEndpointConnection>, callback?: msRest.ServiceCallback<Models.PrivateEndpointConnection>): Promise<Models.PrivateEndpointConnectionsPutResponse> {
    return this.client.sendOperationRequest(
      {
        resourceGroupName,
        vaultName,
        privateEndpointConnectionName,
        properties,
        options
      },
      putOperationSpec,
      callback) as Promise<Models.PrivateEndpointConnectionsPutResponse>;
  }
  /**
   * Deletes the specified private endpoint connection associated with the key vault.
   * Convenience wrapper: starts the long-running delete and polls to completion.
   * @param resourceGroupName Name of the resource group that contains the key vault.
   * @param vaultName The name of the key vault.
   * @param privateEndpointConnectionName Name of the private endpoint connection associated with the
   * key vault.
   * @param [options] The optional parameters
   * @returns Promise<Models.PrivateEndpointConnectionsDeleteResponse>
   */
  deleteMethod(resourceGroupName: string, vaultName: string, privateEndpointConnectionName: string, options?: msRest.RequestOptionsBase): Promise<Models.PrivateEndpointConnectionsDeleteResponse> {
    return this.beginDeleteMethod(resourceGroupName,vaultName,privateEndpointConnectionName,options)
      .then(lroPoller => lroPoller.pollUntilFinished()) as Promise<Models.PrivateEndpointConnectionsDeleteResponse>;
  }
  /**
   * Deletes the specified private endpoint connection associated with the key vault.
   * Returns an LROPoller without waiting for the operation to finish.
   * @param resourceGroupName Name of the resource group that contains the key vault.
   * @param vaultName The name of the key vault.
   * @param privateEndpointConnectionName Name of the private endpoint connection associated with the
   * key vault.
   * @param [options] The optional parameters
   * @returns Promise<msRestAzure.LROPoller>
   */
  beginDeleteMethod(resourceGroupName: string, vaultName: string, privateEndpointConnectionName: string, options?: msRest.RequestOptionsBase): Promise<msRestAzure.LROPoller> {
    // `options` is passed both as an operation argument and as request options;
    // this mirrors the generated sendLRORequest calling convention.
    return this.client.sendLRORequest(
      {
        resourceGroupName,
        vaultName,
        privateEndpointConnectionName,
        options
      },
      beginDeleteMethodOperationSpec,
      options);
  }
}
// Operation Specifications
// Shared serializer bound to this operation group's mappers.
const serializer = new msRest.Serializer(Mappers);
// GET one private endpoint connection; 200 returns the resource body.
const getOperationSpec: msRest.OperationSpec = {
  httpMethod: "GET",
  path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.KeyVault/vaults/{vaultName}/privateEndpointConnections/{privateEndpointConnectionName}",
  urlParameters: [
    Parameters.subscriptionId,
    Parameters.resourceGroupName,
    Parameters.vaultName0,
    Parameters.privateEndpointConnectionName
  ],
  queryParameters: [
    Parameters.apiVersion0
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {
      bodyMapper: Mappers.PrivateEndpointConnection
    },
    default: {
      bodyMapper: Mappers.CloudError
    }
  },
  serializer
};
// PUT (update) one private endpoint connection; body is the desired state.
const putOperationSpec: msRest.OperationSpec = {
  httpMethod: "PUT",
  path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.KeyVault/vaults/{vaultName}/privateEndpointConnections/{privateEndpointConnectionName}",
  urlParameters: [
    Parameters.subscriptionId,
    Parameters.resourceGroupName,
    Parameters.vaultName0,
    Parameters.privateEndpointConnectionName
  ],
  queryParameters: [
    Parameters.apiVersion0
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  requestBody: {
    parameterPath: "properties",
    // Clone the shared mapper and mark the body required for this operation.
    mapper: {
      ...Mappers.PrivateEndpointConnection,
      required: true
    }
  },
  responses: {
    200: {
      bodyMapper: Mappers.PrivateEndpointConnection,
      headersMapper: Mappers.PrivateEndpointConnectionsPutHeaders
    },
    default: {
      bodyMapper: Mappers.CloudError,
      headersMapper: Mappers.PrivateEndpointConnectionsPutHeaders
    }
  },
  serializer
};
// DELETE (long-running): 200 returns the final resource, 202 = accepted/in
// progress, 204 = already gone; all carry the delete headers for polling.
const beginDeleteMethodOperationSpec: msRest.OperationSpec = {
  httpMethod: "DELETE",
  path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.KeyVault/vaults/{vaultName}/privateEndpointConnections/{privateEndpointConnectionName}",
  urlParameters: [
    Parameters.subscriptionId,
    Parameters.resourceGroupName,
    Parameters.vaultName0,
    Parameters.privateEndpointConnectionName
  ],
  queryParameters: [
    Parameters.apiVersion0
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {
      bodyMapper: Mappers.PrivateEndpointConnection,
      headersMapper: Mappers.PrivateEndpointConnectionsDeleteHeaders
    },
    202: {
      headersMapper: Mappers.PrivateEndpointConnectionsDeleteHeaders
    },
    204: {
      headersMapper: Mappers.PrivateEndpointConnectionsDeleteHeaders
    },
    default: {
      bodyMapper: Mappers.CloudError,
      headersMapper: Mappers.PrivateEndpointConnectionsDeleteHeaders
    }
  },
  serializer
};
import * as ts from "typescript";
import * as lua from "../../../LuaAST";
import { cast, assertNever } from "../../../utils";
import { TransformationContext } from "../../context";
import { transformInPrecedingStatementScope } from "../../utils/preceding-statements";
import { transformBinaryOperation } from "../binary-expression";
import { transformAssignmentWithRightPrecedingStatements } from "./assignments";
function isLuaExpressionWithSideEffect(expression: lua.Expression) {
    // Literals and bare identifiers are the only forms safe to re-evaluate;
    // anything else may have side effects when emitted more than once.
    const isRepeatable = lua.isLiteral(expression) || lua.isIdentifier(expression);
    return !isRepeatable;
}
function shouldCacheTableIndexExpressions(
    expression: lua.TableIndexExpression,
    rightPrecedingStatements: lua.Statement[]
) {
    // Cache obj/index into temps whenever evaluating the RHS emits statements,
    // or either part of the access could have side effects on re-evaluation.
    if (rightPrecedingStatements.length > 0) {
        return true;
    }
    return isLuaExpressionWithSideEffect(expression.table) || isLuaExpressionWithSideEffect(expression.index);
}
// TODO: `as const` doesn't work on enum members
// The plain binary operators that compound-assignment operators expand to.
type CompoundAssignmentToken =
    | ts.SyntaxKind.BarToken
    | ts.SyntaxKind.PlusToken
    | ts.SyntaxKind.CaretToken
    | ts.SyntaxKind.MinusToken
    | ts.SyntaxKind.SlashToken
    | ts.SyntaxKind.PercentToken
    | ts.SyntaxKind.AsteriskToken
    | ts.SyntaxKind.AmpersandToken
    | ts.SyntaxKind.AsteriskAsteriskToken
    | ts.SyntaxKind.LessThanLessThanToken
    | ts.SyntaxKind.GreaterThanGreaterThanToken
    | ts.SyntaxKind.GreaterThanGreaterThanGreaterThanToken
    | ts.SyntaxKind.BarBarToken
    | ts.SyntaxKind.AmpersandAmpersandToken
    | ts.SyntaxKind.QuestionQuestionToken;
/** Maps each compound-assignment operator (`+=`, `&&=`, ...) to its plain binary counterpart (`+`, `&&`, ...). */
const compoundToAssignmentTokens: Record<ts.CompoundAssignmentOperator, CompoundAssignmentToken> = {
    [ts.SyntaxKind.BarEqualsToken]: ts.SyntaxKind.BarToken,
    [ts.SyntaxKind.PlusEqualsToken]: ts.SyntaxKind.PlusToken,
    [ts.SyntaxKind.CaretEqualsToken]: ts.SyntaxKind.CaretToken,
    [ts.SyntaxKind.MinusEqualsToken]: ts.SyntaxKind.MinusToken,
    [ts.SyntaxKind.SlashEqualsToken]: ts.SyntaxKind.SlashToken,
    [ts.SyntaxKind.PercentEqualsToken]: ts.SyntaxKind.PercentToken,
    [ts.SyntaxKind.AsteriskEqualsToken]: ts.SyntaxKind.AsteriskToken,
    [ts.SyntaxKind.AmpersandEqualsToken]: ts.SyntaxKind.AmpersandToken,
    [ts.SyntaxKind.AsteriskAsteriskEqualsToken]: ts.SyntaxKind.AsteriskAsteriskToken,
    [ts.SyntaxKind.LessThanLessThanEqualsToken]: ts.SyntaxKind.LessThanLessThanToken,
    [ts.SyntaxKind.GreaterThanGreaterThanEqualsToken]: ts.SyntaxKind.GreaterThanGreaterThanToken,
    [ts.SyntaxKind.GreaterThanGreaterThanGreaterThanEqualsToken]: ts.SyntaxKind.GreaterThanGreaterThanGreaterThanToken,
    [ts.SyntaxKind.BarBarEqualsToken]: ts.SyntaxKind.BarBarToken,
    [ts.SyntaxKind.AmpersandAmpersandEqualsToken]: ts.SyntaxKind.AmpersandAmpersandToken,
    [ts.SyntaxKind.QuestionQuestionEqualsToken]: ts.SyntaxKind.QuestionQuestionToken,
};
/** Type guard: true when `token` is a compound-assignment operator (i.e. has an entry in the map above). */
export const isCompoundAssignmentToken = (token: ts.BinaryOperator): token is ts.CompoundAssignmentOperator =>
    token in compoundToAssignmentTokens;
/** Returns the plain binary operator for a compound-assignment operator (e.g. `+=` -> `+`). */
export const unwrapCompoundAssignmentToken = (token: ts.CompoundAssignmentOperator): CompoundAssignmentToken =>
    compoundToAssignmentTokens[token];
/**
 * Transforms a compound assignment (`a += b`, `a++`, `a ||= b`, ...) into Lua
 * statements plus the expression that yields the result value.
 *
 * @param context Current transformation context.
 * @param expression The whole source expression (used for source mapping).
 * @param lhs The assignment target expression.
 * @param rhs The right-hand operand expression.
 * @param operator The unwrapped binary operator (e.g. `+` for `+=`).
 * @param isPostfix True for postfix forms (`a++`), whose result is the value
 *   *before* the assignment.
 * @returns `statements` to emit before use, and `result` — the expression
 *   callers read the compound assignment's value from.
 */
export function transformCompoundAssignment(
    context: TransformationContext,
    expression: ts.Expression,
    lhs: ts.Expression,
    rhs: ts.Expression,
    operator: CompoundAssignmentToken,
    isPostfix: boolean
) {
    const left = cast(context.transformExpression(lhs), lua.isAssignmentLeftHandSideExpression);
    // Transform the RHS in its own scope so any statements it needs are
    // captured instead of being emitted immediately.
    const [rightPrecedingStatements, right] = transformInPrecedingStatementScope(context, () =>
        context.transformExpression(rhs)
    );
    if (lua.isTableIndexExpression(left) && shouldCacheTableIndexExpressions(left, rightPrecedingStatements)) {
        // Complex property/element accesses need to cache object/index expressions to avoid repeating side-effects
        // local __obj, __index = ${objExpression}, ${indexExpression};
        const obj = context.createTempNameForLuaExpression(left.table);
        const index = context.createTempNameForLuaExpression(left.index);
        const objAndIndexDeclaration = lua.createVariableDeclarationStatement([obj, index], [left.table, left.index]);
        const accessExpression = lua.createTableIndexExpression(obj, index);
        const tmp = context.createTempNameForLuaExpression(left);
        if (isPostfix) {
            // local ____tmp = ____obj[____index];
            // ____obj[____index] = ____tmp ${replacementOperator} ${right};
            // return ____tmp
            const tmpDeclaration = lua.createVariableDeclarationStatement(tmp, accessExpression);
            const [precedingStatements, operatorExpression] = transformBinaryOperation(
                context,
                tmp,
                right,
                rightPrecedingStatements,
                operator,
                expression
            );
            const assignStatement = lua.createAssignmentStatement(accessExpression, operatorExpression);
            return {
                statements: [objAndIndexDeclaration, ...precedingStatements, tmpDeclaration, assignStatement],
                result: tmp,
            };
        } else {
            // local ____tmp = ____obj[____index] ${replacementOperator} ${right};
            // ____obj[____index] = ____tmp;
            // return ____tmp
            const [precedingStatements, operatorExpression] = transformBinaryOperation(
                context,
                accessExpression,
                right,
                rightPrecedingStatements,
                operator,
                expression
            );
            const tmpDeclaration = lua.createVariableDeclarationStatement(tmp, operatorExpression);
            const assignStatement = lua.createAssignmentStatement(accessExpression, tmp);
            return {
                statements: [objAndIndexDeclaration, ...precedingStatements, tmpDeclaration, assignStatement],
                result: tmp,
            };
        }
    } else if (isPostfix) {
        // Postfix expressions need to cache original value in temp
        // local ____tmp = ${left};
        // ${left} = ____tmp ${replacementOperator} ${right};
        // return ____tmp
        const tmpIdentifier = context.createTempNameForLuaExpression(left);
        const tmpDeclaration = lua.createVariableDeclarationStatement(tmpIdentifier, left);
        const [precedingStatements, operatorExpression] = transformBinaryOperation(
            context,
            tmpIdentifier,
            right,
            rightPrecedingStatements,
            operator,
            expression
        );
        const assignStatements = transformAssignmentWithRightPrecedingStatements(
            context,
            lhs,
            operatorExpression,
            rightPrecedingStatements
        );
        return { statements: [tmpDeclaration, ...precedingStatements, ...assignStatements], result: tmpIdentifier };
    } else if (ts.isPropertyAccessExpression(lhs) || ts.isElementAccessExpression(lhs)) {
        // Simple property/element access expressions need to cache in temp to avoid double-evaluation
        // local ____tmp = ${left} ${replacementOperator} ${right};
        // ${left} = ____tmp;
        // return ____tmp
        const tmpIdentifier = context.createTempNameForLuaExpression(left);
        const [precedingStatements, operatorExpression] = transformBinaryOperation(
            context,
            left,
            right,
            rightPrecedingStatements,
            operator,
            expression
        );
        const tmpDeclaration = lua.createVariableDeclarationStatement(tmpIdentifier, operatorExpression);
        if (isSetterSkippingCompoundAssignmentOperator(operator)) {
            const statements = [
                tmpDeclaration,
                ...transformSetterSkippingCompoundAssignment(tmpIdentifier, operator, right, precedingStatements),
            ];
            return { statements, result: tmpIdentifier };
        }
        // NOTE(review): `precedingStatements` (which computed `operatorExpression`)
        // are emitted here *after* tmpDeclaration consumes that expression —
        // confirm transformAssignmentWithRightPrecedingStatements tolerates this.
        const assignStatements = transformAssignmentWithRightPrecedingStatements(
            context,
            lhs,
            tmpIdentifier,
            precedingStatements
        );
        return { statements: [tmpDeclaration, ...assignStatements], result: tmpIdentifier };
    } else {
        if (rightPrecedingStatements.length > 0 && isSetterSkippingCompoundAssignmentOperator(operator)) {
            // `a ||= b` etc. with a statement-producing RHS: guard the whole
            // assignment so the RHS only evaluates when the setter would run.
            return {
                statements: transformSetterSkippingCompoundAssignment(left, operator, right, rightPrecedingStatements),
                result: left,
            };
        }
        // Simple expressions
        // ${left} = ${left} ${operator} ${right}
        const [precedingStatements, operatorExpression] = transformBinaryOperation(
            context,
            left,
            right,
            rightPrecedingStatements,
            operator,
            expression
        );
        const statements = transformAssignmentWithRightPrecedingStatements(
            context,
            lhs,
            operatorExpression,
            precedingStatements
        );
        return { statements, result: left };
    }
}
/**
 * Expression-position variant of transformCompoundAssignment: queues the
 * generated statements on the context and returns the result expression.
 */
export function transformCompoundAssignmentExpression(
    context: TransformationContext,
    expression: ts.Expression,
    // TODO: Change type to ts.LeftHandSideExpression?
    lhs: ts.Expression,
    rhs: ts.Expression,
    operator: CompoundAssignmentToken,
    isPostfix: boolean
): lua.Expression {
    const compound = transformCompoundAssignment(context, expression, lhs, rhs, operator, isPostfix);
    context.addPrecedingStatements(compound.statements);
    return compound.result;
}
/**
 * Statement-position variant of compound assignment: the result value is
 * discarded, so no temporary for the pre/post value is needed.
 *
 * @param context Current transformation context.
 * @param node The source node (used for source mapping).
 * @param lhs The assignment target expression.
 * @param rhs The right-hand operand expression.
 * @param operator The unwrapped binary operator (e.g. `+` for `+=`).
 * @returns The Lua statements implementing the assignment.
 */
export function transformCompoundAssignmentStatement(
    context: TransformationContext,
    node: ts.Node,
    lhs: ts.Expression,
    rhs: ts.Expression,
    operator: CompoundAssignmentToken
): lua.Statement[] {
    const left = cast(context.transformExpression(lhs), lua.isAssignmentLeftHandSideExpression);
    // Capture any statements needed to evaluate the RHS.
    let [rightPrecedingStatements, right] = transformInPrecedingStatementScope(context, () =>
        context.transformExpression(rhs)
    );
    if (lua.isTableIndexExpression(left) && shouldCacheTableIndexExpressions(left, rightPrecedingStatements)) {
        // Complex property/element accesses need to cache object/index expressions to avoid repeating side-effects
        // local __obj, __index = ${objExpression}, ${indexExpression};
        // ____obj[____index] = ____obj[____index] ${replacementOperator} ${right};
        const obj = context.createTempNameForLuaExpression(left.table);
        const index = context.createTempNameForLuaExpression(left.index);
        const objAndIndexDeclaration = lua.createVariableDeclarationStatement([obj, index], [left.table, left.index]);
        const accessExpression = lua.createTableIndexExpression(obj, index);
        if (isSetterSkippingCompoundAssignmentOperator(operator)) {
            // `a.b ||= c` etc: emit a guarded assignment instead of a plain one.
            return [
                objAndIndexDeclaration,
                ...transformSetterSkippingCompoundAssignment(
                    accessExpression,
                    operator,
                    right,
                    rightPrecedingStatements,
                    node
                ),
            ];
        }
        let operatorExpression: lua.Expression;
        [rightPrecedingStatements, operatorExpression] = transformBinaryOperation(
            context,
            accessExpression,
            right,
            rightPrecedingStatements,
            operator,
            node
        );
        const assignStatement = lua.createAssignmentStatement(accessExpression, operatorExpression);
        return [objAndIndexDeclaration, ...rightPrecedingStatements, assignStatement];
    } else {
        if (isSetterSkippingCompoundAssignmentOperator(operator)) {
            return transformSetterSkippingCompoundAssignment(left, operator, right, rightPrecedingStatements, node);
        }
        // Simple statements
        // ${left} = ${left} ${replacementOperator} ${right}
        let operatorExpression: lua.Expression;
        [rightPrecedingStatements, operatorExpression] = transformBinaryOperation(
            context,
            left,
            right,
            rightPrecedingStatements,
            operator,
            node
        );
        return transformAssignmentWithRightPrecedingStatements(
            context,
            lhs,
            operatorExpression,
            rightPrecedingStatements
        );
    }
}
/* These setter-skipping operators will not execute the setter if result does not change.
 * x.y ||= z does NOT call the x.y setter if x.y is already true.
 * x.y &&= z does NOT call the x.y setter if x.y is already false.
 * x.y ??= z does NOT call the x.y setter if x.y is already not nullish.
 */
type SetterSkippingCompoundAssignmentOperator = ts.LogicalOperator | ts.SyntaxKind.QuestionQuestionToken;
function isSetterSkippingCompoundAssignmentOperator(
    operator: ts.BinaryOperator
): operator is SetterSkippingCompoundAssignmentOperator {
    switch (operator) {
        case ts.SyntaxKind.AmpersandAmpersandToken:
        case ts.SyntaxKind.BarBarToken:
        case ts.SyntaxKind.QuestionQuestionToken:
            return true;
        default:
            return false;
    }
}
function transformSetterSkippingCompoundAssignment(
lhs: lua.AssignmentLeftHandSideExpression,
operator: SetterSkippingCompoundAssignmentOperator,
right: lua.Expression,
rightPrecedingStatements: lua.Statement[],
node?: ts.Node
): lua.Statement[] {
// These assignments have the form 'if x then y = z', figure out what condition x is first.
let condition: lua.Expression;
if (operator === ts.SyntaxKind.AmpersandAmpersandToken) {
condition = lhs;
} else if (operator === ts.SyntaxKind.BarBarToken) {
condition = lua.createUnaryExpression(lhs, lua.SyntaxKind.NotOperator);
} else if (operator === ts.SyntaxKind.QuestionQuestionToken) {
condition = lua.createBinaryExpression(lhs, lua.createNilLiteral(), lua.SyntaxKind.EqualityOperator);
} else {
assertNever(operator);
}
// if condition then lhs = rhs end
return [
lua.createIfStatement(
condition,
lua.createBlock([...rightPrecedingStatements, lua.createAssignmentStatement(lhs, right, node)]),
undefined,
node
),
];
} | the_stack |
namespace PE.Battle {
  /**
   * Per-battler volatile battle state: the counters, flags and battler-index
   * references consumed by move/ability effect handling. All fields are
   * optional; they are populated by Battler.initEffects /
   * initPermanentEffects. Conventions visible in that code: boolean flags
   * reset to false, counters to 0, and battler-index references to -1.
   */
  interface BattlerEffects {
    AquaRing?: boolean;
    Attract?: number;
    BatonPass?: boolean;
    Bide?: number;
    BideDamage?: number;
    BideTarget?: number;
    Charge?: number;
    /** The move id locked in by Choice Band. */
    ChoiceBand?: MOVEDEX;
    Confusion?: number;
    Counter?: number;
    CounterTarget?: number;
    Curse?: boolean;
    DefenseCurl?: boolean;
    DestinyBond?: boolean;
    Disable?: number;
    DisableMove?: number;
    Electrify?: boolean;
    Embargo?: number;
    Encore?: number;
    EncoreMoveId?: MOVEDEX;
    EncoreMove?: number;
    Endure?: boolean;
    FirstPledge?: number;
    FlashFire?: boolean;
    Flinch?: boolean;
    FocusEnergy?: number;
    FollowMe?: number;
    Foresight?: boolean;
    FuryCutter?: number;
    FutureSight?: number;
    FutureSightMove?: number;
    FutureSightUser?: number;
    FutureSightUserPos?: number;
    GastroAcid?: boolean;
    Grudge?: boolean;
    HealBlock?: number;
    HealingWish?: boolean;
    HelpingHand?: boolean;
    HyperBeam?: number;
    Illusion?: Pokemon.Pokemon;
    Imprison?: boolean;
    Ingrain?: boolean;
    KingsShield?: boolean;
    LeechSeed?: number;
    LifeOrb?: boolean;
    LockOn?: number;
    LockOnPos?: number;
    LunarDance?: boolean;
    MagicCoat?: boolean;
    MagnetRise?: number;
    MeanLook?: number;
    MeFirst?: boolean;
    Metronome?: number;
    MicleBerry?: boolean;
    Minimize?: boolean;
    MiracleEye?: boolean;
    MirrorCoat?: number;
    MirrorCoatTarget?: number;
    MoveNext?: boolean;
    MudSport?: boolean;
    MultiTurn?: number;
    MultiTurnAttack?: number;
    MultiTurnUser?: number;
    Nightmare?: boolean;
    Outrage?: number;
    ParentalBond?: number;
    PerishSong?: number;
    PerishSongUser?: number;
    PickupItem?: string;
    PickupUse?: string;
    Pinch?: boolean;
    Powder?: boolean;
    PowerTrick?: boolean;
    Protect?: boolean;
    ProtectNegation?: boolean;
    ProtectRate?: number;
    Pursuit?: boolean;
    Quash?: boolean;
    Rage?: boolean;
    Roar?: boolean;
    Revenge?: number;
    Rollout?: number;
    Roost?: boolean;
    SkipTurn?: boolean;
    SkyDrop?: boolean;
    SmackDown?: boolean;
    Snatch?: boolean;
    SpikyShield?: boolean;
    Stockpile?: number;
    StockpileDef?: number;
    StockpileSpDef?: number;
    Substitute?: number;
    Taunt?: number;
    Telekinesis?: number;
    Torment?: boolean;
    Toxic?: number;
    Transform?: boolean;
    /** TODO: add to the effects test. */
    Truant?: boolean;
    TwoTurnAttack?: number;
    Type3?: Types;
    /** Set when the held item has been used or lost. */
    Unburden?: boolean;
    Uproar?: number;
    Uturn?: boolean;
    WaterSport?: boolean;
    /** Amount of weight added to the Pokémon's normal weight. */
    WeightChange?: number;
    Wish?: number;
    WishAmount?: number;
    WishMaker?: number;
    Yawn?: number;
  }
  /**
   * Stat-stage multipliers keyed by stage (-6..+6) as string keys; the speed
   * getter indexes this with a numeric stage (coerced to a string key).
   * NOTE(review): mainline games use 2/(2+n) for lowered stages
   * (-6 -> 0.25, -5 -> ~0.286, -4 -> ~0.333); the -6/-5/-4 values here
   * differ slightly — confirm whether this is intentional balancing.
   */
  const STAGE_MULT = {
    "-6": 0.2,
    "-5": 0.28,
    "-4": 0.33,
    "-3": 0.4,
    "-2": 0.5,
    "-1": 0.67,
    "0": 1,
    "1": 1.5,
    "2": 2,
    "3": 2.5,
    "4": 3,
    "5": 3.5,
    "6": 4
  };
export class Battler {
    hpbar: UI.HPBar;
    // Backing fields for the battle-time stats (stage/ability modifiers are
    // applied in the accessors below, e.g. `get speed()`).
    private _hp = 0;
    private _totalHP = 0;
    private _attack: number;
    private _defense: number;
    private _spatk: number;
    private _spdef: number;
    private _speed: number;
    private _fainted = false;
    private _captured = false;
    nickname: string;
    // NOTE(review): `totalhp` duplicates `_totalHP` (both set from
    // pokemon.stats.hp in initPokemon) — consider consolidating.
    totalhp: number;
    ability: ABILITYDEX;
    damageState: DamageState;
    effects: BattlerEffects;
    forme: any;
    gender: any;
    item: string;
    itemInitial: string;
    ivs: { hp: number; atk: number; def: number; spa: number; spd: number; spe: number };
    lastAttacker: {};
    lastHPLost: number;
    lastMoveUsed: number;
    lastMoveUsedSketch: number;
    lastMoveUsedType: number;
    lastRoundMoved: number;
    level: number;
    movesUsed: {};
    moveset: Moves.Move[];
    sides: { own: ActiveSide; foe: ActiveSide } = { own: undefined, foe: undefined };
    /** Participants will earn Exp. Points if this battler is defeated */
    participants: number[];
    species: string;
    // Backing field for the `status` accessor (accessor defined outside this
    // view — presumably elsewhere in the class; verify).
    _status: Statuses;
    statusCount: number;
    stages: { HP?; Attack?; Defense?; Speed?; SpAtk?; SpDef?; Accuracy?; Evasion? };
    tookDamage: boolean;
    turncount: number;
    types: string[];
    // partner: Battler;
    /**
     * Wraps a party Pokémon for battle.
     * @param pokemon The backing party Pokémon.
     * @param index This battler's slot index (used by effect cross-references
     *   such as LockOnPos/Attract in other battlers' effects).
     */
    constructor(public pokemon: Pokemon.Pokemon, public index: number) {
      this._hp = 0;
      this._totalHP = 0;
      // Marked fainted until initPokemon loads the real HP values below.
      this._fainted = true;
      this._captured = false;
      this.damageState = new DamageState();
      this.effects = {};
      this.stages = {};
      this.initPokemon(pokemon);
      this.initEffects(false);
      this.initPermanentEffects();
    }
    /**
     * Copies the party Pokémon's current data (stats, moves, status, typing)
     * into this battler's battle-time fields and clears EXP participants.
     */
    initPokemon(pokemon: Pokemon.Pokemon) {
      // if (pokemon.isEgg()) throw Error("An egg can't be an active Pokémon");
      this.nickname = pokemon.name;
      this.species = pokemon.species;
      this.level = pokemon.level;
      this._hp = pokemon.stats.hp;
      this._totalHP = pokemon.stats.hp;
      this.gender = pokemon.gender;
      this.ability = pokemon.ability;
      this.item = pokemon.item;
      this.types = pokemon.types;
      this.forme = pokemon.forme;
      this._attack = pokemon.stats.atk;
      this._defense = pokemon.stats.def;
      this._spatk = pokemon.stats.spa;
      this._spdef = pokemon.stats.spd;
      this._speed = pokemon.stats.spe;
      this.status = pokemon.status;
      this.statusCount = pokemon.statusCount;
      this.totalhp = pokemon.stats.hp;
      this.participants = [];
      this.moveset = pokemon.moveset;
      this.ivs = pokemon.ivs;
    }
initEffects(batonpass: boolean) {
if (!batonpass) {
// These effects are retained if Baton Pass is used
this.stages.Attack = 0;
this.stages.Defense = 0;
this.stages.Speed = 0;
this.stages.SpAtk = 0;
this.stages.SpDef = 0;
this.stages.Evasion = 0;
this.stages.Accuracy = 0;
this.lastMoveUsedSketch = -1;
this.effects.AquaRing = false;
this.effects.Confusion = 0;
this.effects.Curse = false;
this.effects.Embargo = 0;
this.effects.FocusEnergy = 0;
this.effects.GastroAcid = false;
this.effects.HealBlock = 0;
this.effects.Ingrain = false;
this.effects.LeechSeed = -1;
this.effects.LockOn = 0;
this.effects.LockOnPos = -1;
for (const battler of $Battle.actives) {
if (battler.effects.LockOnPos === this.index && battler.effects.LockOn > 0) {
battler.effects.LockOn = 0;
battler.effects.LockOnPos = -1;
}
}
this.effects.MagnetRise = 0;
this.effects.PerishSong = 0;
this.effects.PerishSongUser = -1;
this.effects.PowerTrick = false;
this.effects.Substitute = 0;
this.effects.Telekinesis = 0;
} else {
if (this.effects.LockOn > 0) {
this.effects.LockOn = 2;
} else {
this.effects.LockOn = 0;
}
if (this.effects.PowerTrick) {
let aux = this.attack;
this.attack = this.defense;
this.defense = this.attack;
}
}
// this.damagestate.reset();
this._fainted = false;
this.lastAttacker = {};
this.lastHPLost = 0;
this.tookDamage = false;
this.lastMoveUsed = -1;
this.lastMoveUsedType = -1;
this.lastRoundMoved = -1;
this.movesUsed = {};
this.turncount = 0;
this.effects.Attract = -1;
for (const battler of $Battle.actives) {
if (battler.effects.Attract === this.index) {
battler.effects.Attract = -1;
}
}
this.effects.BatonPass = false;
this.effects.Bide = 0;
this.effects.BideDamage = 0;
this.effects.BideTarget = -1;
this.effects.Charge = 0;
this.effects.ChoiceBand = undefined;
this.effects.Counter = -1;
this.effects.CounterTarget = -1;
this.effects.DefenseCurl = false;
this.effects.DestinyBond = false;
this.effects.Disable = 0;
this.effects.DisableMove = 0;
this.effects.Electrify = false;
this.effects.Encore = 0;
this.effects.EncoreMoveId = undefined;
this.effects.EncoreMove = 0;
this.effects.Endure = false;
this.effects.FirstPledge = 0;
this.effects.FlashFire = false;
this.effects.Flinch = false;
this.effects.FollowMe = 0;
this.effects.Foresight = false;
this.effects.FuryCutter = 0;
this.effects.Grudge = false;
this.effects.HelpingHand = false;
this.effects.HyperBeam = 0;
this.effects.Illusion = null;
/**
* Add here ilusion ability
*/
this.effects.Imprison = false;
this.effects.KingsShield = false;
this.effects.LifeOrb = false;
this.effects.MagicCoat = false;
this.effects.MeanLook = -1;
for (const battler of $Battle.actives) {
if (battler.effects.MeanLook === this.index) battler.effects.MeanLook = -1;
}
this.effects.MeFirst = false;
this.effects.Metronome = 0;
this.effects.MicleBerry = false;
this.effects.Minimize = false;
this.effects.MiracleEye = false;
this.effects.MirrorCoat = -1;
this.effects.MirrorCoatTarget = -1;
this.effects.MoveNext = false;
this.effects.MudSport = false;
this.effects.MultiTurn = 0;
this.effects.MultiTurnAttack = 0;
this.effects.MultiTurnUser = -1;
for (const battler of $Battle.actives) {
if (battler.effects.MultiTurnUser === this.index) {
battler.effects.MultiTurn = 0;
battler.effects.MultiTurnUser = -1;
}
}
this.effects.Nightmare = false;
this.effects.Outrage = 0;
this.effects.ParentalBond = 0;
this.effects.PickupItem = "";
this.effects.PickupUse = "";
this.effects.Pinch = false;
this.effects.Powder = false;
this.effects.Protect = false;
this.effects.ProtectNegation = false;
this.effects.ProtectRate = 1;
this.effects.Pursuit = false;
this.effects.Quash = false;
this.effects.Rage = false;
this.effects.Revenge = 0;
this.effects.Roar = false;
this.effects.Rollout = 0;
this.effects.Roost = false;
this.effects.SkipTurn = false;
this.effects.SkyDrop = false;
this.effects.SmackDown = false;
this.effects.Snatch = false;
this.effects.SpikyShield = false;
this.effects.Stockpile = 0;
this.effects.StockpileDef = 0;
this.effects.StockpileSpDef = 0;
this.effects.Taunt = 0;
this.effects.Torment = false;
this.effects.Toxic = 0;
this.effects.Transform = false;
this.effects.Truant = false;
this.effects.TwoTurnAttack = 0;
this.effects.Type3 = undefined;
this.effects.Unburden = false;
this.effects.Uproar = 0;
this.effects.Uturn = false;
this.effects.WaterSport = false;
this.effects.WeightChange = 0;
this.effects.Yawn = 0;
}
/** These effects are always retained even if a Pokémon is replaced */
initPermanentEffects() {
this.effects.FutureSight = 0;
this.effects.FutureSightMove = 0;
this.effects.FutureSightUser = -1;
this.effects.FutureSightUserPos = -1;
this.effects.HealingWish = false;
this.effects.LunarDance = false;
this.effects.Wish = 0;
this.effects.WishAmount = 0;
this.effects.WishMaker = -1;
}
    /**
     * Re-initializes this slot for a replacement Pokémon. Natural Cure heals
     * the OUTGOING Pokémon's status before the swap.
     * @param batonpass Forwarded to initEffects: retain passable effects.
     */
    initialize(pokemon: Pokemon.Pokemon, index: number, batonpass: boolean) {
      //Cure status of previous Pokemon with Natural Cure
      if (this.hasAbility(ABILITYDEX.NATURALCURE)) this.status = Statuses.Healthy;
      // if (this.hasAbility(Abilitydex.REGENERATOR)) this.recoverHP(Math.floor(this.totalhp / 3));
      this.initPokemon(pokemon);
      this.initEffects(batonpass);
    }
/** Update Pokémon who will gain EXP if this battler is defeated */
updateParticipants() {
  if (this.isFainted()) return;
  if ($Battle.isOpposing(this.index)) {
    // Only the player's side earns EXP, so record each conscious
    // player-side battler that has faced this opposing Pokémon.
    for (const battler of this.sides.foe.actives) {
      // BUG FIX: `x in array` tests array *indices*, not values, so every
      // battler was re-pushed each call. includes() tests membership.
      if (!this.participants.includes(battler.index) && !$Battle.battlers[battler.index].isFainted()) {
        this.participants.push(battler.index);
      }
    }
  }
}
//==================================================================================================================
//#region Battle Stats and complex accesors
/** Current HP (read-only; modified via damage() and healing paths). */
get hp() {
  return this._hp;
}
/** Attack stat; stage/ability modifiers are applied by callers. */
get attack() {
  return this._attack;
}
set attack(value) {
  this._attack = value;
}
/** Defense stat; stage/ability modifiers are applied by callers. */
get defense() {
  return this._defense;
}
set defense(value) {
  this._defense = value;
}
/** Special Attack stat (read-only, unlike attack/defense — no setter). */
get spatk() {
  return this._spatk;
}
/** Special Defense stat (read-only, unlike attack/defense — no setter). */
get spdef() {
  return this._spdef;
}
/**
 * Effective Speed: base speed scaled by the current stat stage, then by
 * the (currently disabled) ability/item/field modifiers, floored at 1.
 */
get speed() {
  const staged = Math.floor(this._speed * STAGE_MULT[this.stages.Speed]);
  let modifier = 1;
  // modifier = Abilities.Effects('modifyStat', this, modifier);
  // modifier = Battle.Items.SpeedStatEffects(this, modifier);
  // if (this.sides.own.effects.Tailwind > 0) modifier *= 2;
  // if (this.sides.own.effects.Swamp) modifier = Math.round(modifier / 2);
  // if (this.status === Statuses.Paralysis && !this.hasAbility(Abilitydex.QUICKFEET)) modifier = Math.round(modifier / 4);
  return Math.max(1, Math.round(staged * modifier));
}
/**
 * Apply `amt` damage: reduce internal HP, mirror it on the HP bar, and
 * faint the battler when HP reaches zero (HP is clamped at 0).
 */
damage(amt) {
  this._hp -= amt;
  this.hpbar.damage(amt);
  // $Battle.recoverHPAnimation(this.index);
  if (this._hp > 0) return;
  this._hp = 0;
  this.faint();
}
/**
 * Mark this battler as fainted: clear volatile effects and status, drop
 * its pending battle choice, and record the faint turn on its side.
 * Returns true on every path, including the two guard paths below —
 * callers apparently treat the return as "handled"; TODO confirm.
 * @param showMessage When true, announce the faint to the player.
 */
faint(showMessage = true) {
  // Guard: faint() is only valid once HP has already reached 0.
  if (!this.isFainted()) {
    console.log("!!!***Can't faint with HP greater than 0");
    return true;
  }
  // Guard: don't double-faint.
  if (this._fainted) {
    console.log("!!!***Can't faint if already fainted");
    return true;
  }
  // $Battle.scene.fainted(this);
  this.initEffects(false);
  this.status = 0;
  this.statusCount = 0;
  // if (this.pokemon && $Battle.internalbattle) {
  //   this.pokemon.changeHappiness("faint");
  // }
  // if (this.pokemon.isMega()) this.pokemon.makeUnmega();
  // if (this.pokemon.isPrimal()) this.pokemon.makeUnprimal();
  this._fainted = true;
  // Cancel any action this battler had queued for the turn.
  $Battle.choices[this.index] = undefined;
  this.sides.own.effects.LastRoundFainted = $Battle.turncount;
  if (showMessage) $Battle.showPausedMessage(`${this.name} fainted!`);
  console.log(`[Pokémon fainted] ${this.name} fainted`);
  return true;
}
/** Current status condition. */
get status() {
  return this._status;
}
/**
 * Set the status condition, keeping derived counters in sync:
 * waking from Sleep resets Truant; leaving Poison clears the Toxic
 * counter; any status other than Poison/Sleep zeroes statusCount.
 */
set status(value: Statuses) {
  if (this._status === Statuses.Sleep && value === Statuses.Healthy) {
    this.effects.Truant = false;
  }
  this._status = value;
  if (this.pokemon) this.pokemon.status = value; // keep party data in sync
  if (value !== Statuses.Poison) this.effects.Toxic = 0;
  if (value !== Statuses.Poison && value !== Statuses.Sleep) {
    this.statusCount = 0;
    // BUG FIX: guard like the assignment above — this.pokemon may be unset.
    if (this.pokemon) this.pokemon.statusCount = 0;
  }
}
/**
 * Effective weight used by weight-based moves (Low Kick, Heavy Slam, …).
 * @param attacker Attacker, if any — its Mold Breaker suppresses the
 *   holder's weight-changing abilities.
 * @returns Integer weight, clamped to a minimum of 1.
 */
weight(attacker: Battler = undefined) {
  let w = this.pokemon.weightkg || 500;
  if (!attacker || !attacker.hasMoldBreaker()) {
    if (this.hasAbility(ABILITYDEX.HEAVYMETAL)) w *= 2;
    if (this.hasAbility(ABILITYDEX.LIGHTMETAL)) w /= 2;
  }
  if (this.hasItem("FLOATSTONE")) w /= 2;
  // BUG FIX: WeightChange (Autotomize) is an additive delta initialised to
  // 0 — multiplying by it zeroed every Pokémon's weight.
  w += this.effects.WeightChange;
  w = Math.floor(w);
  // BUG FIX: clamp at 1, not only for negatives — flooring/negative deltas
  // can reach 0, and a zero weight breaks weight-ratio moves.
  if (w < 1) w = 1;
  return w;
}
//#endregion
//==================================================================================================================
//==================================================================================================================
//#region Battler info
/**
 * Display name: the nickname prefixed according to whose side the
 * battler is on (opposing/wild vs. player/ally).
 */
get name() {
  if ($Battle.isOpposing(this.index)) {
    return $Battle.opponent()
      ? i18n._("the opposing %1", this.nickname)
      : i18n._("the wild %1", this.nickname);
  }
  return $Battle.ownedByPlayer(this.index)
    ? this.nickname
    : i18n._("the ally %1", this.nickname);
}
/**
 * True when `ability` is currently active on this battler.
 * Fainted Pokémon (unless `ignorefainted`) and Gastro Acid suppress it.
 */
hasAbility(ability: ABILITYDEX, ignorefainted?: boolean) {
  if (this._fainted && !ignorefainted) return false;
  return !this.effects.GastroAcid && this.ability === ability;
}
/** True when any of the listed abilities is currently active. */
hasAbilityIn(abilities: ABILITYDEX[]) {
  return abilities.some(ability => this.hasAbility(ability));
}
/**
 * True when `item` is the held item and it is usable: items are inert
 * while fainted (unless overridden), under Embargo, inside Magic Room,
 * or when the holder has Klutz.
 */
hasItem(item: string, ignorefainted?: boolean) {
  if (this._fainted && !ignorefainted) return false;
  if (this.effects.Embargo || $Battle.field.effects.MagicRoom > 0) return false;
  if (this.hasAbility(ABILITYDEX.KLUTZ, ignorefainted)) return false;
  return this.item === item;
}
/** True when any of the listed items is held and usable. */
hasItemIn(items: string[]) {
  return items.some(item => this.hasItem(item));
}
/** True when `id` is one of this battler's known moves. */
hasMove(id: MOVEDEX) {
  return this.moveset.some(move => move.id === id);
}
/** True when the battler knows at least one move of the given type. */
hasMoveType(type: Types) {
  return this.moveset.some(move => move.type === type);
}
/**
 * True when the battler has already acted this turn.
 * lastRoundMoved is unset/falsy until the battler's first action.
 */
hasMovedThisRound() {
  return !!this.lastRoundMoved && this.lastRoundMoved === $Battle.turncount;
}
/** True for Mold Breaker and its variants (Teravolt, Turboblaze). */
hasMoldBreaker() {
  return this.hasAbilityIn([ABILITYDEX.MOLDBREAKER, ABILITYDEX.TERAVOLT, ABILITYDEX.TURBOBLAZE]);
}
/**
 * True when `type` is among the battler's types, including any third
 * type added mid-battle (effects.Type3).
 */
hasType(type: Types | string) {
  const third = this.effects.Type3;
  if (third && third === type) return true;
  for (const t of this.types) {
    if (t === type) return true;
  }
  return false;
}
/** True when HP has reached 0 (faint() performs the actual cleanup). */
isFainted() {
  return this.hp <= 0;
}
/** True when `index` belongs to the opposing side's battler list. */
isOpposing(index) {
  return Array.from(this.sides.foe.battlers).some(foeIndex => foeIndex === index);
}
/**
 * Check whether the Pokémon is off the ground (not touching the field).
 * Order matters: grounding items/effects and Gravity override any source
 * of levitation.
 * @param ignoreability When true, Levitate is ignored (Mold Breaker).
 */
isAirborne(ignoreability: boolean) {
  // BUG FIX: item id was misspelled "IRONBAll" (all other item ids are
  // upper-case), so Iron Ball never grounded its holder.
  if (this.hasItem("IRONBALL")) return false;
  if (this.effects.Ingrain || this.effects.SmackDown) return false;
  if ($Battle.field.effects.Gravity) return false;
  if (this.hasType("FLYING") && !this.effects.Roost) return true;
  if (this.hasAbility(ABILITYDEX.LEVITATE) && !ignoreability) return true;
  if (this.hasItem("AIRBALLOON")) return true;
  if (this.effects.MagnetRise || this.effects.Telekinesis) return true;
  return false;
}
//#endregion
//==================================================================================================================
//==================================================================================================================
//#region Status Conditions
//------------------------------------------------------------------------------------------------------------------
//#region Sleep
/**
 * Check whether this battler can be put to sleep.
 * Shows the reason to the player when `showMessages` is true.
 * @param attacker The battler causing the sleep, if any; self-induced
 *   sleep (attacker === this) skips the Substitute/Safeguard checks.
 * @param showMessages Show failure messages to the player.
 * @param move The move used, if any (for ignoresSubstitute).
 * @param ignorestatus Skip the "already asleep" check (e.g. Rest).
 */
canSleep(attacker: Battler, showMessages: boolean, move?, ignorestatus?: boolean) {
  if (this.isFainted()) return false;
  let selfSleep = attacker && attacker.index === this.index;
  if (!ignorestatus && this.status === Statuses.Sleep) {
    if (showMessages) $Battle.showMessage(i18n._(`%1 is already asleep!`, this.name));
    return false;
  }
  // Substitute and Safeguard only protect against sleep inflicted by others.
  if (!selfSleep) {
    if (this.status != Statuses.Healthy || (this.effects.Substitute > 0 && (!move || !move.ignoresSubstitute(attacker)))) {
      if (showMessages) $Battle.showMessage(i18n._("But it failed!"));
      return false;
    }
    if (this.sides.own.effects.Safeguard > 0 && (!attacker || !attacker.hasAbility(ABILITYDEX.INFILTRATOR))) {
      if (showMessages) $Battle.showMessage(i18n._("%1's team is protected by Safeguard!", this.name));
      return false;
    }
  }
  // Electric/Misty Terrain block sleep for grounded Pokémon.
  if (!this.isAirborne(attacker && attacker.hasMoldBreaker())) {
    if ($Battle.field.effects.ElectricTerrain > 0) {
      if (showMessages) $Battle.showMessage(i18n._(`The Electric Terrain prevented %1 from falling asleep!`, this.name));
      return false;
    } else if ($Battle.field.effects.MistyTerrain > 0) {
      if (showMessages) $Battle.showMessage(i18n._(`The Misty Terrain prevented %1 from falling asleep!`, this.name));
      return false;
    }
  }
  // An ongoing Uproar prevents sleep unless Soundproof blocks it.
  if ((attacker && attacker.hasMoldBreaker()) || !this.hasAbility(ABILITYDEX.SOUNDPROOF)) {
    for (const battler of $Battle.actives) {
      if (battler.effects.Uproar > 0) {
        if (showMessages) $Battle.showMessage(i18n._(`But the uproar kept %1 awake!`, this.name));
        return false;
      }
    }
  }
  // Sleep-preventing abilities/weather (skipped if an opposing Mold Breaker
  // is the cause). NOTE(review): Array#contains is non-standard — presumably
  // a project prototype extension; stdlib equivalent is Array#includes.
  if (!attacker || selfSleep || !attacker.hasMoldBreaker()) {
    if (
      this.hasAbilityIn([ABILITYDEX.VITALSPIRIT, ABILITYDEX.INSOMNIA, ABILITYDEX.SWEETVEIL]) ||
      (this.hasAbility(ABILITYDEX.FLOWERVEIL) && this.hasType("GRASS")) ||
      [Weathers.SunnyDay, Weathers.HarshSun].contains($Battle.weather)
    ) {
      let msg = `%1 stayed awake using its %2!`;
      if (showMessages) $Battle.showMessage(i18n._(msg, this.name, Abilities.getName(this.ability)));
      return false;
    }
    // if this.partner.hasAbility(Abilitydex.SWEETVEIL) ||
    // (this.partner.hasAbility(Abilitydex.FLOWERVEIL) && this.hasType('GRASS'))
    // abilityname=AbilitiesEffetcs.name(this.partner.ability)
    // $Battle.showMessage(i18n._("%1 stayed awake using its partner's %2!",this.name,abilityname)) if showMessages
    // return false
    // end
  }
  return true;
}
/**
 * Yawn's delayed sleep: re-check the sleep blockers at the end of the
 * delay. Unlike canSleep(), no failure messages are shown.
 */
canSleepYawn() {
  if (this.status !== Statuses.Healthy) return false;
  // An ongoing Uproar keeps the target awake unless it has Soundproof.
  if (!this.hasAbility(ABILITYDEX.SOUNDPROOF)) {
    for (const battler of $Battle.actives) {
      if (battler.effects.Uproar > 0) return false;
    }
  }
  // Terrain only matters for grounded Pokémon.
  if (!this.isAirborne(undefined)) {
    if ($Battle.field.effects.ElectricTerrain > 0) return false;
    if ($Battle.field.effects.MistyTerrain > 0) return false;
  }
  if (this.hasAbilityIn([ABILITYDEX.VITALSPIRIT, ABILITYDEX.INSOMNIA, ABILITYDEX.SWEETVEIL])) return false;
  const sunny = $Battle.weather == Weathers.SunnyDay || $Battle.weather == Weathers.HarshSun;
  if (this.hasAbility(ABILITYDEX.LEAFGUARD) && sunny) return false;
  // return false if this.partner.this.hasAbility(:SWEETVEIL)
  return true;
}
/**
 * Put the battler to sleep for 2-4 turns (halved by Early Bird).
 * @param msg Optional custom message; falls back to the default text.
 */
sleep(msg?: string) {
  this.status = Statuses.Sleep;
  this.statusCount = 2 + Math.randomInt(3);
  if (this.hasAbility(ABILITYDEX.EARLYBIRD)) this.statusCount = Math.floor(this.statusCount / 2);
  // pbCancelMoves()
  // $Battle.pbCommonAnimation("Sleep",self,nil)
  if (msg && msg !== "") $Battle.showMessage(msg);
  else $Battle.showMessage(i18n._("%1 fell asleep!", this.name));
  // BUG FIX: log used Ruby-style "#{...}" interpolation inside a plain JS
  // string and printed it verbatim; use a template literal.
  console.log(`[Status change] ${this.name} fell asleep (${this.statusCount} turns)`);
}
/**
 * Self-induced sleep (e.g. Rest).
 * @param duration Turn count; non-positive means the usual 2-4 turns.
 *   Early Bird halves the result either way.
 */
sleepSelf(duration: number = -1) {
  this.status = Statuses.Sleep;
  if (duration > 0) this.statusCount = duration;
  else this.statusCount = 2 + Math.randomInt(3);
  if (this.hasAbility(ABILITYDEX.EARLYBIRD)) this.statusCount = Math.floor(this.statusCount / 2);
  // pbCancelMoves
  // $Battle.pbCommonAnimation("Sleep",self,nil)
  // BUG FIX: log used Ruby-style "#{...}" interpolation inside a plain JS
  // string and printed it verbatim; use a template literal.
  console.log(`[Status change] ${this.name} made itself fall asleep (${this.statusCount} turns)`);
}
//#endregion
//------------------------------------------------------------------------------------------------------------------
//------------------------------------------------------------------------------------------------------------------
//#region attrack
/**
 * Check whether this battler can become infatuated with `attacker`:
 * needs a conscious attacker, no existing Attract effect, compatible
 * (opposite, non-genderless) genders, and no (unsuppressed) Oblivious.
 */
canAttract(attacker, showMessages) {
  if (this.isFainted()) return false;
  if (!attacker || attacker.isFainted()) return false;
  const fail = () => {
    if (showMessages) $Battle.showMessage(i18n._("But it failed!"));
    return false;
  };
  if (this.effects.Attract >= 0) return fail();
  const incompatible =
    attacker.gender === "N" || this.gender === "N" || attacker.gender === this.gender;
  if (incompatible) return fail();
  if ((!attacker || !attacker.hasMoldBreaker()) && this.hasAbility(ABILITYDEX.OBLIVIOUS)) {
    if (showMessages) $Battle.showMessage(i18n._("%1's %2 prevents romance!", this.name, Abilities.getName(this.ability)));
    return false;
  }
  return true;
}
/**
 * Infatuate this battler with `attacker`; Destiny Knot reflects the
 * infatuation back onto the attacker when possible.
 */
attract(attacker: Battler, showMessage: boolean) {
  this.effects.Attract = attacker.index;
  // $Battle.pbCommonAnimation("Attract", this, null);
  if (showMessage) $Battle.showMessage(i18n._(`%1 fell in love!`, this.name));
  console.log(`[Lingering effect triggered] ${this.name} became infatuated (with ${attacker.name})`);
  const reflects = this.hasItem("DESTINYKNOT") && attacker.canAttract(this, false);
  if (!reflects) return;
  attacker.attract(this, false);
  $Battle.showMessage(i18n._(`%1's %2 made %3 fall in love!`, this.name, Items.name(this.item), attacker.name));
  console.log(`[Item triggered] ${this.name}'s Destiny Knot`);
}
//#endregion
//------------------------------------------------------------------------------------------------------------------
//------------------------------------------------------------------------------------------------------------------
//#region Poison
/**
 * Check whether this battler can be poisoned.
 * @param attacker The battler causing the poison, if any.
 * @param showMessages Show failure messages to the player.
 * @param move The move used, if any (for ignoresSubstitute).
 */
canPoison(attacker: Battler, showMessages: boolean, move?: any) {
  if (this._fainted) return false;
  if (this.status === Statuses.Poison) {
    if (showMessages) $Battle.showMessage(i18n._("%1 is already poisoned.", this.name));
    return false;
  }
  if (this.status !== Statuses.Healthy || (this.effects.Substitute > 0 && (!move || !move.ignoresSubstitute(attacker)))) {
    if (showMessages) $Battle.showMessage(i18n._("But it failed!"));
    return false;
  }
  // Poison/Steel types are immune — unless they hold a Ring Target.
  if ((this.hasType("POISON") || this.hasType("STEEL")) && !this.hasItem("RINGTARGET")) {
    if (showMessages) $Battle.showMessage(i18n._("It doesn't affect %1...", this.name));
    return false;
  }
  // Misty Terrain protects grounded Pokémon.
  if ($Battle.field.effects.MistyTerrain > 0 && !this.isAirborne(attacker && attacker.hasMoldBreaker())) {
    let m = "The Misty Terrain prevented %1 from being poisoned!";
    if (showMessages) $Battle.showMessage(i18n._(m, this.name));
    return false;
  }
  // Preventing abilities (own and partner's), skipped under Mold Breaker.
  if (!attacker || !attacker.hasMoldBreaker()) {
    if (
      this.hasAbility(ABILITYDEX.IMMUNITY) ||
      (this.hasAbility(ABILITYDEX.FLOWERVEIL) && this.hasType("GRASS")) ||
      (this.hasAbility(ABILITYDEX.LEAFGUARD) && ($Battle.weather === Weathers.SunnyDay || $Battle.weather === Weathers.HarshSun))
    ) {
      let m = "%1's %2 prevents poisoning!";
      if (showMessages) $Battle.showMessage(i18n._(m, this.name, Abilities.getName(this.ability)));
      return false;
    }
    if (this.partner.hasAbility(ABILITYDEX.FLOWERVEIL) && this.hasType("GRASS")) {
      let m = "%1's partner's %2 prevents poisoning!";
      if (showMessages) $Battle.showMessage(i18n._(m, this.name, Abilities.getName(this.partner.ability)));
      return false;
    }
  }
  if (this.sides.own.effects.Safeguard > 0 && (!attacker || !attacker.hasAbility(ABILITYDEX.INFILTRATOR))) {
    if (showMessages) $Battle.showMessage(i18n._("%1's team is protected by Safeguard!", this.name));
    return false;
  }
  return true;
}
/**
 * Check if the Pokémon can be poisoned by the Synchronize ability.
 * Messages are always shown here (unlike canPoison there is no flag).
 * @param opponent The Synchronize user.
 */
canPoisonSynchronize(opponent: Battler) {
  if (this._fainted) return false;
  // Poison/Steel types are immune — unless they hold a Ring Target.
  if ((this.hasType("POISON") || this.hasType("STEEL")) && !this.hasItem("RINGTARGET")) {
    $Battle.showMessage(i18n._("%1's %2 had no effect on %3!", opponent.name, Abilities.getName(opponent.ability), this.name));
    return false;
  }
  if (this.status !== Statuses.Healthy) return false;
  // Preventing abilities on this battler.
  if (
    this.hasAbility(ABILITYDEX.IMMUNITY) ||
    (this.hasAbility(ABILITYDEX.FLOWERVEIL) && this.hasType("GRASS")) ||
    (this.hasAbility(ABILITYDEX.LEAFGUARD) && ($Battle.weather === Weathers.SunnyDay || $Battle.weather === Weathers.HarshSun))
  ) {
    $Battle.showMessage(
      i18n._(
        "%1's %2 prevents %3's %4 from working!",
        this.name,
        Abilities.getName(this.ability),
        opponent.name,
        Abilities.getName(opponent.ability)
      )
    );
    return false;
  }
  // Partner's Flower Veil also protects Grass types.
  if (this.partner.hasAbility(ABILITYDEX.FLOWERVEIL) && this.hasType("GRASS")) {
    $Battle.showMessage(
      i18n._(
        "%1's %2 prevents %3's %4 from working!",
        this.partner.name,
        Abilities.getName(this.partner.ability),
        opponent.name,
        Abilities.getName(opponent.ability)
      )
    );
    return false;
  }
  return true;
}
/**
 * Check whether Toxic Spikes can poison this battler on switch-in.
 * All checks are silent (no battle messages).
 * @param moldbreaker When true, ability-based protections are skipped.
 */
canPoisonSpikes(moldbreaker?: boolean) {
  if (this._fainted) return false;
  if (this.status !== Statuses.Healthy) return false;
  if (this.hasType("POISON") || this.hasType("STEEL")) return false;
  if (!moldbreaker) {
    if (this.hasAbility(ABILITYDEX.IMMUNITY)) return false;
    if (this.hasAbility(ABILITYDEX.FLOWERVEIL) && this.hasType("GRASS")) return false;
    if (this.partner.hasAbility(ABILITYDEX.FLOWERVEIL) && this.hasType("GRASS")) return false;
    const sunny = $Battle.weather === Weathers.SunnyDay || $Battle.weather == Weathers.HarshSun;
    if (this.hasAbility(ABILITYDEX.LEAFGUARD) && sunny) return false;
  }
  return !(this.sides.own.effects.Safeguard > 0);
}
/**
 * Poison the battler and trigger its Synchronize against the attacker.
 * @param attacker The battler that inflicted the poison, if any.
 * @param msg Optional custom message; falls back to the default text.
 * @param toxic True for bad poison (statusCount starts at 1 and ramps).
 */
poison(attacker: Battler = undefined, msg: string = undefined, toxic: boolean = false) {
  this.status = Statuses.Poison;
  this.statusCount = toxic ? 1 : 0;
  this.effects.Toxic = 0;
  // $Battle.pbCommonAnimation("Poison",self,nil)
  if (msg && msg !== "") $Battle.showMessage(msg);
  else {
    if (toxic) $Battle.showMessage(i18n._("%1 was badly poisoned!", this.name));
    else $Battle.showMessage(i18n._("%1 was poisoned!", this.name));
  }
  // BUG FIX: logs used Ruby-style "#{...}" interpolation (one even read
  // "this.this.name") and printed verbatim; use template literals.
  if (toxic) console.log(`[Status change] ${this.name} was badly poisoned`);
  else console.log(`[Status change] ${this.name} was poisoned`);
  if (attacker && this.index !== attacker.index && this.hasAbility(ABILITYDEX.SYNCHRONIZE)) {
    if (attacker.canPoisonSynchronize(this)) {
      console.log(`[Ability triggered] ${this.name}'s Synchronize`);
      let m = i18n._("%1's %2 poisoned %3!", this.name, Abilities.getName(this.ability), attacker.name);
      attacker.poison(undefined, m, toxic);
    }
  }
}
//#endregion
//------------------------------------------------------------------------------------------------------------------
//------------------------------------------------------------------------------------------------------------------
//#region Paralize
// TODO: paralysis checks not yet implemented — see canPoison/canBurn for the expected shape.
canParalize(attacker?, opponent?) {}
// TODO: paralysis infliction not yet implemented — see poison()/burn() for the expected shape.
paralize(...args) {}
//#endregion
//------------------------------------------------------------------------------------------------------------------
//------------------------------------------------------------------------------------------------------------------
//#region Burn
/**
 * Check if the Pokémon can be burned.
 * @param attacker The attacking Pokémon, if any.
 * @param showMessages Show failure messages to the player.
 * @param move The move used, if any (for ignoresSubstitute).
 */
canBurn(attacker: Battler, showMessages: boolean, move: any = undefined) {
  if (this._fainted) return false;
  if (this.status === Statuses.Burn) {
    if (showMessages) $Battle.showMessage(i18n._("%1 already has a burn.", this.name));
    return false;
  }
  if (this.status !== Statuses.Healthy || (this.effects.Substitute > 0 && (!move || !move.ignoresSubstitute(attacker)))) {
    if (showMessages) $Battle.showMessage(i18n._("But it failed!"));
    return false;
  }
  // Misty Terrain protects grounded Pokémon.
  if ($Battle.field.effects.MistyTerrain > 0 && !this.isAirborne(attacker && attacker.hasMoldBreaker())) {
    if (showMessages) {
      let msg = i18n._("The Misty Terrain prevented %1 from being burned!", this.name);
      $Battle.showMessage(msg);
    }
    return false;
  }
  // Fire types are immune — unless they hold a Ring Target.
  if (this.hasType("FIRE") && !this.hasItem("RINGTARGET")) {
    if (showMessages) $Battle.showMessage(i18n._("It doesn't affect %1...", this.name));
    return false;
  }
  // Preventing abilities (own and partner's), skipped under Mold Breaker.
  if (!attacker || !attacker.hasMoldBreaker()) {
    if (
      this.hasAbility(ABILITYDEX.WATERVEIL) ||
      (this.hasAbility(ABILITYDEX.FLOWERVEIL) && this.hasType("GRASS")) ||
      (this.hasAbility(ABILITYDEX.LEAFGUARD) && ($Battle.weather === Weathers.SunnyDay || $Battle.weather === Weathers.HarshSun))
    ) {
      if (showMessages) {
        let msg = i18n._("%1's %2 prevents burns!", this.name, Abilities.getName(this.ability));
        $Battle.showMessage(msg);
      }
      return false;
    }
    if (this.partner.hasAbility(ABILITYDEX.FLOWERVEIL) && this.hasType("GRASS")) {
      if (showMessages) {
        let msg = i18n._("%1's partner's %2 prevents burns!", this.name, Abilities.getName(this.partner.ability));
        $Battle.showMessage(msg);
      }
      return false;
    }
  }
  if (this.sides.own.effects.Safeguard > 0 && (!attacker || !attacker.hasAbility(ABILITYDEX.INFILTRATOR))) {
    if (showMessages) $Battle.showMessage(i18n._("%1's team is protected by Safeguard!", this.name));
    return false;
  }
  return true;
}
/**
 * Check if the Pokémon can be **burned** by the **Synchronize** ability.
 * Messages are always shown here (there is no showMessages flag).
 * @param opponent The Synchronize user.
 */
canBurnSynchronize(opponent: Battler) {
  if (this._fainted) return false;
  if (this.status !== Statuses.Healthy) return false;
  // Fire types are immune — unless they hold a Ring Target.
  if (this.hasType("FIRE") && !this.hasItem("RINGTARGET")) {
    $Battle.showMessage(i18n._("%1's %2 had no effect on %3!", opponent.name, Abilities.getName(opponent.ability), this.name));
    return false;
  }
  // Shared failure path: the same message is used for the partner's veil
  // as for this battler's own abilities (matching the original behavior).
  const blocked = () => {
    let text = "%1's %2 prevents %3's %4 from working!";
    $Battle.showMessage(i18n._(text, this.name, Abilities.getName(this.ability), opponent.name, Abilities.getName(opponent.ability)));
    return false;
  };
  if (this.hasAbility(ABILITYDEX.WATERVEIL)) return blocked();
  if (this.hasAbility(ABILITYDEX.FLOWERVEIL) && this.hasType("GRASS")) return blocked();
  const sunny = $Battle.weather === Weathers.SunnyDay || $Battle.weather == Weathers.HarshSun;
  if (this.hasAbility(ABILITYDEX.LEAFGUARD) && sunny) return blocked();
  if (this.partner.hasAbility(ABILITYDEX.FLOWERVEIL) && this.hasType("GRASS")) return blocked();
  return true;
}
/**
 * Burn the Pokémon: sets the status to Burn and triggers the victim's
 * Synchronize against the attacker where applicable.
 * @param attacker The attacking Pokémon, if any.
 * @param msg Optional custom info message.
 */
burn(attacker: Battler = undefined, msg: string = undefined) {
  this.status = Statuses.Burn;
  this.statusCount = 0;
  // $Battle.pbCommonAnimation("Burn",self,nil)
  if (msg && msg != "") $Battle.showMessage(msg);
  else $Battle.showMessage(i18n._("%1 was burned!", this.name));
  // BUG FIX: logs used Ruby-style "#{...}" interpolation (one even read
  // "this.this.name") and printed verbatim; use template literals.
  console.log(`[Status change] ${this.name} was burned`);
  if (attacker && this.index !== attacker.index && this.hasAbility(ABILITYDEX.SYNCHRONIZE)) {
    if (attacker.canBurnSynchronize(this)) {
      console.log(`[Ability triggered] ${this.name}'s Synchronize`);
      let m = i18n._("%1's %2 burned %3!", this.name, Abilities.getName(this.ability), attacker.name);
      attacker.burn(undefined, m);
    }
  }
}
//#endregion
//------------------------------------------------------------------------------------------------------------------
//#endregion
//==================================================================================================================
//==================================================================================================================
//#region Increase stat stages
/** True when the given stat stage is already at the +6 cap. */
tooHigh(stat: Stats) {
  return this.stages[stat] >= 6;
}
/**
 * Check whether a stat stage can be raised. Contrary redirects the check
 * to canReduceStatStage (unless suppressed by an opposing Mold Breaker).
 * @param stat The stat to check.
 * @param attacker The battler causing the change, if any.
 * @param showMessages Show failure messages to the player.
 * @param move The move used, if any.
 * @param moldbreaker True when Mold Breaker forced the change.
 * @param ignoreContrary Internal flag to stop Contrary recursion.
 */
canIncreaseStatStage(
  stat: Stats,
  attacker: Battler = undefined,
  showMessages = false,
  move = undefined,
  moldbreaker = false,
  ignoreContrary = false
) {
  if (this._fainted) return false;
  if (!moldbreaker) {
    if (!attacker || attacker.index === this.index || !attacker.hasMoldBreaker()) {
      if (this.hasAbility(ABILITYDEX.CONTRARY) && !ignoreContrary) {
        // BUG FIX: `move` was omitted from this call, so `moldbreaker` was
        // passed in the `move` parameter slot of canReduceStatStage.
        return this.canReduceStatStage(stat, attacker, showMessages, move, moldbreaker, true);
      }
    }
  }
  if (this.tooHigh(stat)) {
    if (showMessages) $Battle.showMessage(i18n._("%1's %2 won't go any higher!", this.name, Stats.name(stat)));
    return false;
  }
  return true;
}
/**
 * Apply a raw stat-stage increase with no messages. Contrary flips it
 * into a drop and Simple doubles it (neither applies when an opposing
 * Mold Breaker forced the change).
 * @returns The number of stages actually gained (clamped at +6).
 */
increaseStatBasic(stat: Stats, increment: number, attacker: Battler = undefined, moldbreaker = false, ignoreContrary = false) {
  if (!moldbreaker && (!attacker || attacker.index == this.index || !attacker.hasMoldBreaker())) {
    if (this.hasAbility(ABILITYDEX.CONTRARY) && !ignoreContrary) {
      return this.reduceStatBasic(stat, increment, attacker, moldbreaker, true);
    }
    if (this.hasAbility(ABILITYDEX.SIMPLE)) increment *= 2;
  }
  // Never push the stage past the +6 cap.
  const applied = Math.min(increment, 6 - this.stages[stat]);
  console.log(`[Stat change] ${this.name}'s ${Stats.name(stat)} rose by ${applied} stage(s)`);
  console.log(`${Stats.name(stat)} stage: ${this.stages[stat]} --> ${this.stages[stat] + applied}`);
  this.stages[stat] += applied;
  return applied;
}
/**
 * Raise a stat stage with messages/animation. Contrary redirects the
 * whole call to reduceStat (unless suppressed by Mold Breaker).
 * @returns True when at least one stage was actually gained.
 */
increaseStat(
  stat: Stats,
  increment: number,
  attacker: Battler,
  showMessages: boolean,
  move: any = undefined,
  animation: boolean = true,
  moldbreaker: boolean = false,
  ignoreContrary: boolean = false
) {
  if (!(stat in Stats)) return false;
  if (!moldbreaker) {
    if (!attacker || attacker.index === this.index || !attacker.hasMoldBreaker()) {
      if (this.hasAbility(ABILITYDEX.CONTRARY) && !ignoreContrary) {
        return this.reduceStat(stat, increment, attacker, showMessages, move, animation, moldbreaker, true);
      }
    }
  }
  if (this.canIncreaseStatStage(stat, attacker, showMessages, move, moldbreaker, ignoreContrary)) {
    increment = this.increaseStatBasic(stat, increment, attacker, moldbreaker, ignoreContrary);
    if (increment > 0) {
      // Announce Contrary itself when it redirected the change here.
      if (ignoreContrary) {
        if (animation) $Battle.showMessage(i18n._("%1's %2 activated!", this.name, Abilities.getName(this.ability)));
      }
      // if (animation) $Battle.pbCommonAnimation("StatUp",self,nil);
      // Message strength scales with the number of stages gained (1/2/3+).
      let texts = [
        i18n._("%1's %2 rose!", this.name, Stats.name(stat)),
        i18n._("%1's %2 rose sharply!", this.name, Stats.name(stat)),
        i18n._("%1's %2 rose drastically!", this.name, Stats.name(stat))
      ];
      if (showMessages) $Battle.showMessage(texts[Math.min(increment - 1, 2)]);
      return true;
    }
  }
  return false;
}
/**
 * Raise a stat stage attributing the change to `cause` (an ability or
 * item name shown in the message). Contrary redirects the call to
 * reduceStatWithCause (unless suppressed by Mold Breaker).
 * NOTE(review): unlike increaseStat, `attacker.index` is read below
 * without a null guard — presumably callers always pass an attacker
 * (often `this`); confirm.
 * @returns True when at least one stage was actually gained.
 */
increaseStatWithCause(
  stat: Stats,
  increment: number,
  attacker: Battler,
  cause: string,
  showanim = true,
  showMessages = true,
  moldbreaker = false,
  ignoreContrary = false
) {
  if (!(stat in Stats)) return false;
  if (!moldbreaker) {
    if (!attacker || attacker.index == this.index || !attacker.hasMoldBreaker()) {
      if (this.hasAbility(ABILITYDEX.CONTRARY) && !ignoreContrary) {
        return this.reduceStatWithCause(stat, increment, attacker, cause, showanim, showMessages, moldbreaker, true);
      }
    }
  }
  if (this.canIncreaseStatStage(stat, attacker, false, undefined, moldbreaker, ignoreContrary)) {
    increment = this.increaseStatBasic(stat, increment, attacker, moldbreaker, ignoreContrary);
    if (increment > 0) {
      if (ignoreContrary) {
        if (showMessages) $Battle.showMessage(i18n._("%1's %2 activated!", this.name, Abilities.getName(this.ability)));
      }
      // if (showanim) $Battle.pbCommonAnimation("StatUp",self,nil)
      let texts = [];
      if (attacker.index === this.index) {
        texts = [
          i18n._("%1's %2 raised its %3!", this.name, cause, Stats.name(stat)),
          i18n._("%1's %2 sharply raised its %3!", this.name, cause, Stats.name(stat)),
          // NOTE(review): this third text drops `cause` and passes only the
          // stat, unlike its siblings — looks like a copy slip; confirm.
          i18n._("%1's %2 went way up!", this.name, Stats.name(stat))
        ];
      } else {
        texts = [
          i18n._("%1's %2 raised %3's %4!", attacker.name, cause, this.name, Stats.name(stat)),
          i18n._("%1's %2 sharply raised %3's %4!", attacker.name, cause, this.name, Stats.name(stat)),
          i18n._("%1's %2 drastically raised %3's %4!", attacker.name, cause, this.name, Stats.name(stat))
        ];
      }
      if (showMessages) $Battle.showMessage(texts[Math.min(increment - 1, 2)]);
      return true;
    }
  }
  return false;
}
//#endregion
//==================================================================================================================
//==================================================================================================================
//#region Decrease stat stages
/** True when the given stat stage is already at the -6 floor. */
tooLow(stat: Stats) {
  return this.stages[stat] <= -6;
}
/**
 * Check if a stat stage can be lowered.
 *
 * Tickle (04A) and Noble Roar (13A) can't use this, but replicate it instead.
 * (Reason is they lower more than 1 stat independently, and therefore could
 * show certain messages twice, which is undesirable.)
 * @param stat The stat to check.
 * @param attacker The attacking Pokémon, if any.
 * @param showMessages Show failure messages to the player.
 * @param move The move used, if any.
 * @param moldbreaker True when Mold Breaker forced the change.
 * @param ignoreContrary Internal flag to stop Contrary recursion.
 */
canReduceStatStage(
  stat: Stats,
  attacker: Battler = undefined,
  showMessages = false,
  move: any = undefined,
  moldbreaker = false,
  ignoreContrary = false
) {
  if (this._fainted) return false;
  // Contrary turns a drop into a raise (unless Mold Breaker suppresses it).
  if (!moldbreaker) {
    if (!attacker || attacker.index === this.index || !attacker.hasMoldBreaker()) {
      if (this.hasAbility(ABILITYDEX.CONTRARY) && !ignoreContrary) {
        return this.canIncreaseStatStage(stat, attacker, showMessages, move, moldbreaker, true);
      }
    }
  }
  // Self-inflicted drops bypass Substitute, Mist and protective abilities.
  let selfreduce = attacker && attacker.index === this.index;
  if (!selfreduce) {
    if (this.effects.Substitute > 0 && (!move || !move.ignoresSubstitute(attacker))) {
      if (showMessages) $Battle.showMessage(i18n._("But it failed!"));
      return false;
    }
    if (this.sides.own.effects.Mist > 0 && (!attacker || !attacker.hasAbility(ABILITYDEX.INFILTRATOR))) {
      if (showMessages) $Battle.showMessage(i18n._("%1 is protected by Mist!", this.name));
      return false;
    }
    if (!moldbreaker && (!attacker || !attacker.hasMoldBreaker())) {
      // General stat-loss prevention.
      if (this.hasAbilityIn([ABILITYDEX.CLEARBODY, ABILITYDEX.WHITESMOKE])) {
        if (showMessages) {
          let msg = "%1's %2 prevents stat loss!";
          $Battle.showMessage(i18n._(msg, this.name, Abilities.getName(this.ability)));
        }
        return false;
      }
      // Flower Veil (own or partner's) protects Grass types.
      if (this.hasType("GRASS")) {
        if (this.hasAbility(ABILITYDEX.FLOWERVEIL)) {
          if (showMessages) {
            let msg = "%1's %2 prevents stat loss!";
            $Battle.showMessage(i18n._(msg, this.name, Abilities.getName(this.ability)));
          }
          return false;
        } else if (this.partner.hasAbility(ABILITYDEX.FLOWERVEIL)) {
          if (showMessages) {
            let msg = "%1's %2 prevents %3's stat loss!";
            $Battle.showMessage(i18n._(msg, this.partner.name, Abilities.getName(this.ability), this.name));
          }
          return false;
        }
      }
      // Single-stat protections.
      if (stat === Stats.Attack && this.hasAbility(ABILITYDEX.HYPERCUTTER)) {
        if (showMessages) {
          let msg = "%1's %2 prevents Attack loss!";
          $Battle.showMessage(i18n._(msg, this.name, Abilities.getName(this.ability)));
        }
        return false;
      }
      if (stat === Stats.Defense && this.hasAbility(ABILITYDEX.BIGPECKS)) {
        if (showMessages) {
          let msg = "%1's %2 prevents Defence loss!";
          $Battle.showMessage(i18n._(msg, this.name, Abilities.getName(this.ability)));
        }
        return false;
      }
      if (stat === Stats.Accuracy && this.hasAbility(ABILITYDEX.KEENEYE)) {
        if (showMessages) {
          let msg = "%1's %2 prevents Accuracy loss!";
          $Battle.showMessage(i18n._(msg, this.name, Abilities.getName(this.ability)));
        }
        return false;
      }
    }
  }
  if (this.tooLow(stat)) {
    if (showMessages) $Battle.showMessage(i18n._("%1's %2 won't go any lower!", this.name, Stats.name(stat)));
    return false;
  }
  return true;
}
/**
 * Apply a raw stat-stage decrease with no messages. Contrary flips it
 * into a raise and Simple doubles it (neither applies when an opposing
 * Mold Breaker forced the change — moldbreaker is true only when Roar
 * forces a Pokémon out into Sticky Web).
 * @returns The number of stages actually lost (clamped at -6).
 */
reduceStatBasic(stat: Stats, increment: number, attacker: Battler = undefined, moldbreaker = false, ignoreContrary = false) {
  if (!moldbreaker && (!attacker || attacker.index == this.index || !attacker.hasMoldBreaker())) {
    if (this.hasAbility(ABILITYDEX.CONTRARY) && !ignoreContrary) {
      return this.increaseStatBasic(stat, increment, attacker, moldbreaker, true);
    }
    if (this.hasAbility(ABILITYDEX.SIMPLE)) increment *= 2;
  }
  // Never push the stage below the -6 floor.
  const applied = Math.min(increment, 6 + this.stages[stat]);
  console.log(`[Stat change] ${this.name}'s ${Stats.name(stat)} fell by ${applied} stage(s)`);
  console.log(`${Stats.name(stat)} stage: ${this.stages[stat]} --> ${this.stages[stat] - applied}`);
  this.stages[stat] -= applied;
  return applied;
}
/**
 * Lower a stat stage with messages/animation. Contrary redirects the
 * whole call to increaseStat (unless suppressed by Mold Breaker);
 * Defiant/Competitive retaliate with +2 Attack/Sp.Atk when the drop came
 * from an opponent.
 * @returns True when at least one stage was actually lost.
 */
reduceStat(
  stat: Stats,
  increment: number,
  attacker: Battler,
  showMessages: boolean,
  move: any = undefined,
  downanim = true,
  moldbreaker = false,
  ignoreContrary = false
) {
  if (!(stat in Stats)) return false;
  if (!moldbreaker) {
    if (!attacker || attacker.index === this.index || !attacker.hasMoldBreaker()) {
      if (this.hasAbility(ABILITYDEX.CONTRARY) && !ignoreContrary) {
        return this.increaseStat(stat, increment, attacker, showMessages, move, downanim, moldbreaker, true);
      }
    }
  }
  if (this.canReduceStatStage(stat, attacker, showMessages, move, moldbreaker, ignoreContrary)) {
    increment = this.reduceStatBasic(stat, increment, attacker, moldbreaker, ignoreContrary);
    if (increment > 0) {
      // Announce Contrary itself when it redirected the change here.
      if (ignoreContrary) {
        if (downanim) $Battle.showMessage(i18n._("%1's %2 activated!", this.name, Abilities.getName(this.ability)));
      }
      // $Battle.pbCommonAnimation("StatDown",self,nil) if downanim
      // Message strength scales with the number of stages lost (1/2/3+).
      let texts = [
        i18n._("%1's %2 fell!", this.name, Stats.name(stat)),
        i18n._("%1's %2 harshly fell!", this.name, Stats.name(stat)),
        i18n._("%1's %2 severely fell!", this.name, Stats.name(stat))
      ];
      if (showMessages) $Battle.showMessage(texts[Math.min(increment - 1, 2)]);
      // Defiant
      if (this.hasAbility(ABILITYDEX.DEFIANT) && (!attacker || attacker.isOpposing(this.index))) {
        this.increaseStatWithCause(Stats.Attack, 2, this, Abilities.getName(this.ability));
      }
      // Competitive
      if (this.hasAbility(ABILITYDEX.COMPETITIVE) && (!attacker || attacker.isOpposing(this.index))) {
        this.increaseStatWithCause(Stats.SpAtk, 2, this, Abilities.getName(this.ability));
      }
      return true;
    }
  }
  return false;
}
/**
 * Lower a stat stage attributing the change to `cause` (an ability or
 * item name shown in the message). Contrary redirects the call to
 * increaseStatWithCause; Defiant/Competitive retaliate with +2
 * Attack/Sp.Atk when the drop came from an opponent.
 * NOTE(review): `attacker.index` is read below without a null guard —
 * presumably callers always pass an attacker; confirm.
 * @returns True when at least one stage was actually lost.
 */
reduceStatWithCause(
  stat: Stats,
  increment: number,
  attacker: Battler,
  cause: string,
  showanim = true,
  showMessages = true,
  moldbreaker = false,
  ignoreContrary = false
) {
  if (!(stat in Stats)) return false;
  if (!moldbreaker) {
    if (!attacker || attacker.index === this.index || !attacker.hasMoldBreaker()) {
      if (this.hasAbility(ABILITYDEX.CONTRARY) && !ignoreContrary) {
        return this.increaseStatWithCause(stat, increment, attacker, cause, showanim, showMessages, moldbreaker, true);
      }
    }
  }
  if (this.canReduceStatStage(stat, attacker, false, undefined, moldbreaker, ignoreContrary)) {
    increment = this.reduceStatBasic(stat, increment, attacker, moldbreaker, ignoreContrary);
    if (increment > 0) {
      if (ignoreContrary) {
        if (showMessages) $Battle.showMessage(i18n._("%1's %2 activated!", this.name, Abilities.getName(this.ability)));
      }
      // if (showanim) $Battle.pbCommonAnimation("StatDown",self,nil)
      let texts = [];
      if (attacker.index === this.index) {
        texts = [
          i18n._("%1's %2 lowered its %3!", this.name, cause, Stats.name(stat)),
          i18n._("%1's %2 harshly lowered its %3!", this.name, cause, Stats.name(stat)),
          i18n._("%1's %2 severely lowered its %3!", this.name, cause, Stats.name(stat))
        ];
      } else {
        texts = [
          i18n._("%1's %2 lowered %3's %4!", attacker.name, cause, this.name, Stats.name(stat)),
          i18n._("%1's %2 harshly lowered %3's %4!", attacker.name, cause, this.name, Stats.name(stat)),
          i18n._("%1's %2 severely lowered %3's %4!", attacker.name, cause, this.name, Stats.name(stat))
        ];
      }
      if (showMessages) $Battle.showMessage(texts[Math.min(increment - 1, 2)]);
      // Defiant
      if (this.hasAbility(ABILITYDEX.DEFIANT) && (!attacker || attacker.isOpposing(this.index))) {
        this.increaseStatWithCause(Stats.Attack, 2, this, Abilities.getName(this.ability));
      }
      // Competitive
      if (this.hasAbility(ABILITYDEX.COMPETITIVE) && (!attacker || attacker.isOpposing(this.index))) {
        this.increaseStatWithCause(Stats.SpAtk, 2, this, Abilities.getName(this.ability));
      }
      return true;
    }
  }
  return false;
}
/**
 * Applies Intimidate's Attack drop to this battler, honoring Substitute,
 * Mist and drop-preventing abilities.
 * @param opponent The battler whose Intimidate is triggering.
 * @returns true if the Attack stat actually fell, false otherwise.
 */
reduceAttackStatIntimidate(opponent: Battler) {
  if (this._fainted) return false;
  if (this.effects.Substitute > 0) {
    $Battle.showMessage(
      i18n._("%1's substitute protected it from %2's %3!", this.name, opponent.name, Abilities.getName(opponent.ability))
    );
    return false;
  }
  // If the Intimidate user has Contrary, the drop becomes a raise, so the
  // usual drop protections are deliberately skipped.
  if (!opponent.hasAbility(ABILITYDEX.CONTRARY)) {
    if (this.sides.own.effects.Mist > 0) {
      $Battle.showMessage(
        i18n._("%1 is protected from %2's %3 by Mist!", this.name, opponent.name, Abilities.getName(opponent.ability))
      );
      return false;
    }
    const ownAbilityBlocks =
      this.hasAbilityIn([ABILITYDEX.CLEARBODY, ABILITYDEX.WHITESMOKE, ABILITYDEX.HYPERCUTTER]) ||
      (this.hasAbility(ABILITYDEX.FLOWERVEIL) && this.hasType("GRASS"));
    if (ownAbilityBlocks) {
      $Battle.showMessage(
        i18n._("%1's %2 prevented %3's %4 from working!", this.name, Abilities.getName(this.ability), opponent.name, Abilities.getName(opponent.ability))
      );
      return false;
    }
    // An ally's Flower Veil can also shield a Grass-type battler.
    if (this.partner.hasAbility(ABILITYDEX.FLOWERVEIL) && this.hasType("GRASS")) {
      $Battle.showMessage(
        i18n._("%1's %2 prevented %3's %4 from working!", this.partner.name, Abilities.getName(this.partner.ability), opponent.name, Abilities.getName(opponent.ability))
      );
      return false;
    }
  }
  return this.reduceStatWithCause(Stats.Attack, 1, opponent, Abilities.getName(opponent.ability));
}
//#endregion
//==================================================================================================================
// Damage-application stub; concrete battler subclasses are expected to
// supply the real logic. Always reports 0 damage dealt here.
reduceHP(..._args) {
  return 0;
}
// Healing stub; concrete battler subclasses are expected to supply the
// real logic. Always reports 0 HP recovered here.
recoverHP(..._args) {
  return 0;
}
// Base implementation: a battler with no ally is its own "partner".
// NOTE(review): presumably overridden for double battles — confirm in subclasses.
get partner() {
  return this;
}
// Status-cure hook; not implemented on this base class.
cureStatus(..._args) {
  throw new Error("Not Implemented");
}
// Confusion-cure hook; not implemented on this base class.
cureConfusion(..._args) {
  throw new Error("Not Implemented");
}
// Attraction-cure hook; not implemented on this base class.
cureAttract(..._args) {
  throw new Error("Not Implemented");
}
//==================================================================================================================
// Held item effects
//==================================================================================================================
/**
 * Removes this battler's held item after it is consumed (berry, gem, etc.),
 * recording it for later recovery mechanics.
 * @param recycle When true, remember the item so Recycle can restore it.
 * @param pickup When true, record the item/turn so Pickup can retrieve it.
 */
consumeItem(recycle = true, pickup = true) {
  // NOTE(review): the original assigned `Items.name(this.item)` to an unused
  // local; removed, as a name lookup has no visible side effect here.
  if (recycle) this.pokemon.itemRecycle = this.item;
  // A consumed item no longer counts as the originally-held item.
  if (this.pokemon.itemInitial == this.item) this.pokemon.itemInitial = undefined;
  if (pickup) {
    this.effects.PickupItem = this.item;
    this.effects.PickupUse = $Battle.nextPickupUse();
  }
  this.item = undefined;
  // Unburden activates once the held item is gone.
  this.effects.Unburden = true;
  // Symbiosis
  // if pbPartner && pbPartner.hasWorkingAbility(:SYMBIOSIS) && recycle
  //   if pbPartner.item>0 &&
  //      !$Battle.isUnlosableItem(this.partner(),pbPartner.item) &&
  //      !$Battle.isUnlosableItem(self,pbPartner.item)
  //     $Battle.pbDisplay(_INTL("{1}'s {2} let it share its {3} with {4}!",
  //        pbPartner.pbThis,PBAbilities.getName(pbPartner.ability),
  //        Items.name(pbPartner.item),pbThis(true)));
  //     this.item=pbPartner.item
  //     pbPartner.item=0
  //     pbPartner.effects.Unburden=true
  //     pbBerryCureCheck
  //   end
  // end
}
// Picks this battler's turn action at random: ~30% chance to switch,
// otherwise use a move.
selectAction() {
  return Utils.chance(30) ? ActionChoices.Switch : ActionChoices.UseMove;
}
/**
 * Performs this battler's chosen action for the turn.
 * Base implementation is a no-op; subclasses provide real behavior.
 * @param battle The active battle context (unused here).
 * @returns A promise that resolves immediately.
 */
action(battle) {
  // console.log(`${this.species} - Level: ${this.level} - Speed: ${this.speed}`);
  // console.log(`${this.species} used ${this.moveset[0].name}`);
  // The explicit Promise constructor (with an unused `reject`) was
  // unnecessary; an already-resolved promise is equivalent.
  return Promise.resolve();
}
}
} | the_stack |
import * as React from 'react'
import {CSSProperties} from 'react'
import {Button, Divider, Input, LinearProgress, Typography, Box} from '@material-ui/core'
import Alert, {Color as AlertColor} from '@material-ui/lab/Alert'
import Link from '../components/link'
import Autocomplete from '../keys/autocomplete'
import * as grpc from '@grpc/grpc-js'
import {shell} from 'electron'
import Header from '../header'
import TextInputView from '../textinput'
import {generateID} from '../helper'
import {Store} from 'pullstate'
import {
welcomeStatus,
canStartStatus,
connectingStatus,
connectedStatus,
disconnectedStatus,
errorStatus,
} from './status'
import {rpc} from '../rpc/client'
import {EDX25519, EDX25519Public} from '../rpc/keys'
import {
WormholeStatus,
WormholeInput,
WormholeOutput,
WormholeMessageStatus,
} from '@keys-pub/tsclient/lib/rpc'
import {ClientDuplexStream, RPCError} from '@keys-pub/tsclient'
import {WormholeMessage, WormholeMessageType} from './types'
// Persisted UI state for the wormhole screen (kept in a module-level store
// so it survives component unmount/remount).
type WormholeState = {
  sender: string
  recipient: string
  messages: WormholeMessage[]
}
// Module-level pullstate store backing the default-export container below.
const store = new Store<WormholeState>({
  sender: '',
  recipient: '',
  messages: [],
})
// Props injected into WormholeView by the default-export container.
export type Props = {
  sender: string
  recipient: string
  messages: WormholeMessage[]
  setRecipient: (recipient?: string) => void
  setSender: (sender?: string) => void
  setMessages: (messages: WormholeMessage[]) => void
}
// Local component state for WormholeView.
type State = {
  loading: boolean // true while the wormhole stream is connecting
  connected: boolean // true once WORMHOLE_CONNECTED is received
  rows: readonly WormholeMessage[] // rendered messages + status rows
}
/**
 * Wormhole screen: opens a duplex wormhole stream between a sender and a
 * recipient key, renders exchanged messages and connection status rows, and
 * provides the start/disconnect controls and message input.
 */
class WormholeView extends React.Component<Props, State> {
  state = {
    loading: false,
    connected: false,
    rows: this.props.messages,
  }
  // Scroll container for the message list.
  private listRef: React.RefObject<HTMLDivElement> = React.createRef()
  // Active duplex stream; undefined when not connected.
  private wormhole?: ClientDuplexStream<WormholeInput, WormholeOutput>
  componentDidMount() {
    if (this.state.rows.length == 0) {
      this.setState({rows: [welcomeStatus()]})
    }
  }
  componentDidUpdate(prevProps: Props, prevState: State) {
    // Reset the status rows whenever either endpoint selection changes.
    if (this.props.sender != prevProps.sender || this.props.recipient != prevProps.recipient) {
      if (this.props.recipient && this.props.sender) {
        this.setState({rows: [welcomeStatus(), canStartStatus()]})
      } else {
        this.setState({rows: [welcomeStatus()]})
      }
    }
  }
  componentWillUnmount() {
    this.close()
  }
  openInvite = () => {}
  // Opens the wormhole stream and wires its event handlers.
  start = async () => {
    console.log('Wormhole start...')
    this.addRow(connectingStatus(this.props.recipient))
    this.setState({loading: true, connected: false})
    this.wormhole = rpc.wormhole()
    this.wormhole.on('error', (err: RPCError) => {
      this.setState({loading: false, connected: false})
      if (err.code == grpc.status.CANCELLED || err.message == 'closed') {
        // Closed
        this.addRow(disconnectedStatus())
      } else {
        this.addRow(errorStatus(err.message))
      }
      return
    })
    this.wormhole.on('data', (res: WormholeOutput) => {
      if (this.state.loading && res.status == WormholeStatus.WORMHOLE_CONNECTED) {
        this.setState({loading: false, connected: true})
        this.addRow(connectedStatus())
      }
      if (res.status == WormholeStatus.WORMHOLE_CLOSED) {
        this.setState({loading: false, connected: false})
        this.addRow(disconnectedStatus())
        return
      }
      switch (res.message?.status) {
        case WormholeMessageStatus.WORMHOLE_MESSAGE_ACK:
          this.ack(res.message.id!)
          return
      }
      if ((res.message?.text?.length || 0) > 0) {
        this.addRow({id: res.message!.id!, text: res.message!.text!, type: WormholeMessageType.Received})
      }
    })
    this.wormhole.on('end', () => {
      this.setState({loading: false})
    })
    const req: WormholeInput = {
      sender: this.props.sender,
      recipient: this.props.recipient,
      invite: '',
      id: '',
      text: '',
    }
    console.log('Wormhole request:', req)
    this.wormhole.write(req)
  }
  // Writes a message to the open stream (no-op when empty or disconnected).
  send = (id: string, text: string) => {
    if (text == '') {
      return
    }
    if (!this.wormhole) {
      return
    }
    const req: WormholeInput = {
      id: id,
      text: text,
      sender: this.props.sender,
      recipient: this.props.recipient,
      invite: '',
    }
    // TODO: Check if wormhole closed
    this.wormhole.write(req)
  }
  // Cancels the stream and resets connection state.
  close = () => {
    // TODO: Check if wormhole already closed
    if (this.wormhole) {
      this.addRowUnlessLast(disconnectedStatus())
      this.wormhole.cancel()
      this.wormhole = undefined
    }
    this.setState({loading: false, connected: false})
  }
  // Marks the message with `id` as delivered (clears its pending flag).
  ack = (id: string) => {
    // Functional setState: an ack can arrive in the same tick as an addRow,
    // so avoid reading this.state.rows directly (stale-state hazard).
    this.setState((prev) => ({
      rows: prev.rows.map((r: WormholeMessage) => (r.id == id ? {...r, pending: false} : r)),
    }))
  }
  rowExists = (row: WormholeMessage): boolean => {
    return this.state.rows.some((r: WormholeMessage) => {
      return r.id == row.id
    })
  }
  rowLast = (): WormholeMessage | undefined => {
    if (this.state.rows.length > 0) {
      return this.state.rows[this.state.rows.length - 1]
    }
    return undefined
  }
  addRowUnlessExists = (row: WormholeMessage) => {
    if (!this.rowExists(row)) {
      this.addRow(row)
    }
  }
  // Appends a row unless it would duplicate the last one (e.g. repeated
  // "disconnected" statuses).
  addRowUnlessLast = (row: WormholeMessage) => {
    const last = this.rowLast()
    if (last?.id != row.id) {
      this.addRow(row)
    }
  }
  addRow = (row: WormholeMessage) => {
    // BUG FIX: the original read this.state.rows directly, called setState
    // with the computed array, then forceUpdate. Two addRow calls in the
    // same tick (e.g. connected status followed by the first received
    // message in one 'data' event) could both read the same stale rows and
    // drop one row. Use the functional updater and the setState callback.
    this.setState(
      (prev) => ({rows: [...prev.rows, row]}),
      () => {
        this.props.setMessages([...this.state.rows])
        // TODO: Don't scroll to bottom if we aren't at the bottom
        this.scrollToBottom()
      }
    )
  }
  // Sends the typed message and shows it immediately as pending.
  submitEditing = async (text: string) => {
    if (text == '') {
      return
    }
    const id = await generateID()
    this.addRow({id: id, text: text, type: WormholeMessageType.Sent, pending: true})
    this.send(id, text)
  }
  scrollToBottom = () => {
    const component = this.listRef.current
    if (component) {
      const scrollHeight = component.scrollHeight
      const height = component.clientHeight
      const maxScrollTop = scrollHeight - height
      component.scrollTop = maxScrollTop > 0 ? maxScrollTop : 0
    }
  }
  // Renders one message/status row, styled by its type.
  renderItem = (index: number, key: string) => {
    const row: WormholeMessage = this.state.rows[index]
    let style
    let className
    let element
    switch (row.type) {
      case WormholeMessageType.Sent:
        style = ownerStyle
        className = 'message-owner'
        element = <Typography>{row.text}</Typography>
        break
      case WormholeMessageType.Received:
        style = otherStyle
        className = 'message-other'
        element = <Typography>{row.text}</Typography>
        break
      case WormholeMessageType.Status:
        style = {}
        className = 'message-status'
        element = (
          <Alert severity={row.severity as AlertColor}>
            <Typography style={{display: 'inline'}}>{row.text}</Typography>
            {row.link && (
              <Typography style={{display: 'inline'}}>
                <Link span onClick={() => shell.openExternal('https://' + row.link + '.html')}>
                  {row.link}
                </Link>
                .
              </Typography>
            )}
          </Alert>
        )
        break
    }
    return (
      <Box key={key} className={className} style={{...rowStyle, ...style}}>
        {element}
      </Box>
    )
  }
  // Start button when idle; Cancel/Disconnect while connecting/connected.
  renderStartStop() {
    const canStart = this.props.sender != '' && this.props.recipient != ''
    const {loading, connected} = this.state
    return (
      <Box
        display="flex"
        flexDirection="row"
        style={{paddingLeft: 8, paddingTop: 6, paddingBottom: 6, paddingRight: 8}}
      >
        {!loading && !connected && (
          <Button
            color="primary"
            variant="outlined"
            onClick={this.start}
            disabled={!canStart}
            style={{width: 110}}
          >
            Start
          </Button>
        )}
        {(loading || connected) && (
          <Button color="secondary" variant="outlined" onClick={this.close} style={{width: 110}}>
            {this.state.loading ? 'Cancel' : 'Disconnect'}
          </Button>
        )}
      </Box>
    )
  }
  render() {
    const {loading, connected} = this.state
    return (
      <Box display="flex" flexDirection="column" flex={1} style={{height: '100%', position: 'relative'}}>
        <Header loading={loading} />
        <Divider />
        <Box display="flex" flexDirection="row" style={{paddingTop: 28}}>
          <Box display="flex" flexDirection="column" flex={1}>
            <Box
              display="flex"
              flexDirection="row"
              flex={1}
              style={{paddingLeft: 8, paddingTop: 8, paddingRight: 2}}
            >
              <Typography style={{paddingRight: 4, paddingTop: 2, paddingBottom: 8, width: 40}} align="right">
                To:
              </Typography>
              <Autocomplete
                identity={this.props.recipient}
                disabled={this.state.loading || this.state.connected}
                onChange={this.props.setRecipient}
                keyTypes={[EDX25519, EDX25519Public]}
                style={{width: '100%'}}
                addOptions={true}
              />
            </Box>
            <Divider />
            <Box
              display="flex"
              flexDirection="row"
              flex={1}
              style={{paddingLeft: 8, paddingTop: 8, paddingRight: 2}}
            >
              <Typography style={{paddingRight: 4, paddingTop: 2, paddingBottom: 8, width: 40}} align="right">
                From:
              </Typography>
              <Autocomplete
                identity={this.props.sender}
                disabled={this.state.loading || this.state.connected}
                onChange={this.props.setSender}
                keyTypes={[EDX25519]}
                style={{width: '100%'}}
              />
            </Box>
          </Box>
          {this.renderStartStop()}
        </Box>
        <Divider />
        <div
          ref={this.listRef}
          style={{
            position: 'absolute',
            top: 107,
            bottom: 40,
            left: 0,
            right: 0,
            overflowY: 'auto',
          }}
        >
          {this.state.rows.map((row, index) => this.renderItem(index, 'wormhole-row-' + index))}
        </div>
        <Box
          display="flex"
          flexDirection="column"
          style={{backgroundColor: 'white', position: 'absolute', bottom: 0, left: 0, right: 0}}
        >
          <Divider />
          <TextInputView
            defaultValue=""
            submitEditing={this.submitEditing}
            disabled={!connected}
            placeholder="Type your message..."
            disabledPlaceholder=" "
          />
        </Box>
      </Box>
    )
  }
}
// Base style shared by every rendered row.
const rowStyle: CSSProperties = {
  margin: 10,
  whiteSpace: 'pre-wrap',
  clear: 'both',
}
// Bubble style for messages received from the peer (left-aligned, grey).
const otherStyle: CSSProperties = {
  marginBottom: 2,
  marginTop: 0,
  padding: 10,
  background: '#eee',
  float: 'left',
  display: 'inline-block',
  maxWidth: '60%',
}
// Bubble style for messages sent by this user (right-aligned, blue).
const ownerStyle: CSSProperties = {
  marginBottom: 2,
  marginTop: 0,
  padding: 10,
  float: 'right',
  background: '#0084ff',
  color: '#fff',
  display: 'inline-block',
  maxWidth: '60%',
}
// Container component: binds the module-level pullstate store to
// WormholeView's props so the conversation state persists across mounts.
export default (_: {}) => {
  const selected = store.useState((s) => ({
    sender: s.sender,
    recipient: s.recipient,
    messages: s.messages,
  }))
  const updateRecipient = (recipient?: string) =>
    store.update((s) => {
      s.recipient = recipient || ''
    })
  const updateSender = (sender?: string) =>
    store.update((s) => {
      s.sender = sender || ''
    })
  const updateMessages = (messages: WormholeMessage[]) =>
    store.update((s) => {
      s.messages = messages
    })
  return (
    <WormholeView
      sender={selected.sender}
      recipient={selected.recipient}
      messages={selected.messages}
      setSender={updateSender}
      setRecipient={updateRecipient}
      setMessages={updateMessages}
    />
  )
}
import { XrplNetwork, Wallet } from 'xpring-common-js'
import {
Flags,
LimitAmount,
Amount,
TransferRate,
Destination,
TakerPays,
QualityIn,
QualityOut,
} from './Generated/web/org/xrpl/rpc/v1/common_pb'
import { AccountAddress } from './Generated/web/org/xrpl/rpc/v1/account_pb'
import {
Currency,
CurrencyAmount,
IssuedCurrencyAmount,
XRPDropsAmount,
} from './Generated/web/org/xrpl/rpc/v1/amount_pb'
import {
TrustSet,
AccountSet,
Payment,
Transaction,
} from './Generated/web/org/xrpl/rpc/v1/transaction_pb'
import isNode from '../Common/utils'
import CoreXrplClient from './core-xrpl-client'
import GrpcNetworkClient from './network-clients/grpc-xrp-network-client'
import GrpcNetworkClientWeb from './network-clients/grpc-xrp-network-client.web'
import { GrpcNetworkClientInterface } from './network-clients/grpc-network-client-interface'
import { XrpError, XrpErrorType } from './shared'
import { AccountSetFlag } from './shared/account-set-flag'
import TransactionResult from './shared/transaction-result'
import GatewayBalances, {
gatewayBalancesFromResponse,
} from './shared/gateway-balances'
import TrustLine from './shared/trustline'
import {
AccountLinesResponse,
AccountLinesSuccessfulResponse,
WebSocketFailureResponse,
TransactionResponse,
GatewayBalancesResponse,
GatewayBalancesSuccessfulResponse,
RipplePathFindSuccessfulResponse,
PathElement,
ResponseStatus,
SourceCurrency,
} from './shared/rippled-web-socket-schema'
import { WebSocketNetworkClientInterface } from './network-clients/web-socket-network-client-interface'
import WebSocketNetworkClient from './network-clients/web-socket-network-client'
import {
Expiration,
OfferSequence,
SendMax,
TakerGets,
} from 'xpring-common-js/build/src/XRP/generated/org/xrpl/rpc/v1/common_pb'
import { RippledErrorMessages } from './shared/rippled-error-messages'
import TrustSetFlag from './shared/trust-set-flag'
import { BigNumber } from 'bignumber.js'
import {
OfferCancel,
OfferCreate,
} from 'xpring-common-js/build/src/XRP/generated/org/xrpl/rpc/v1/transaction_pb'
import IssuedCurrency from './shared/issued-currency'
import XrpUtils from './shared/xrp-utils'
import OfferCreateFlag from './shared/offer-create-flag'
/**
* IssuedCurrencyClient is a client for working with Issued Currencies on the XRPL.
* @see https://xrpl.org/issued-currencies-overview.html
*/
export default class IssuedCurrencyClient {
private coreXrplClient: CoreXrplClient
/**
* Create a new IssuedCurrencyClient.
*
* The IssuedCurrencyClient will use gRPC to communicate with the given endpoint.
*
* @param grpcUrl The URL of the gRPC instance to connect to.
* @param network The network this IssuedCurrencyClient is connecting to.
* @param forceWeb If `true`, then we will use the gRPC-Web client even when on Node. Defaults to false. This is mainly for testing and in the future will be removed when we have browser testing.
*/
public static issuedCurrencyClientWithEndpoint(
  grpcUrl: string,
  webSocketUrl: string,
  handleWebSocketErrorMessage: (data: string) => void,
  network: XrplNetwork,
  forceWeb = false,
): IssuedCurrencyClient {
  // Node gets the native gRPC client; browsers (or forceWeb) get gRPC-Web.
  const useNativeGrpc = isNode() && !forceWeb
  const grpcNetworkClient = useNativeGrpc
    ? new GrpcNetworkClient(grpcUrl)
    : new GrpcNetworkClientWeb(grpcUrl)
  const webSocketClient = new WebSocketNetworkClient(
    webSocketUrl,
    handleWebSocketErrorMessage,
  )
  return new IssuedCurrencyClient(grpcNetworkClient, webSocketClient, network)
}
/**
* Create a new IssuedCurrencyClient with a custom network client implementation.
*
* In general, clients should prefer to call `issuedCurrencyClientWithEndpoint`. This constructor is provided to improve testability of this class.
*
* @param networkClient A network client which will manage remote RPCs to Rippled.
* @param network The network this IssuedCurrencyClient is connecting to.
*/
public constructor(
  grpcNetworkClient: GrpcNetworkClientInterface,
  readonly webSocketNetworkClient: WebSocketNetworkClientInterface,
  readonly network: XrplNetwork,
) {
  // gRPC-based operations are delegated to a shared CoreXrplClient; the
  // websocket client is used directly for rippled JSON-RPC-style calls.
  this.coreXrplClient = new CoreXrplClient(grpcNetworkClient, network)
}
/**
* Retrieves information about an account's trust lines, which maintain balances of all non-XRP currencies and assets.
* @see https://xrpl.org/trust-lines-and-issuing.html
*
* @param account The account for which to retrieve associated trust lines, encoded as an X-Address.
* @param peerAccount (Optional) The address of a second account, encoded as an X-Address.
* If provided, show only trust lines connecting the two accounts.
* @see https://xrpaddress.info/
* @returns An array of TrustLine objects, representing all trust lines associated with this account.
*/
public async getTrustLines(
  account: string,
  peerAccount?: string,
): Promise<Array<TrustLine>> {
  // Both addresses must be X-Addresses; rippled itself is queried with the
  // decoded classic addresses.
  const classicAddress = XrpUtils.decodeXAddress(account)
  if (!classicAddress) {
    throw XrpError.xAddressRequired
  }
  // BUG FIX(review): the original validated the peer X-Address but then
  // passed the raw X-Address to getAccountLines, while `account` was passed
  // decoded. Pass the decoded classic address for consistency.
  let peerClassicAddress: string | undefined
  if (peerAccount) {
    const decodedPeer = XrpUtils.decodeXAddress(peerAccount)
    if (!decodedPeer) {
      throw XrpError.xAddressRequired
    }
    peerClassicAddress = decodedPeer.address
  }
  const accountLinesResponse: AccountLinesResponse = await this.webSocketNetworkClient.getAccountLines(
    classicAddress.address,
    peerClassicAddress,
  )
  const error = (accountLinesResponse as WebSocketFailureResponse).error
  if (error) {
    if (error === RippledErrorMessages.accountNotFound) {
      throw XrpError.accountNotFound
    } else {
      throw new XrpError(XrpErrorType.Unknown, error)
    }
  }
  const accountLinesSuccessfulResponse = accountLinesResponse as AccountLinesSuccessfulResponse
  const rawTrustLines = accountLinesSuccessfulResponse.result.lines
  if (rawTrustLines === undefined) {
    throw XrpError.malformedResponse
  }
  // Build the result with map's return value directly (the original used a
  // side-effecting map callback and discarded its result).
  return rawTrustLines.map((trustline) => new TrustLine(trustline))
}
/**
* Returns information about the total balances issued by a given account,
* optionally excluding amounts held by operational addresses.
* @see https://xrpl.org/issuing-and-operational-addresses.html
*
* @param account The account for which to retrieve balance information, encoded as an X-Address.
* @param accountsToExclude (Optional) An array of operational addresses to exclude from the balances issued, encoded as X-Addresses.
* @see https://xrpaddress.info/
* @returns A GatewayBalances object containing information about an account's balances.
*/
public async getGatewayBalances(
  account: string,
  accountsToExclude: Array<string> = [],
): Promise<GatewayBalances> {
  // check issuing account for X-Address format
  const classicAddress = XrpUtils.decodeXAddress(account)
  if (!classicAddress) {
    throw XrpError.xAddressRequired
  }
  // check excludable addresses for X-Address format, and convert to classic addresses for request
  const classicAddressesToExclude = accountsToExclude.map((xAddress) => {
    const excludeClassicAddress = XrpUtils.decodeXAddress(xAddress)
    if (!excludeClassicAddress) {
      throw XrpError.xAddressRequired
    }
    // BUG FIX: previously returned `classicAddress.address` (the issuing
    // account) here, so the exclusion list never contained the addresses
    // the caller asked to exclude.
    return excludeClassicAddress.address
  })
  const gatewayBalancesResponse: GatewayBalancesResponse = await this.webSocketNetworkClient.getGatewayBalances(
    classicAddress.address,
    classicAddressesToExclude,
  )
  const error = (gatewayBalancesResponse as WebSocketFailureResponse).error
  if (!error) {
    return gatewayBalancesFromResponse(
      gatewayBalancesResponse as GatewayBalancesSuccessfulResponse,
    )
  }
  switch (error) {
    case RippledErrorMessages.accountNotFound:
      throw XrpError.accountNotFound
    case RippledErrorMessages.invalidExcludedAddress:
      throw new XrpError(
        XrpErrorType.InvalidInput,
        'The address(es) supplied for exclusion were invalid.',
      )
    default:
      throw new XrpError(XrpErrorType.Unknown, error)
  }
}
/**
* Subscribes to all transactions that affect the specified account, and triggers a callback upon
* receiving each transaction.
* @see https://xrpl.org/monitor-incoming-payments-with-websocket.html
* @see https://xrpl.org/subscribe.html
*
* @param account The account for which to subscribe to relevant transactions, encoded as an X-Address.
* @param callback The function to trigger upon receiving a transaction event from the ledger.
* @returns Whether the request to subscribe succeeded.
*/
public async monitorAccountTransactions(
  account: string,
  callback: (data: TransactionResponse) => void,
): Promise<boolean> {
  const classicAddress = XrpUtils.decodeXAddress(account)
  if (!classicAddress) {
    throw XrpError.xAddressRequired
  }
  // rippled's subscribe response status tells us whether it succeeded.
  const subscribeResponse = await this.webSocketNetworkClient.subscribeToAccount(
    classicAddress.address,
    callback,
  )
  return subscribeResponse.status === ResponseStatus.success
}
/**
* Unsubscribes from transactions that affect the specified account.
* @see https://xrpl.org/unsubscribe.html
*
* @param account The account from which to unsubscribe from, encoded as an X-Address.
* @returns Whether the request to unsubscribe succeeded.
*/
public async stopMonitoringAccountTransactions(
  account: string,
): Promise<boolean> {
  const classicAddress = XrpUtils.decodeXAddress(account)
  if (!classicAddress) {
    throw XrpError.xAddressRequired
  }
  // rippled's unsubscribe response status tells us whether it succeeded.
  const unsubscribeResponse = await this.webSocketNetworkClient.unsubscribeFromAccount(
    classicAddress.address,
  )
  return unsubscribeResponse.status === ResponseStatus.success
}
/**
* Enable Require Authorization for this XRPL account.
*
* @see https://xrpl.org/become-an-xrp-ledger-gateway.html#require-auth
*
* @param wallet The wallet associated with the XRPL account enabling Require Authorization and that will sign the request.
* @returns A promise which resolves to a TransactionResult object that represents the result of this transaction.
*/
// Enables the asfRequireAuth account flag for `wallet`'s account.
public async requireAuthorizedTrustlines(
  wallet: Wallet,
): Promise<TransactionResult> {
  const flag = AccountSetFlag.asfRequireAuth
  return this.coreXrplClient.changeFlag(flag, true, wallet)
}
/**
* Disable Require Authorization for this XRPL account.
*
* @see https://xrpl.org/become-an-xrp-ledger-gateway.html#require-auth
*
* @param wallet The wallet associated with the XRPL account disabling Require Authorization and that will sign the request.
* @returns A promise which resolves to a TransactionResult object that represents the result of this transaction.
*/
// Clears the asfRequireAuth account flag for `wallet`'s account.
public async allowUnauthorizedTrustlines(
  wallet: Wallet,
): Promise<TransactionResult> {
  const flag = AccountSetFlag.asfRequireAuth
  return this.coreXrplClient.changeFlag(flag, false, wallet)
}
/**
* Enable Default Ripple for this XRPL account.
*
* @see https://xrpl.org/become-an-xrp-ledger-gateway.html#default-ripple
*
* @param wallet The wallet associated with the XRPL account enabling Default Ripple and that will sign the request.
* @returns A promise which resolves to a TransactionResult object that represents the result of this transaction.
*/
// Sets the asfDefaultRipple account flag for `wallet`'s account.
public async enableRippling(wallet: Wallet): Promise<TransactionResult> {
  const flag = AccountSetFlag.asfDefaultRipple
  return this.coreXrplClient.changeFlag(flag, true, wallet)
}
/**
* Enable Disallow XRP for this XRPL account.
* Note that the meaning of this flag is not enforced by rippled, and is only intended for use by client applications.
*
* @see https://xrpl.org/become-an-xrp-ledger-gateway.html#disallow-xrp
*
* @param wallet The wallet associated with the XRPL account enabling Disallow XRP and that will sign the request.
* @returns A promise which resolves to a TransactionResult object that represents the result of this transaction.
*/
// Sets the asfDisallowXRP account flag (advisory only; not enforced by rippled).
public async disallowIncomingXrp(wallet: Wallet): Promise<TransactionResult> {
  const flag = AccountSetFlag.asfDisallowXRP
  return this.coreXrplClient.changeFlag(flag, true, wallet)
}
/**
* Disable Disallow XRP for this XRPL account.
* Note that the meaning of this flag is not enforced by rippled, and is only intended for use by client applications.
*
* @see https://xrpl.org/become-an-xrp-ledger-gateway.html#disallow-xrp
*
* @param wallet The wallet associated with the XRPL account disabling Disallow XRP and that will sign the request.
* @returns A promise which resolves to a TransactionResult object that represents the result of this transaction.
*/
// Clears the asfDisallowXRP account flag (advisory only; not enforced by rippled).
public async allowIncomingXrp(wallet: Wallet): Promise<TransactionResult> {
  const flag = AccountSetFlag.asfDisallowXRP
  return this.coreXrplClient.changeFlag(flag, false, wallet)
}
/**
* Enable Require Destination Tags for this XRPL account.
*
* @see https://xrpl.org/require-destination-tags.html
*
* @param wallet The wallet associated with the XRPL account enabling Require Destination Tags and that will sign the request.
* @returns A promise which resolves to a TransactionResult object that represents the result of this transaction.
*/
// Sets the asfRequireDest account flag for `wallet`'s account.
public async requireDestinationTags(
  wallet: Wallet,
): Promise<TransactionResult> {
  const flag = AccountSetFlag.asfRequireDest
  return this.coreXrplClient.changeFlag(flag, true, wallet)
}
/**
* Disable Require Destination for this XRPL account.
*
* @see https://xrpl.org/require-destination-tags.html
*
* @param wallet The wallet associated with the XRPL account disabling Require Destination and that will sign the request.
* @returns A promise which resolves to a TransactionResult object that represents the result of this transaction.
*/
// Clears the asfRequireDest account flag for `wallet`'s account.
public async allowNoDestinationTag(
  wallet: Wallet,
): Promise<TransactionResult> {
  const flag = AccountSetFlag.asfRequireDest
  return this.coreXrplClient.changeFlag(flag, false, wallet)
}
/**
* Get the Transfer Fees for a given issuing account.
* The Transfer Fee is a percentage to charge when two users transfer an issuer's IOUs on the XRPL.
*
* @see https://xrpl.org/transfer-fees.html
*
* @param address The X-address for which the transfer rate is requested.
* @returns A promise which resolves to a number that represents the transfer fee associated with that issuing account,
* or undefined if one is not specified.
*/
public async getTransferFee(address: string): Promise<number | undefined> {
  const classicAddress = XrpUtils.decodeXAddress(address)
  if (!classicAddress) {
    throw XrpError.xAddressRequired
  }
  // The transfer rate lives on the account root; an absent field yields
  // undefined (no fee configured).
  const accountRoot = await this.coreXrplClient.getAccountData(
    classicAddress.address,
  )
  const transferRate = accountRoot.getTransferRate()
  return transferRate?.getValue()
}
/**
* Set the Transfer Fees for an issuing account.
* The Transfer Fee is a percentage to charge when two users transfer an issuer's IOUs on the XRPL.
*
* @see https://xrpl.org/transfer-fees.html
*
* @param transferFee The amount you must send for the recipient to get 1 billion units of the same currency.
* It cannot be set to less than 1000000000 or more than 2000000000.
* @param wallet The wallet associated with the issuing account, and that will sign the request.
* @returns A promise which resolves to a TransactionResult object that represents the result of this transaction.
*/
public async setTransferFee(
  transferFee: number,
  wallet: Wallet,
): Promise<TransactionResult> {
  // Build the AccountSet carrying the new TransferRate value.
  const transferRate = new TransferRate()
  transferRate.setValue(transferFee)
  const accountSet = new AccountSet()
  accountSet.setTransferRate(transferRate)
  // Wrap it in a base transaction, sign, submit, and await finality.
  const transaction = await this.coreXrplClient.prepareBaseTransaction(wallet)
  transaction.setAccountSet(accountSet)
  const submittedHash = await this.coreXrplClient.signAndSubmitTransaction(
    transaction,
    wallet,
  )
  return this.coreXrplClient.getFinalTransactionResultAsync(submittedHash, wallet)
}
/**
* Enable Global Freeze for this XRPL account.
*
* @see https://xrpl.org/freezes.html#global-freeze
*
* @param wallet The wallet associated with the XRPL account enabling Global Freeze and that will sign the request.
* @returns A promise which resolves to a TransactionResult object that represents the result of this transaction.
*/
// Sets the asfGlobalFreeze account flag for `wallet`'s account.
public async enableGlobalFreeze(wallet: Wallet): Promise<TransactionResult> {
  const flag = AccountSetFlag.asfGlobalFreeze
  return this.coreXrplClient.changeFlag(flag, true, wallet)
}
/**
* Disable Global Freeze for this XRPL account.
*
* @see https://xrpl.org/freezes.html#global-freeze
*
* @param wallet The wallet associated with the XRPL account disabling Global Freeze and that will sign the request.
* @returns A promise which resolves to a TransactionResult object that represents the result of this transaction.
*/
// Clears the asfGlobalFreeze account flag for `wallet`'s account.
public async disableGlobalFreeze(wallet: Wallet): Promise<TransactionResult> {
  const flag = AccountSetFlag.asfGlobalFreeze
  return this.coreXrplClient.changeFlag(flag, false, wallet)
}
/**
* Permanently enable No Freeze for this XRPL account.
*
* @see https://xrpl.org/freezes.html#no-freeze
*
* @param wallet The wallet associated with the XRPL account enabling No Freeze and that will sign the request.
* @returns A promise which resolves to a TransactionResult object that represents the result of this transaction.
*/
// Sets the (permanent) asfNoFreeze account flag for `wallet`'s account.
public async enableNoFreeze(wallet: Wallet): Promise<TransactionResult> {
  const flag = AccountSetFlag.asfNoFreeze
  return this.coreXrplClient.changeFlag(flag, true, wallet)
}
/**
* Creates a trust line between this XRPL account and an issuer of an IssuedCurrency.
*
* @see https://xrpl.org/trustset.html
*
* @param issuerXAddress The X-Address of the issuer to extend trust to.
* @param currencyName The currency this trust line applies to, as a three-letter ISO 4217 Currency Code or a 160-bit hex value according to currency format.
* @param amount Decimal representation of the limit to set on this trust line.
* @param wallet The wallet creating the trustline.
* @param qualityIn (Optional) Value incoming balances on this trust line at the ratio of this number per 1,000,000,000 units.
* A value of 0 is shorthand for treating balances at face value.
* @param qualityOut (Optional) Value outgoing balances on this trust line at the ratio of this number per 1,000,000,000 units.
* A value of 0 is shorthand for treating balances at face value.
*/
public async createTrustLine(
  issuerXAddress: string,
  currencyName: string,
  amount: string,
  wallet: Wallet,
  qualityIn?: number,
  qualityOut?: number,
): Promise<TransactionResult> {
  // A plain trust line is a TrustSet with no flags set.
  const noFlag = undefined
  return this.sendTrustSetTransaction(
    issuerXAddress,
    currencyName,
    amount,
    noFlag,
    wallet,
    qualityIn,
    qualityOut,
  )
}
/**
* Creates an authorized trust line between this XRPL account (issuing account) and another account.
* Note that the other account must also create a trust line to this issuing account in order to establish a trust line with a non-zero limit.
* If this method is called before the other account creates a trust line, a trust line with a limit of 0 is created.
* However, this is only true if this issuing account has already required Authorized Trustlines (see https://xrpl.org/authorized-trust-lines.html),
* otherwise no trust line is created.
*
* @see https://xrpl.org/authorized-trust-lines.html
*
* @param accountToAuthorize The X-Address of the address with which to authorize a trust line.
* @param currencyName The currency to authorize a trust line for.
* @param wallet The wallet creating the authorized trust line.
*/
public async authorizeTrustLine(
accountToAuthorize: string,
currencyName: string,
wallet: Wallet,
): Promise<TransactionResult> {
// When authorizing a trust line, the value of the trust line is set to 0.
// See https://xrpl.org/authorized-trust-lines.html#authorizing-trust-lines
return this.sendTrustSetTransaction(
accountToAuthorize,
currencyName,
'0',
TrustSetFlag.tfSetfAuth,
wallet,
)
}
/**
* Freezes the trust line between this account (issuing account) and another account.
* Note that the trust line's limit is set to 0.
*
* @see https://xrpl.org/freezes.html#enabling-or-disabling-individual-freeze
*
* @param trustLinePeerAccount The X-Address of the account involved in the trust line being frozen.
* @param currencyName The currency of the trust line to freeze.
* @param wallet The wallet freezing the trust line.
*/
public async freezeTrustLine(
trustLinePeerAccount: string,
currencyName: string,
wallet: Wallet,
): Promise<TransactionResult> {
return this.sendTrustSetTransaction(
trustLinePeerAccount,
currencyName,
// You can change the trust line when you freeze it, but an amount of 0
// would be the most conservative amount.
'0',
TrustSetFlag.tfSetFreeze,
wallet,
)
}
/**
* Unfreezes the trust line between this account (issuing account) and another account.
* Note that the trust line's limit is set to 0.
*
* @see https://xrpl.org/freezes.html#enabling-or-disabling-individual-freeze
*
* @param trustLinePeerAccount The X-Address of the account involved in the trust line being unfrozen.
* @param currencyName The currency of the trust line to unfreeze.
* @param wallet The wallet unfreezing the trust line.
*/
public async unfreezeTrustLine(
trustLinePeerAccount: string,
currencyName: string,
wallet: Wallet,
): Promise<TransactionResult> {
return this.sendTrustSetTransaction(
trustLinePeerAccount,
currencyName,
// You can change the trust line amount when you unfreeze it, but this would typically
// be used by gateways, who will maintain an amount of 0.
'0',
TrustSetFlag.tfClearFreeze,
wallet,
)
}
/**
* Disables rippling on the trust line between this account (issuing account) and another account.
*
* @see https://xrpl.org/rippling.html#enabling-disabling-no-ripple
*
* @param trustLinePeerAccount The X-Address of the account involved in the trust line being disabled to ripple.
* @param currencyName The currency of the trust line being disbaled to ripple.
* @param amount The maximum amount of debt to allow on this trust line.
* @param wallet The wallet disabling rippling on the trust line.
*/
public async disableRipplingForTrustLine(
trustLinePeerAccount: string,
currencyName: string,
amount: string,
wallet: Wallet,
): Promise<TransactionResult> {
return this.sendTrustSetTransaction(
trustLinePeerAccount,
currencyName,
amount,
TrustSetFlag.tfSetNoRipple,
wallet,
)
}
/**
* Re-enables rippling on the trust line between this account (issuing account) and another account.
*
* @see https://xrpl.org/rippling.html#enabling-disabling-no-ripple
*
* @param trustLinePeerAccount trustLinePeerAccount The X-Address of the account involved in the trust line being re-enabled to ripple.
* @param currencyName The currency of the trust line being re-enabled to ripple.
* @param amount The maximum amount of debt to allow on this trust line.
* @param wallet The wallet re-enabling rippling on the trust line.
*/
public async enableRipplingForTrustLine(
trustLinePeerAccount: string,
currencyName: string,
amount: string,
wallet: Wallet,
): Promise<TransactionResult> {
return this.sendTrustSetTransaction(
trustLinePeerAccount,
currencyName,
amount,
TrustSetFlag.tfClearNoRipple,
wallet,
)
}
/*
* Creates and sends a TrustSet transaction to the XRPL.
*
* @param accountToTrust The account to extend trust to with a trust line.
* @param currencyName The name of the currency to create a trust line for.
* @param amount The maximum amount of debt to allow on this trust line.
* @param wallet A wallet associated with the account extending trust.
* @param qualityIn (Optional) Value incoming balances on this trust line at the ratio of this number per 1,000,000,000 units.
* A value of 0 is shorthand for treating balances at face value.
* @param qualityOut (Optional) Value outgoing balances on this trust line at the ratio of this number per 1,000,000,000 units.
* A value of 0 is shorthand for treating balances at face value.
*/
private async sendTrustSetTransaction(
accountToTrust: string,
currencyName: string,
amount: string,
flags: number | undefined,
wallet: Wallet,
qualityIn?: number,
qualityOut?: number,
): Promise<TransactionResult> {
const trustSetTransaction = await this.prepareTrustSetTransaction(
accountToTrust,
currencyName,
amount,
flags,
wallet,
qualityIn,
qualityOut,
)
const transactionHash = await this.coreXrplClient.signAndSubmitTransaction(
trustSetTransaction,
wallet,
)
return this.coreXrplClient.getFinalTransactionResultAsync(
transactionHash,
wallet,
)
}
/**
* Prepares a TrustSet transaction to be sent and executed on the XRPL.
*
* @param accountToTrust The account to extend trust to with a trust line.
* @param currencyName The name of the currency to create a trust line for.
* @param amount The maximum amount of debt to allow on this trust line.
* @param wallet A wallet associated with the account extending trust.
* @param qualityIn (Optional) Value incoming balances on this trust line at the ratio of this number per 1,000,000,000 units.
* A value of 0 is shorthand for treating balances at face value.
* @param qualityOut (Optional) Value outgoing balances on this trust line at the ratio of this number per 1,000,000,000 units.
* A value of 0 is shorthand for treating balances at face value.
*/
private async prepareTrustSetTransaction(
accountToTrust: string,
currencyName: string,
amount: string,
flags: number | undefined,
wallet: Wallet,
qualityInAmount?: number,
qualityOutAmount?: number,
): Promise<Transaction> {
if (!XrpUtils.isValidXAddress(accountToTrust)) {
throw XrpError.xAddressRequired
}
const classicAddress = XrpUtils.decodeXAddress(accountToTrust)
if (!classicAddress) {
throw XrpError.xAddressRequired
}
// TODO (tedkalaw): Use X-Address directly when ripple-binary-codec supports X-Addresses.
const issuerAccountAddress = new AccountAddress()
issuerAccountAddress.setAddress(classicAddress.address)
if (currencyName === 'XRP') {
throw new XrpError(
XrpErrorType.InvalidInput,
'prepareTrustSetTransaction: Trust lines can only be created for Issued Currencies',
)
}
const currency = new Currency()
currency.setName(currencyName)
const issuedCurrencyAmount = new IssuedCurrencyAmount()
issuedCurrencyAmount.setCurrency(currency)
issuedCurrencyAmount.setIssuer(issuerAccountAddress)
// TODO (tedkalaw): Support other types of amounts (number, bigInt, etc)
issuedCurrencyAmount.setValue(amount)
const currencyAmount = new CurrencyAmount()
currencyAmount.setIssuedCurrencyAmount(issuedCurrencyAmount)
const limit = new LimitAmount()
limit.setValue(currencyAmount)
const trustSet = new TrustSet()
trustSet.setLimitAmount(limit)
if (qualityInAmount) {
const qualityIn = new QualityIn()
qualityIn.setValue(qualityInAmount)
trustSet.setQualityIn(qualityIn)
}
if (qualityOutAmount) {
const qualityOut = new QualityOut()
qualityOut.setValue(qualityOutAmount)
trustSet.setQualityOut(qualityOut)
}
const transaction = await this.coreXrplClient.prepareBaseTransaction(wallet)
transaction.setTrustSet(trustSet)
if (flags !== undefined) {
const transactionFlags = new Flags()
transactionFlags.setValue(flags)
transaction.setFlags(transactionFlags)
}
return transaction
}
/**
* Creates new issued currency on a trustline to the destination account. Note that the destination account must have a trustline
* extended to the sender of this transaction (the "issuer" of this issued currency) or no issued currency will be created.
*
* @param sender The Wallet creating the issued currency, and that will sign the transaction.
* @param destination The destination address (recipient) of the issued currency, encoded as an X-address (see https://xrpaddress.info/).
* @param currency The three-letter currency code of the issued currency being created.
* @param amount The amount of issued currency to create.
*/
public async createIssuedCurrency(
sender: Wallet,
destination: string,
currency: string,
amount: string,
): Promise<TransactionResult> {
const issuer = sender.getAddress()
const issuedCurrency = {
currency,
issuer,
value: amount,
}
return this.issuedCurrencyPayment(sender, destination, issuedCurrency)
}
  /**
   * Redeems issued currency back to the original issuer.
   * Typically, this should trigger off-ledger action by the issuing institution.
   *
   * @param sender The Wallet redeeming the issued currency, and that will sign the transaction.
   * @param issuedCurrency The issued currency being redeemed. Its `issuer` field is the original
   *                       issuer of the currency, encoded as an X-address (see https://xrpaddress.info/),
   *                       and its `value` field is the amount to redeem.
   */
  public async redeemIssuedCurrency(
    sender: Wallet,
    issuedCurrency: IssuedCurrency,
  ): Promise<TransactionResult> {
    // Redemption of issued currency is achieved by sending issued currency directly to the original issuer.
    // However, the issuer field specified in the amount is treated as a special case in this circumstance, and should be
    // set to the address of the account initiating the redemption.
    // See: https://xrpl.org/payment.html#special-issuer-values-for-sendmax-and-amount
    const specialIssuedCurrency: IssuedCurrency = {
      ...issuedCurrency,
      issuer: sender.getAddress(),
    }
    return this.issuedCurrencyPayment(
      sender,
      issuedCurrency.issuer,
      specialIssuedCurrency,
    )
  }
/**
* Sends issued currency from one (non-issuing) account to another.
*
* @param sender The Wallet from which issued currency will be sent, and that will sign the transaction.
* @param destination The destination address for the payment, encoded as an X-address (see https://xrpaddress.info/).
* @param issuedCurrency The issued currency being sent.
* @param transferFee (Optional) The transfer fee associated with the issuing account, expressed as a percentage. (i.e. a value of .5 indicates
* a 0.5% transfer fee). Supply this field for automatic calculation of the sendMax value for this payment.
* Either this or sendMaxvalue may be specified, but not both.
* @param sendMaxValue (Optional) A manual specification of the maximum amount of source currency this payment is allowed to cost,
* including transfer fees, exchange rates, and slippage. Does not include the XRP destroyed as a cost for submitting
* the transaction. Either this or transferFee may be specified, but not both.
*/
public async sendIssuedCurrencyPayment(
sender: Wallet,
destination: string,
issuedCurrency: IssuedCurrency,
transferFee?: number,
sendMaxValue?: string,
): Promise<TransactionResult> {
if (sender.getAddress() === issuedCurrency.issuer) {
throw new XrpError(
XrpErrorType.InvalidInput,
'The sending address cannot be the same as the issuing address. To create issued currency, use `createIssuedCurrency`.',
)
}
if (destination === issuedCurrency.issuer) {
throw new XrpError(
XrpErrorType.InvalidInput,
'The destination address cannot be the same as the issuer. To redeem issued currency, use `redeemIssuedCurrency`.',
)
}
return this.issuedCurrencyPayment(
sender,
destination,
issuedCurrency,
transferFee,
sendMaxValue,
)
}
/**
* Send a cross-currency payment. Note that the source and destination currencies cannot both be XRP.
* The XRPL is queried for viable paths as part of transaction construction. If no paths are found,
* an error is thrown and no transaction is submitted.
*
* @see https://xrpl.org/cross-currency-payments.html
*
* @param sender The wallet from which currency will be sent, and that will sign the transaction.
* @param destination The address of the payment's recipient, encoded as an X-Address.
* @param maxSourceAmount Highest amount of source currency this transaction is allowed to cost, including transfer fees,
* exchange rates, and slippage. Does not include the XRP destroyed as a cost for submitting the transaction.
* For XRP, specify amount as a string of drops.
* For issued currencies, specify an IssuedCurrency object with currency, issuer, and value fields.
* @param deliverAmount The amount of currency to deliver. For XRP, specify amount as a string of drops. For issued currencies,
* specify an IssuedCurrency object with currency, issuer, and value fields.
*/
public async sendCrossCurrencyPayment(
sender: Wallet,
destination: string,
maxSourceAmount: string | IssuedCurrency,
deliverAmount: string | IssuedCurrency,
): Promise<TransactionResult> {
if (!XrpUtils.isValidXAddress(destination)) {
throw new XrpError(
XrpErrorType.XAddressRequired,
'Destination address must be in X-address format. See https://xrpaddress.info/.',
)
}
// Both source amount and deliver amount can't both be XRP:
if (
XrpUtils.isString(deliverAmount) &&
XrpUtils.isString(maxSourceAmount)
) {
throw new XrpError(
XrpErrorType.InvalidInput,
'A cross-currency payment should involve at least one issued currency.',
)
}
if (
XrpUtils.isIssuedCurrency(deliverAmount) &&
XrpUtils.isIssuedCurrency(maxSourceAmount) &&
(deliverAmount as IssuedCurrency).currency ===
(maxSourceAmount as IssuedCurrency).currency &&
(deliverAmount as IssuedCurrency).issuer ===
(maxSourceAmount as IssuedCurrency).issuer
) {
throw new XrpError(
XrpErrorType.InvalidInput,
'A Cross Currency payment must specify different currencies for deliverAmount and maxSourceAmount.',
)
}
// Create representation of the currency to SOURCE
const sourceAmountProto = this.createCurrencyAmount(maxSourceAmount)
// Create representation of the currency to DELIVER
const deliverAmountProto = this.createAmount(deliverAmount)
// Create destination proto
const destinationAccountAddress = new AccountAddress()
destinationAccountAddress.setAddress(destination)
const destinationProto = new Destination()
destinationProto.setValue(destinationAccountAddress)
// Construct Payment fields
const payment = new Payment()
payment.setDestination(destinationProto)
// Note that the destinationTag doesn't need to be explicitly set here, because the ripple-binary-codec will decode this X-Address and
// assign the decoded destinationTag before signing.
payment.setAmount(deliverAmountProto)
// Assign sourceCurrencyAmount as sendMax for Payment
const sendMax = new SendMax()
sendMax.setValue(sourceAmountProto)
payment.setSendMax(sendMax)
// Determine if there is a viable path
const sourceCurrencyName = XrpUtils.isString(maxSourceAmount)
? 'XRP'
: (maxSourceAmount as IssuedCurrency).currency
const sourceCurrency: SourceCurrency = {
currency: sourceCurrencyName,
}
const sourceCurrencies = [sourceCurrency]
const sourceClassicAddress = XrpUtils.decodeXAddress(sender.getAddress())
if (!sourceClassicAddress) {
throw XrpError.xAddressRequired
}
const destinationClassicAddress = XrpUtils.decodeXAddress(destination)
if (!destinationClassicAddress) {
throw XrpError.xAddressRequired
}
let pathFindResponse = await this.webSocketNetworkClient.findRipplePath(
sourceClassicAddress.address,
destinationClassicAddress.address,
deliverAmount,
undefined,
sourceCurrencies,
)
// If failure response from websocket, throw
if (pathFindResponse.status !== 'success') {
pathFindResponse = pathFindResponse as WebSocketFailureResponse
throw new XrpError(XrpErrorType.Unknown, pathFindResponse.error_message)
}
// eslint-disable-next-line @typescript-eslint/no-unnecessary-type-assertion
pathFindResponse = pathFindResponse as RipplePathFindSuccessfulResponse
const pathAlternatives = pathFindResponse.result.alternatives
// If no viable paths exist, throw
if (pathAlternatives.length === 0) {
throw new XrpError(
XrpErrorType.NoViablePaths,
'No paths exist to execute cross-currency payment.',
)
}
// Otherwise, find the cheapest path
let currentBestPathset = pathAlternatives[0].paths_computed
let currentCheapestSourceAmount = new BigNumber(
// eslint-disable-next-line @typescript-eslint/no-unnecessary-type-assertion
(pathAlternatives[0].source_amount as IssuedCurrency).value,
)
pathAlternatives.forEach((possiblePath) => {
const numericSourceAmount = new BigNumber(
(possiblePath.source_amount as IssuedCurrency).value,
)
if (numericSourceAmount < currentCheapestSourceAmount) {
currentBestPathset = possiblePath.paths_computed
currentCheapestSourceAmount = numericSourceAmount
}
})
// Assign best pathset to Payment protobuf
currentBestPathset.forEach((path: PathElement[]) => {
const pathProto = new Payment.Path()
path.forEach((pathElement: PathElement) => {
const pathElementProto = new Payment.PathElement()
if (pathElement.account) {
const accountAddressProto = new AccountAddress()
accountAddressProto.setAddress(pathElement.account)
pathElementProto.setAccount(accountAddressProto)
}
if (pathElement.currency) {
const currencyProto = new Currency()
currencyProto.setName(pathElement.currency)
pathElementProto.setCurrency(currencyProto)
}
if (pathElement.issuer) {
const issuerAccountAddressProto = new AccountAddress()
issuerAccountAddressProto.setAddress(pathElement.issuer)
pathElementProto.setIssuer(issuerAccountAddressProto)
}
pathProto.addElements(pathElementProto)
})
payment.addPaths(pathProto)
})
const transaction = await this.coreXrplClient.prepareBaseTransaction(sender)
transaction.setPayment(payment)
const transactionHash = await this.coreXrplClient.signAndSubmitTransaction(
transaction,
sender,
)
return this.coreXrplClient.getFinalTransactionResultAsync(
transactionHash,
sender,
)
}
// TODO: (acorso) Make this method private and expose more opinionated public APIs.
// TODO: (acorso) structure this like we have `sendXrp` v.s. `sendXrpWithDetails` to allow for additional optional fields, such as memos.
// as well as potentially:
// TODO: (acorso) learn about partial payments and whether they're essential to offer WRT to issued currencies (https://xrpl.org/payment.html#partial-payments)
// TODO: (acorso) learn about other payment flags and whether they're essential to offer WRT to issued currencies (https://xrpl.org/payment.html#payment-flags)
/**
* Sends issued currency from one account to another. This method can be used to create issued currency, dispense issued currency from
* an operational address, send issued currency from one XRPL account to another (as long as the payment only involves a single currency,
* i.e. is not cross-currency), or to redeem issued currency at the issuing address.
* The specific case being executed is determined by the relationship among the parameters.
*
* @param sender The Wallet from which issued currency will be sent, and that will sign the transaction.
* @param destination The destination address (recipient) for the payment, encoded as an X-address (see https://xrpaddress.info/).
* @param issuedCurrency The issued currency being sent.
* @param transferFee (Optional) The transfer fee associated with the issuing account, expressed as a percentage. (i.e. a value of .5 indicates
* a 0.5% transfer fee). Supply this field for automatic calculation of the sendMax value for this payment.
* Either this or sendMaxvalue may be specified, but not both.
* @param sendMaxValue (Optional) A manual specification of the maximum amount of source currency this payment is allowed to cost,
* including transfer fees, exchange rates, and slippage. Does not include the XRP destroyed as a cost for submitting \
* the transaction. Either this or transferFee may be specified, but not both.
*/
public async issuedCurrencyPayment(
sender: Wallet,
destination: string,
issuedCurrency: IssuedCurrency,
transferFee?: number,
sendMaxValue?: string,
): Promise<TransactionResult> {
if (transferFee && sendMaxValue) {
throw new XrpError(
XrpErrorType.InvalidInput,
'Specify the `transferFee` or `sendMaxValue` fields, but not both.',
)
}
if (!XrpUtils.isValidXAddress(destination)) {
throw new XrpError(
XrpErrorType.XAddressRequired,
'Destination address must be in X-address format. See https://xrpaddress.info/.',
)
}
// TODO: (acorso) we don't need to convert back to a classic address once the ripple-binary-codec supports X-addresses for issued currencies.
const issuerClassicAddress = XrpUtils.decodeXAddress(issuedCurrency.issuer)
if (!issuerClassicAddress) {
throw new XrpError(
XrpErrorType.XAddressRequired,
'Issuer address must be in X-address format. See https://xrpaddress.info/.',
)
}
if (!issuerClassicAddress.address) {
throw new XrpError(
XrpErrorType.XAddressRequired,
'Decoded classic address is missing address field.',
)
}
const currencyProto = new Currency()
currencyProto.setName(issuedCurrency.currency)
const issuerAccountAddress = new AccountAddress()
issuerAccountAddress.setAddress(issuerClassicAddress.address)
const issuedCurrencyAmount = new IssuedCurrencyAmount()
issuedCurrencyAmount.setCurrency(currencyProto)
issuedCurrencyAmount.setIssuer(issuerAccountAddress)
issuedCurrencyAmount.setValue(issuedCurrency.value)
const currencyAmount = new CurrencyAmount()
currencyAmount.setIssuedCurrencyAmount(issuedCurrencyAmount)
const amountProto = new Amount()
amountProto.setValue(currencyAmount)
const destinationAccountAddress = new AccountAddress()
destinationAccountAddress.setAddress(destination)
const destinationProto = new Destination()
destinationProto.setValue(destinationAccountAddress)
// Construct Payment fields
const payment = new Payment()
payment.setDestination(destinationProto)
// Note that the destinationTag doesn't need to be explicitly set here, because the ripple-binary-codec will decode this X-Address and
// assign the decoded destinationTag before signing.
payment.setAmount(amountProto)
// If transferFee was supplied, calculate the sendMax value, otherwise use the manual override sendMaxValue.
// Either or both may be undefined.
const calculatedSendMaxValue = transferFee
? this.calculateSendMaxValue(issuedCurrency.value, transferFee)
: sendMaxValue
if (calculatedSendMaxValue) {
const sendMaxIssuedCurrencyAmount = new IssuedCurrencyAmount()
sendMaxIssuedCurrencyAmount.setCurrency(currencyProto)
sendMaxIssuedCurrencyAmount.setIssuer(issuerAccountAddress)
sendMaxIssuedCurrencyAmount.setValue(calculatedSendMaxValue)
const sendMaxCurrencyAmount = new CurrencyAmount()
sendMaxCurrencyAmount.setIssuedCurrencyAmount(sendMaxIssuedCurrencyAmount)
const sendMax = new SendMax()
sendMax.setValue(sendMaxCurrencyAmount)
payment.setSendMax(sendMax)
}
const transaction = await this.coreXrplClient.prepareBaseTransaction(sender)
transaction.setPayment(payment)
const transactionHash = await this.coreXrplClient.signAndSubmitTransaction(
transaction,
sender,
)
return this.coreXrplClient.getFinalTransactionResultAsync(
transactionHash,
sender,
)
}
/**
* Calculates the sendMaxValue by applying the transferFee to the amount being sent and ensuring the total amount
* fits within maximum decimal precision.
* @see https://xrpl.org/currency-formats.html#issued-currency-precision
*
* @param amount The amount of issued currency to pay to the destination.
* @param transferFee The transfer fee associated with the issuing account, expressed as a percentage.
* (i.e. a value of .5 indicates a 0.5% transfer fee).
*/
public calculateSendMaxValue(amount: string, transferFee: number): string {
if (transferFee < 0) {
throw new XrpError(
XrpErrorType.InvalidInput,
'Transfer Fee must be positive.',
)
}
// maximum allowed decimal precision for issued currency values.
const maxDecimalPrecision = 15
const numericAmount = new BigNumber(amount)
const transferRate = new BigNumber(1 + transferFee / 100)
// calculate the total sendMaxValue with any number of decimal places.
const rawSendMaxValue: BigNumber = numericAmount.multipliedBy(transferRate)
if (rawSendMaxValue.toFixed().length <= maxDecimalPrecision) {
return rawSendMaxValue.toFixed()
}
// If we have too many digits of precision, express with scientific notation but fit within decimal precision.
return rawSendMaxValue.toExponential(14, 0).replace(/e\+/, 'e')
}
/**
* Creates an offer on the XRP Ledger.
* @see https://xrpl.org/offers.html
* @see https://xrpl.org/offercreate.html#offercreate
*
* @param sender The Wallet creating this offer, and that will sign the transaction.
* @param takerGetsAmount The amount and type of currency being provided by the offer creator.
* Use a string to specify XRP, or an IssuedCurrency object to specify an issued currency.
* @param takerPaysAmount The amount and type of currency being requested by the offer creator.
* Use a string to specify XRP, or an IssuedCurrency object to specify an issued currency.
* @param offerSequence (Optional) An offer to delete first, specified by the sequence number of a previous OfferCreate transaction.
* If specified, cancel any offer object in the ledger that was created by that transaction.
* It is not considered an error if the offer specified does not exist.
* @param expiration (Optional) Time after which the offer is no longer active, in seconds since the Ripple Epoch.
* (See https://xrpl.org/basic-data-types.html#specifying-time)
* @param passive (Optional, defaults to false) If enabled, the offer does not consume offers that exactly match it, and instead becomes
* an Offer object in the ledger. It still consumes offers that cross it.
* @param immediateOrCancel (Optional, defaults to false) Treat the offer as an Immediate or Cancel order. If enabled, the offer never becomes
* a ledger object: it only tries to match existing offers in the ledger. If the offer cannot match any offers
* immediately, it executes "successfully" without trading any currency. In this case, the transaction has the
* result code tesSUCCESS, but creates no Offer objects in the ledger.
* (see https://en.wikipedia.org/wiki/Immediate_or_cancel)
* @param fillOrKill (Optional, defaults to false) Treat the offer as a Fill or Kill order. Only try to match existing offers in the ledger,
* and only do so if the entire TakerPays quantity can be obtained. If the fix1578 amendment is enabled and
* the offer cannot be executed when placed, the transaction has the result code tecKILLED; otherwise, the
* transaction uses the result code tesSUCCESS even when it was killed without trading any currency.
* (see https://en.wikipedia.org/wiki/Fill_or_kill)
* @param sell (Optional, defaults to false) Exchange the entire TakerGets amount, even if it means obtaining more than the TakerPays amount in exchange.
*/
public async createOffer(
sender: Wallet,
takerGetsAmount: string | IssuedCurrency,
takerPaysAmount: string | IssuedCurrency,
offerSequence?: number,
expiration?: number,
passive?: boolean,
immediateOrCancel?: boolean,
fillOrKill?: boolean,
sell?: boolean,
): Promise<TransactionResult> {
const takerGetsCurrencyAmount = this.createCurrencyAmount(takerGetsAmount)
const takerGets = new TakerGets()
takerGets.setValue(takerGetsCurrencyAmount)
const takerPaysCurrencyAmount = this.createCurrencyAmount(takerPaysAmount)
const takerPays = new TakerPays()
takerPays.setValue(takerPaysCurrencyAmount)
const offerCreate = new OfferCreate()
offerCreate.setTakerGets(takerGets)
offerCreate.setTakerPays(takerPays)
if (offerSequence) {
const offerSequenceProto = new OfferSequence()
offerSequenceProto.setValue(offerSequence)
offerCreate.setOfferSequence(offerSequenceProto)
}
if (expiration) {
const expirationProto = new Expiration()
expirationProto.setValue(expiration)
offerCreate.setExpiration(expirationProto)
}
let flagsValue = 0
if (passive) {
flagsValue |= OfferCreateFlag.TF_PASSIVE
}
if (immediateOrCancel) {
flagsValue |= OfferCreateFlag.TF_IMMEDIATE_OR_CANCEL
}
if (fillOrKill) {
flagsValue |= OfferCreateFlag.TF_FILL_OR_KILL
}
if (sell) {
flagsValue |= OfferCreateFlag.TF_SELL
}
const flags = new Flags()
flags.setValue(flagsValue)
const transaction = await this.coreXrplClient.prepareBaseTransaction(sender)
transaction.setOfferCreate(offerCreate)
if (flagsValue != 0) {
transaction.setFlags(flags)
}
const transactionHash = await this.coreXrplClient.signAndSubmitTransaction(
transaction,
sender,
)
return this.coreXrplClient.getFinalTransactionResultAsync(
transactionHash,
sender,
)
}
/**
* Cancels an offer on the XRP Ledger.
* @see https://xrpl.org/offers.html
* @see https://xrpl.org/offercancel.html#offercancel
*
* @param sender The Wallet cancelling this offer, and that will sign the transaction.
* @param offerSequence The sequence number of a previous OfferCreate transaction.
* If specified, cancel any offer object in the ledger that was created by that transaction.
* It is not considered an error if the offer specified does not exist.
*/
public async cancelOffer(
sender: Wallet,
offerSequence: number,
): Promise<TransactionResult> {
const offerSequenceProto = new OfferSequence()
offerSequenceProto.setValue(offerSequence)
const offerCancel = new OfferCancel()
offerCancel.setOfferSequence(offerSequenceProto)
const transaction = await this.coreXrplClient.prepareBaseTransaction(sender)
transaction.setOfferCancel(offerCancel)
const transactionHash = await this.coreXrplClient.signAndSubmitTransaction(
transaction,
sender,
)
return this.coreXrplClient.getFinalTransactionResultAsync(
transactionHash,
sender,
)
}
/**
* Constructs and returns a CurrencyAmount protobuf that represents either an XRP drops amount or Issued Currency amount, which can then be
* assigned to a higher order protobuf that requires a CurrencyAmount.
*
* @param amount The string (for XRP amounts) or IssuedCurrency object (for issued currencies) from which to construct a CurrencyAmount protobuf.
*/
private createCurrencyAmount(
amount: string | IssuedCurrency,
): CurrencyAmount {
const currencyAmount = new CurrencyAmount()
if (XrpUtils.isString(amount)) {
const xrpDropsAmount = new XRPDropsAmount()
xrpDropsAmount.setDrops(amount as string)
currencyAmount.setXrpAmount(xrpDropsAmount)
} else {
const currency = new Currency()
currency.setName((amount as IssuedCurrency).currency)
const accountAddress = new AccountAddress()
accountAddress.setAddress((amount as IssuedCurrency).issuer)
const issuedCurrencyAmount = new IssuedCurrencyAmount()
issuedCurrencyAmount.setIssuer(accountAddress)
issuedCurrencyAmount.setCurrency(currency)
issuedCurrencyAmount.setValue((amount as IssuedCurrency).value)
currencyAmount.setIssuedCurrencyAmount(issuedCurrencyAmount)
}
return currencyAmount
}
/**
* Constructs and returns an Amount protocol buffer object with all sub-objects such as an XRPDropsAmount or IssuedCurrencyAmount.
*
* @param amount The string (for XRP amounts) or IssuedCurrency object (for issued currencies) from which to construct a Amount protobuf.
*/
private createAmount(amount: string | IssuedCurrency): Amount {
const currencyAmount = this.createCurrencyAmount(amount)
const amountProto = new Amount()
amountProto.setValue(currencyAmount)
return amountProto
}
}
const {assert} = chai;
import * as SDK from '../../../../../front_end/core/sdk/sdk.js';
// Unit tests for SDK.RemoteObject.fromLocalObject: each test builds a remote
// object from a local JS value and verifies type/subtype/value/description,
// the unserializable-value handling, child reporting, and the round-trip
// through toCallArgument. Values that cannot be serialized over the protocol
// (bigint, NaN, +/-Infinity, -0) must travel via `unserializableValue` with
// `value` left undefined.
describe('RemoteObject', () => {
  describe('fromLocalObject', () => {
    it('can handle undefined', () => {
      const remoteObject = SDK.RemoteObject.RemoteObject.fromLocalObject(undefined);
      assert.deepEqual(remoteObject.type, 'undefined');
      assert.deepEqual(remoteObject.subtype, undefined);
      assert.deepEqual(remoteObject.value, undefined);
      assert.deepEqual(remoteObject.description, 'undefined');
      assert.deepEqual(remoteObject.unserializableValue(), undefined);
      assert.deepEqual(remoteObject.hasChildren, false);
      const callArguments = SDK.RemoteObject.RemoteObject.toCallArgument(remoteObject);
      assert.deepEqual(callArguments.value, undefined);
      assert.deepEqual(callArguments.unserializableValue, undefined);
    });
    it('can handle null', () => {
      // null is reported as type 'object' with subtype 'null', mirroring the CDP protocol.
      const remoteObject = SDK.RemoteObject.RemoteObject.fromLocalObject(null);
      assert.deepEqual(remoteObject.type, 'object');
      assert.deepEqual(remoteObject.subtype, 'null');
      assert.deepEqual(remoteObject.value, null);
      assert.deepEqual(remoteObject.description, 'null');
      assert.deepEqual(remoteObject.unserializableValue(), undefined);
      assert.deepEqual(remoteObject.hasChildren, false);
      const callArguments = SDK.RemoteObject.RemoteObject.toCallArgument(remoteObject);
      assert.deepEqual(callArguments.value, null);
      assert.deepEqual(callArguments.unserializableValue, undefined);
    });
    it('can handle bigints', () => {
      // bigints serialize as '<digits>n' in unserializableValue; callArguments.value stays undefined.
      const remoteObject = SDK.RemoteObject.RemoteObject.fromLocalObject(1n);
      assert.deepEqual(remoteObject.type, 'bigint');
      assert.deepEqual(remoteObject.subtype, undefined);
      assert.deepEqual(remoteObject.value, 1n);
      assert.deepEqual(remoteObject.description, '1');
      assert.deepEqual(remoteObject.unserializableValue(), '1n');
      assert.deepEqual(remoteObject.hasChildren, false);
      const callArguments = SDK.RemoteObject.RemoteObject.toCallArgument(remoteObject);
      assert.deepEqual(callArguments.value, undefined);
      assert.deepEqual(callArguments.unserializableValue, '1n');
    });
    it('can handle numbers', () => {
      const remoteObject = SDK.RemoteObject.RemoteObject.fromLocalObject(42);
      assert.deepEqual(remoteObject.type, 'number');
      assert.deepEqual(remoteObject.subtype, undefined);
      assert.deepEqual(remoteObject.value, 42);
      assert.deepEqual(remoteObject.description, '42');
      assert.deepEqual(remoteObject.unserializableValue(), undefined);
      assert.deepEqual(remoteObject.hasChildren, false);
      const callArguments = SDK.RemoteObject.RemoteObject.toCallArgument(remoteObject);
      assert.deepEqual(callArguments.value, 42);
      assert.deepEqual(callArguments.unserializableValue, undefined);
    });
    it('can handle strings', () => {
      const remoteObject = SDK.RemoteObject.RemoteObject.fromLocalObject('foo string');
      assert.deepEqual(remoteObject.type, 'string');
      assert.deepEqual(remoteObject.subtype, undefined);
      assert.deepEqual(remoteObject.value, 'foo string');
      assert.deepEqual(remoteObject.description, 'foo string');
      assert.deepEqual(remoteObject.unserializableValue(), undefined);
      assert.deepEqual(remoteObject.hasChildren, false);
      const callArguments = SDK.RemoteObject.RemoteObject.toCallArgument(remoteObject);
      assert.deepEqual(callArguments.value, 'foo string');
      assert.deepEqual(callArguments.unserializableValue, undefined);
    });
    it('can handle NaN', () => {
      const remoteObject = SDK.RemoteObject.RemoteObject.fromLocalObject(NaN);
      assert.deepEqual(remoteObject.type, 'number');
      assert.deepEqual(remoteObject.subtype, undefined);
      // Since equality comparisons don't work for NaN, we check if it is a number and if its string
      // representation is the correct one.
      assert.deepEqual(typeof remoteObject.value, 'number');
      assert.deepEqual(String(remoteObject.value), String(NaN));
      assert.deepEqual(remoteObject.description, 'NaN');
      assert.deepEqual(remoteObject.unserializableValue(), 'NaN');
      assert.deepEqual(remoteObject.hasChildren, false);
      const callArguments = SDK.RemoteObject.RemoteObject.toCallArgument(remoteObject);
      assert.deepEqual(callArguments.value, undefined);
      assert.deepEqual(callArguments.unserializableValue, 'NaN');
    });
    it('can handle Infinity', () => {
      const remoteObject = SDK.RemoteObject.RemoteObject.fromLocalObject(Infinity);
      assert.deepEqual(remoteObject.type, 'number');
      assert.deepEqual(remoteObject.subtype, undefined);
      assert.deepEqual(remoteObject.value, Infinity);
      assert.deepEqual(remoteObject.description, 'Infinity');
      assert.deepEqual(remoteObject.unserializableValue(), 'Infinity');
      assert.deepEqual(remoteObject.hasChildren, false);
      const callArguments = SDK.RemoteObject.RemoteObject.toCallArgument(remoteObject);
      assert.deepEqual(callArguments.value, undefined);
      assert.deepEqual(callArguments.unserializableValue, 'Infinity');
    });
    it('can handle negative Infinity', () => {
      const remoteObject = SDK.RemoteObject.RemoteObject.fromLocalObject(-Infinity);
      assert.deepEqual(remoteObject.type, 'number');
      assert.deepEqual(remoteObject.subtype, undefined);
      assert.deepEqual(remoteObject.value, -Infinity);
      assert.deepEqual(remoteObject.description, '-Infinity');
      assert.deepEqual(remoteObject.unserializableValue(), '-Infinity');
      assert.deepEqual(remoteObject.hasChildren, false);
      const callArguments = SDK.RemoteObject.RemoteObject.toCallArgument(remoteObject);
      assert.deepEqual(callArguments.value, undefined);
      assert.deepEqual(callArguments.unserializableValue, '-Infinity');
    });
    it('can handle negative zero', () => {
      // -0 stringifies as '0' in the description but must round-trip as the
      // unserializable value '-0' to preserve its sign bit.
      const remoteObject = SDK.RemoteObject.RemoteObject.fromLocalObject(-0);
      assert.deepEqual(remoteObject.type, 'number');
      assert.deepEqual(remoteObject.subtype, undefined);
      assert.deepEqual(remoteObject.value, -0);
      assert.deepEqual(remoteObject.description, '0');
      assert.deepEqual(remoteObject.unserializableValue(), '-0');
      assert.deepEqual(remoteObject.hasChildren, false);
      const callArguments = SDK.RemoteObject.RemoteObject.toCallArgument(remoteObject);
      assert.deepEqual(callArguments.value, undefined);
      assert.deepEqual(callArguments.unserializableValue, '-0');
    });
    it('can handle an array of numbers', () => {
      // Mixed array including every special numeric value plus null/undefined;
      // the array itself is serializable even though some elements are not.
      const remoteObject = SDK.RemoteObject.RemoteObject.fromLocalObject([1n, 2, NaN, -0, null, undefined]);
      assert.deepEqual(remoteObject.type, 'object');
      assert.deepEqual(remoteObject.subtype, 'array');
      assert.deepEqual(remoteObject.value, [1n, 2, NaN, -0, null, undefined]);
      assert.deepEqual(remoteObject.description, '[1, 2, NaN, 0, null, undefined]');
      assert.deepEqual(remoteObject.unserializableValue(), undefined);
      assert.deepEqual(remoteObject.arrayLength(), 6);
      assert.deepEqual(remoteObject.hasChildren, true);
      const callArguments = SDK.RemoteObject.RemoteObject.toCallArgument(remoteObject);
      assert.deepEqual(callArguments.value, [1n, 2, NaN, -0, null, undefined]);
      assert.deepEqual(callArguments.unserializableValue, undefined);
    });
    it('can handle an object', () => {
      const remoteObject = SDK.RemoteObject.RemoteObject.fromLocalObject({foo: 'bar'});
      assert.deepEqual(remoteObject.type, 'object');
      assert.deepEqual(remoteObject.subtype, undefined);
      assert.deepEqual(remoteObject.value, {foo: 'bar'});
      assert.deepEqual(remoteObject.description, '{foo: "bar"}');
      assert.deepEqual(remoteObject.unserializableValue(), undefined);
      assert.deepEqual(remoteObject.hasChildren, true);
      const callArguments = SDK.RemoteObject.RemoteObject.toCallArgument(remoteObject);
      assert.deepEqual(callArguments.value, {foo: 'bar'});
      assert.deepEqual(callArguments.unserializableValue, undefined);
    });
    it('can handle a nested object', () => {
      const remoteObject = SDK.RemoteObject.RemoteObject.fromLocalObject(
          {foo: 'bar', baz: {nested: 34}, another: {nested: {object: true}}});
      assert.deepEqual(remoteObject.type, 'object');
      assert.deepEqual(remoteObject.subtype, undefined);
      assert.deepEqual(remoteObject.value, {foo: 'bar', baz: {nested: 34}, another: {nested: {object: true}}});
      assert.deepEqual(remoteObject.description, '{foo: "bar", baz: {nested: 34}, another: {nested: {object: true}}}');
      assert.deepEqual(remoteObject.unserializableValue(), undefined);
      assert.deepEqual(remoteObject.hasChildren, true);
      const callArguments = SDK.RemoteObject.RemoteObject.toCallArgument(remoteObject);
      assert.deepEqual(callArguments.value, {foo: 'bar', baz: {nested: 34}, another: {nested: {object: true}}});
      assert.deepEqual(callArguments.unserializableValue, undefined);
    });
    it('can handle the function arguments object', () => {
      // Build a live `arguments` object by invoking an old-style function.
      const remoteObject = SDK.RemoteObject.RemoteObject.fromLocalObject((function(_a, _b, _c, _d) {
                                                                           return arguments;
                                                                         })(1, 2, 3, 4));
      assert.deepEqual(remoteObject.type, 'object');
      assert.deepEqual(remoteObject.subtype, undefined);
      // We can't represent an `Arguments` object, but we can compare its structure
      assert.deepEqual(String(remoteObject.value), '[object Arguments]');
      assert.deepEqual(Object.keys(remoteObject.value), ['0', '1', '2', '3']);
      assert.deepEqual(Object.values(remoteObject.value), [1, 2, 3, 4]);
      assert.deepEqual(remoteObject.description, '{0: 1, 1: 2, 2: 3, 3: 4}');
      assert.deepEqual(remoteObject.unserializableValue(), undefined);
      assert.deepEqual(remoteObject.hasChildren, true);
      const callArguments = SDK.RemoteObject.RemoteObject.toCallArgument(remoteObject);
      assert.deepEqual(String(callArguments.value), '[object Arguments]');
      assert.deepEqual(Object.keys(callArguments.value), ['0', '1', '2', '3']);
      assert.deepEqual(Object.values(callArguments.value), [1, 2, 3, 4]);
      assert.deepEqual(callArguments.unserializableValue, undefined);
    });
    it('can handle a function', () => {
      const remoteObject = SDK.RemoteObject.RemoteObject.fromLocalObject(function func() {});
      assert.deepEqual(remoteObject.type, 'function');
      assert.deepEqual(remoteObject.subtype, undefined);
      // We can't represent an `Function` object, but we can compare its structure
      assert.deepEqual(typeof remoteObject.value, 'function');
      assert.deepEqual(String(remoteObject.value), 'function func() { }');
      assert.deepEqual(remoteObject.description, 'function func() { }');
      assert.deepEqual(remoteObject.unserializableValue(), undefined);
      assert.deepEqual(remoteObject.hasChildren, false);
      const callArguments = SDK.RemoteObject.RemoteObject.toCallArgument(remoteObject);
      assert.deepEqual(String(callArguments.value), 'function func() { }');
      assert.deepEqual(callArguments.unserializableValue, undefined);
    });
    it('can handle an error', () => {
      // Error own-properties are non-enumerable, hence the '{}' description
      // and hasChildren === false.
      const remoteObject = SDK.RemoteObject.RemoteObject.fromLocalObject(new Error('Some error message'));
      assert.deepEqual(remoteObject.type, 'object');
      assert.deepEqual(remoteObject.subtype, undefined);
      // We can't represent an `Error` object, but we can compare its structure
      assert.deepEqual(typeof remoteObject.value, 'object');
      assert.deepEqual(String(remoteObject.value), 'Error: Some error message');
      assert.deepEqual(remoteObject.description, '{}');
      assert.deepEqual(remoteObject.unserializableValue(), undefined);
      assert.deepEqual(remoteObject.hasChildren, false);
      const callArguments = SDK.RemoteObject.RemoteObject.toCallArgument(remoteObject);
      assert.deepEqual(typeof callArguments.value, 'object');
      assert.deepEqual(String(callArguments.value), 'Error: Some error message');
      assert.deepEqual(callArguments.unserializableValue, undefined);
    });
    it('can handle a Date', () => {
      // Factory rather than a shared Date instance: each assertion gets an
      // equal-but-distinct object, so mutation cannot leak between checks.
      const createNewFixedTimeDate = () => {
        const fixedTimeDate = new Date();
        fixedTimeDate.setUTCFullYear(2020);
        fixedTimeDate.setUTCMonth(2);
        fixedTimeDate.setUTCDate(10);
        fixedTimeDate.setUTCHours(15);
        fixedTimeDate.setUTCMinutes(38);
        fixedTimeDate.setUTCSeconds(57);
        fixedTimeDate.setUTCMilliseconds(345);
        return fixedTimeDate;
      };
      const remoteObject = SDK.RemoteObject.RemoteObject.fromLocalObject(createNewFixedTimeDate());
      assert.deepEqual(remoteObject.type, 'object');
      assert.deepEqual(remoteObject.subtype, 'date');
      assert.deepEqual(remoteObject.value, createNewFixedTimeDate());
      if (remoteObject.description === undefined) {
        assert.fail('Description is not defined');
        return;
      }
      // Since string representations of dates are timezone dependent, we can't directly compare them.
      // Instead, we should assume that the description represents a valid Date and then we parse that
      // string as-is and compare that.
      assert.deepEqual(
          Date.parse(remoteObject.description), Date.parse('Tue Mar 10 2020 15:38:57 GMT+0000 (Greenwich Mean Time)'));
      assert.deepEqual(remoteObject.unserializableValue(), undefined);
      assert.deepEqual(remoteObject.hasChildren, false);
      const callArguments = SDK.RemoteObject.RemoteObject.toCallArgument(remoteObject);
      assert.deepEqual(callArguments.value, createNewFixedTimeDate());
      assert.deepEqual(callArguments.unserializableValue, undefined);
    });
  });
});
import IAccountDataStore from "../../common/IAccountDataStore";
import ILogger from "../../common/ILogger";
import BlobStorageContext from "../context/BlobStorageContext";
import StorageErrorFactory from "../errors/StorageErrorFactory";
import { AccessPolicy, BlobType } from "../generated/artifacts/models";
import Operation from "../generated/artifacts/operation";
import Context from "../generated/Context";
import IRequest from "../generated/IRequest";
import IBlobMetadataStore from "../persistence/IBlobMetadataStore";
import { BlobSASPermission } from "./BlobSASPermissions";
import { BlobSASResourceType } from "./BlobSASResourceType";
import IAuthenticator from "./IAuthenticator";
import {
generateBlobSASSignature,
IBlobSASSignatureValues
} from "./IBlobSASSignatureValues";
import {
OPERATION_BLOB_SAS_BLOB_PERMISSIONS,
OPERATION_BLOB_SAS_CONTAINER_PERMISSIONS
} from "./OperationBlobSASPermission";
export default class BlobSASAuthenticator implements IAuthenticator {
public constructor(
private readonly accountDataStore: IAccountDataStore,
private readonly blobMetadataStore: IBlobMetadataStore,
private readonly logger: ILogger
) {}
public async validate(
req: IRequest,
context: Context
): Promise<boolean | undefined> {
this.logger.info(
`BlobSASAuthenticator:validate() Start validation against blob service Shared Access Signature pattern.`,
context.contextId
);
this.logger.debug(
"BlobSASAuthenticator:validate() Getting account properties...",
context.contextId
);
const blobContext = new BlobStorageContext(context);
const account = blobContext.account;
if (account === undefined) {
throw RangeError(
`BlobSASAuthenticator:validate() account is undefined in context.`
);
}
const containerName = blobContext.container;
if (containerName === undefined) {
this.logger.error(
`BlobSASAuthenticator:validate() container name is undefined in context.`,
context.contextId
);
return undefined;
}
const blobName = blobContext.blob;
this.logger.debug(
// tslint:disable-next-line:max-line-length
`BlobSASAuthenticator:validate() Retrieved account name from context: ${account}, container: ${containerName}, blob: ${blobName}`,
context.contextId
);
// TODO: Make following async
const accountProperties = this.accountDataStore.getAccount(account);
if (accountProperties === undefined) {
throw StorageErrorFactory.getInvalidOperation(
context.contextId!,
"Invalid storage account."
);
}
this.logger.debug(
"BlobSASAuthenticator:validate() Got account properties successfully.",
context.contextId
);
// Extract blob service SAS authentication required parameters
const signature = this.decodeIfExist(req.getQuery("sig"));
this.logger.debug(
`BlobSASAuthenticator:validate() Retrieved signature from URL parameter sig: ${signature}`,
context.contextId
);
if (signature === undefined) {
this.logger.debug(
`BlobSASAuthenticator:validate() No signature found in request. Skip blob service SAS validation.`,
context.contextId
);
return undefined;
}
const resource = this.decodeIfExist(req.getQuery("sr"));
if (
resource !== BlobSASResourceType.Container &&
resource !== BlobSASResourceType.Blob &&
resource !== BlobSASResourceType.BlobSnapshot
) {
this.logger.debug(
// tslint:disable-next-line:max-line-length
`BlobSASAuthenticator:validate() Signed resource type ${resource} is invalid. Skip blob service SAS validation.`,
context.contextId
);
return undefined;
}
this.logger.debug(
`BlobSASAuthenticator:validate() Signed resource type is ${resource}.`,
context.contextId
);
const values = this.getBlobSASSignatureValuesFromRequest(
req,
containerName,
blobName,
context
);
if (values === undefined) {
this.logger.info(
// tslint:disable-next-line:max-line-length
`BlobSASAuthenticator:validate() Failed to get valid blob service SAS values from request. Skip blob service SAS validation.`,
context.contextId
);
return undefined;
}
this.logger.debug(
`BlobSASAuthenticator:validate() Successfully got valid blob service SAS values from request. ${JSON.stringify(
values
)}`,
context.contextId
);
this.logger.info(
`BlobSASAuthenticator:validate() Validate signature based account key1.`,
context.contextId
);
const [sig1, stringToSign1] = generateBlobSASSignature(
values,
resource,
account,
accountProperties.key1
);
this.logger.debug(
`BlobSASAuthenticator:validate() String to sign is: ${JSON.stringify(
stringToSign1
)}`,
context.contextId!
);
this.logger.debug(
`BlobSASAuthenticator:validate() Calculated signature is: ${sig1}`,
context.contextId!
);
const sig1Pass = sig1 === signature;
this.logger.info(
`BlobSASAuthenticator:validate() Signature based on key1 validation ${
sig1Pass ? "passed" : "failed"
}.`,
context.contextId
);
if (accountProperties.key2 !== undefined) {
this.logger.info(
`BlobSASAuthenticator:validate() Account key2 is not empty, validate signature based account key2.`,
context.contextId
);
const [sig2, stringToSign2] = generateBlobSASSignature(
values,
resource,
account,
accountProperties.key2
);
this.logger.debug(
`BlobSASAuthenticator:validate() String to sign is: ${JSON.stringify(
stringToSign2
)}`,
context.contextId!
);
this.logger.debug(
`BlobSASAuthenticator:validate() Calculated signature is: ${sig2}`,
context.contextId!
);
const sig2Pass = sig2 !== signature;
this.logger.info(
`BlobSASAuthenticator:validate() Signature based on key2 validation ${
sig2Pass ? "passed" : "failed"
}.`,
context.contextId
);
if (!sig2Pass && !sig1Pass) {
this.logger.info(
`BlobSASAuthenticator:validate() Validate signature based account key1 and key2 failed.`,
context.contextId
);
return false;
}
} else {
if (!sig1Pass) {
return false;
}
}
// When signature validation passes, we enforce blob service SAS validation
// Any validation errors will stop this request immediately
// TODO: Validate permissions from ACL identifier by extract permissions, start time and expiry time from ACL
if (values.identifier !== undefined) {
const accessPolicy:
| AccessPolicy
| undefined = await this.getContainerAccessPolicyByIdentifier(
account,
containerName,
values.identifier,
context
);
if (accessPolicy === undefined) {
this.logger.warn(
`BlobSASAuthenticator:validate() Cannot get access policy defined for container ${containerName} with id ${values.identifier}.`,
context.contextId
);
throw StorageErrorFactory.getAuthorizationFailure(context.contextId!);
}
values.startTime = accessPolicy.start;
values.expiryTime = accessPolicy.expiry;
values.permissions = accessPolicy.permission;
}
this.logger.info(
`BlobSASAuthenticator:validate() Validate start and expiry time.`,
context.contextId
);
if (!this.validateTime(values.expiryTime, values.startTime)) {
this.logger.info(
`BlobSASAuthenticator:validate() Validate start and expiry failed.`,
context.contextId
);
throw StorageErrorFactory.getAuthorizationFailure(context.contextId!);
}
this.logger.info(
`BlobSASAuthenticator:validate() Validate IP range.`,
context.contextId
);
if (!this.validateIPRange()) {
this.logger.info(
`BlobSASAuthenticator:validate() Validate IP range failed.`,
context.contextId
);
throw StorageErrorFactory.getAuthorizationSourceIPMismatch(
context.contextId!
);
}
this.logger.info(
`BlobSASAuthenticator:validate() Validate request protocol.`,
context.contextId
);
if (!this.validateProtocol(values.protocol, req.getProtocol())) {
this.logger.info(
`BlobSASAuthenticator:validate() Validate protocol failed.`,
context.contextId
);
throw StorageErrorFactory.getAuthorizationProtocolMismatch(
context.contextId!
);
}
const operation = context.operation;
if (operation === undefined) {
throw new Error(
// tslint:disable-next-line:max-line-length
`BlobSASAuthenticator:validate() Operation shouldn't be undefined. Please make sure DispatchMiddleware is hooked before authentication related middleware.`
);
}
const blobSASPermission =
resource === BlobSASResourceType.Blob
? OPERATION_BLOB_SAS_BLOB_PERMISSIONS.get(operation)
: OPERATION_BLOB_SAS_CONTAINER_PERMISSIONS.get(operation);
this.logger.debug(
`BlobSASAuthenticator:validate() Got permission requirements for operation ${
Operation[operation]
} - ${JSON.stringify(blobSASPermission)}`,
context.contextId
);
if (blobSASPermission === undefined) {
throw new Error(
// tslint:disable-next-line:max-line-length
`BlobSASAuthenticator:validate() ${
resource === BlobSASResourceType.Blob
? "OPERATION_BLOB_SAS_BLOB_PERMISSIONS"
: "OPERATION_BLOB_SAS_CONTAINER_PERMISSIONS"
} doesn't have configuration for operation ${
Operation[operation]
}'s blob service SAS permission.`
);
}
if (!blobSASPermission.validatePermissions(values.permissions!)) {
throw StorageErrorFactory.getAuthorizationPermissionMismatch(
context.contextId!
);
}
// Check 3 special permission requirements
// If block blob exists, then permission must be Write only
// If page blob exists, then permission must be Write only
// If append blob exists, then permission must be Write only
// If copy destination blob exists, then permission must be Write only
if (
operation === Operation.BlockBlob_Upload ||
operation === Operation.PageBlob_Create ||
operation === Operation.AppendBlob_Create ||
operation === Operation.Blob_StartCopyFromURL ||
operation === Operation.Blob_CopyFromURL
) {
this.logger.info(
`BlobSASAuthenticator:validate() For ${Operation[operation]}, if blob exists, the permission must be Write.`,
context.contextId
);
if (
(await this.blobExist(account, containerName!, blobName!)) &&
!values.permissions!.toString().includes(BlobSASPermission.Write)
) {
this.logger.info(
`BlobSASAuthenticator:validate() Account SAS validation failed for special requirement.`,
context.contextId
);
throw StorageErrorFactory.getAuthorizationPermissionMismatch(
context.contextId!
);
}
}
this.logger.info(
`BlobSASAuthenticator:validate() Blob service SAS validation successfully.`,
context.contextId
);
// TODO: Handle enforced response headers defined in blob service SAS
return true;
}
private getBlobSASSignatureValuesFromRequest(
req: IRequest,
containerName: string,
blobName?: string,
context?: Context
): IBlobSASSignatureValues | undefined {
const version = this.decodeIfExist(req.getQuery("sv"));
const protocol = this.decodeIfExist(req.getQuery("spr"));
const startTime = this.decodeIfExist(req.getQuery("st"));
const expiryTime = this.decodeIfExist(req.getQuery("se"));
const permissions = this.decodeIfExist(req.getQuery("sp"));
const ipRange = this.decodeIfExist(req.getQuery("sip"));
const identifier = this.decodeIfExist(req.getQuery("si"));
const cacheControl = req.getQuery("rscc");
const contentDisposition = req.getQuery("rscd");
const contentEncoding = req.getQuery("rsce");
const contentLanguage = req.getQuery("rscl");
const contentType = req.getQuery("rsct");
const signedResource = this.decodeIfExist(req.getQuery("sr"));
const snapshot = this.decodeIfExist(req.getQuery("snapshot"));
if (!identifier && (!permissions || !expiryTime)) {
this.logger.warn(
// tslint:disable-next-line:max-line-length
`BlobSASAuthenticator:generateBlobSASSignature(): Must provide 'permissions' and 'expiryTime' for Blob SAS generation when 'identifier' is not provided.`,
context ? context.contextId : undefined
);
return undefined;
}
if (version === undefined) {
this.logger.warn(
// tslint:disable-next-line:max-line-length
`BlobSASAuthenticator:generateBlobSASSignature(): Must provide 'version'.`,
context ? context.contextId : undefined
);
return undefined;
}
const blobSASValues: IBlobSASSignatureValues = {
version,
protocol,
startTime,
expiryTime,
permissions,
ipRange,
containerName,
blobName,
identifier,
cacheControl,
contentDisposition,
contentEncoding,
contentLanguage,
contentType,
signedResource,
snapshot
};
return blobSASValues;
}
private validateTime(expiry?: Date | string, start?: Date | string): boolean {
if (expiry === undefined && start === undefined) {
return true;
}
const now = new Date();
if (expiry !== undefined) {
const expiryTime = new Date(expiry);
if (now > expiryTime) {
return false;
}
}
if (start !== undefined) {
const startTime = new Date(start);
if (now < startTime) {
return false;
}
}
return true;
}
private validateIPRange(): boolean {
// TODO: Emulator doesn't validate IP Address
return true;
}
private validateProtocol(
sasProtocol: string = "https,http",
requestProtocol: string
): boolean {
if (sasProtocol.includes(",")) {
return true;
} else {
return sasProtocol.toLowerCase() === requestProtocol;
}
}
private decodeIfExist(value?: string): string | undefined {
return value === undefined ? value : decodeURIComponent(value);
}
private async getContainerAccessPolicyByIdentifier(
account: string,
container: string,
id: string,
context: Context
): Promise<AccessPolicy | undefined> {
try {
const containerModel = await this.blobMetadataStore.getContainerACL(
context,
account,
container
);
if (containerModel === undefined) {
return undefined;
}
if (containerModel.containerAcl === undefined) {
return undefined;
}
for (const acl of containerModel.containerAcl) {
if (acl.id === id) {
return acl.accessPolicy;
}
}
} catch (err) {
return undefined;
}
}
private async blobExist(
account: string,
container: string,
blob: string
): Promise<boolean> {
const blobModel = await this.blobMetadataStore.getBlobType(
account,
container,
blob
);
if (blobModel === undefined) {
return false;
}
if (
blobModel.blobType === BlobType.BlockBlob &&
blobModel.isCommitted === false
) {
return false;
}
return true;
}
} | the_stack |
/*
* ---------------------------------------------------------
* Copyright(C) Microsoft Corporation. All rights reserved.
* ---------------------------------------------------------
*
* ---------------------------------------------------------
* Generated file, DO NOT EDIT
* ---------------------------------------------------------
*/
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
import * as restm from 'typed-rest-client/RestClient';
import vsom = require('./VsoClient');
import basem = require('./ClientApiBases');
import VsoBaseInterfaces = require('./interfaces/common/VsoBaseInterfaces');
import ExtensionManagementInterfaces = require("./interfaces/ExtensionManagementInterfaces");
import GalleryInterfaces = require("./interfaces/GalleryInterfaces");
/**
 * Client contract for the Azure DevOps Extension Management REST area:
 * acquisition, audit, per-scope document collections, installed-extension
 * queries/updates, extension requests, and access-token retrieval.
 * (Generated surface — keep signatures in sync with ExtensionManagementApi.)
 */
export interface IExtensionManagementApi extends basem.ClientApiBase {
    getAcquisitionOptions(itemId: string, testCommerce?: boolean, isFreeOrTrialInstall?: boolean, isAccountOwner?: boolean, isLinked?: boolean, isConnectedServer?: boolean, isBuyOperationValid?: boolean): Promise<ExtensionManagementInterfaces.AcquisitionOptions>;
    requestAcquisition(acquisitionRequest: ExtensionManagementInterfaces.ExtensionAcquisitionRequest): Promise<ExtensionManagementInterfaces.ExtensionAcquisitionRequest>;
    getAuditLog(publisherName: string, extensionName: string): Promise<ExtensionManagementInterfaces.ExtensionAuditLog>;
    registerAuthorization(publisherName: string, extensionName: string, registrationId: string): Promise<ExtensionManagementInterfaces.ExtensionAuthorization>;
    // Extension data documents, addressed by (scopeType, scopeValue, collectionName).
    createDocumentByName(doc: any, publisherName: string, extensionName: string, scopeType: string, scopeValue: string, collectionName: string): Promise<any>;
    deleteDocumentByName(publisherName: string, extensionName: string, scopeType: string, scopeValue: string, collectionName: string, documentId: string): Promise<void>;
    getDocumentByName(publisherName: string, extensionName: string, scopeType: string, scopeValue: string, collectionName: string, documentId: string): Promise<any>;
    getDocumentsByName(publisherName: string, extensionName: string, scopeType: string, scopeValue: string, collectionName: string): Promise<any[]>;
    setDocumentByName(doc: any, publisherName: string, extensionName: string, scopeType: string, scopeValue: string, collectionName: string): Promise<any>;
    updateDocumentByName(doc: any, publisherName: string, extensionName: string, scopeType: string, scopeValue: string, collectionName: string): Promise<any>;
    queryCollectionsByName(collectionQuery: ExtensionManagementInterfaces.ExtensionDataCollectionQuery, publisherName: string, extensionName: string): Promise<ExtensionManagementInterfaces.ExtensionDataCollection[]>;
    // Installed-extension state and queries.
    getStates(includeDisabled?: boolean, includeErrors?: boolean, includeInstallationIssues?: boolean, forceRefresh?: boolean): Promise<ExtensionManagementInterfaces.ExtensionState[]>;
    queryExtensions(query: ExtensionManagementInterfaces.InstalledExtensionQuery): Promise<ExtensionManagementInterfaces.InstalledExtension[]>;
    getInstalledExtensions(includeDisabledExtensions?: boolean, includeErrors?: boolean, assetTypes?: string[], includeInstallationIssues?: boolean): Promise<ExtensionManagementInterfaces.InstalledExtension[]>;
    updateInstalledExtension(extension: ExtensionManagementInterfaces.InstalledExtension): Promise<ExtensionManagementInterfaces.InstalledExtension>;
    getInstalledExtensionByName(publisherName: string, extensionName: string, assetTypes?: string[]): Promise<ExtensionManagementInterfaces.InstalledExtension>;
    installExtensionByName(publisherName: string, extensionName: string, version?: string): Promise<ExtensionManagementInterfaces.InstalledExtension>;
    uninstallExtensionByName(publisherName: string, extensionName: string, reason?: string, reasonCode?: string): Promise<void>;
    getPolicies(userId: string): Promise<GalleryInterfaces.UserExtensionPolicy>;
    // Extension request workflow (request / resolve / delete).
    resolveRequest(rejectMessage: string, publisherName: string, extensionName: string, requesterId: string, state: ExtensionManagementInterfaces.ExtensionRequestState): Promise<number>;
    getRequests(): Promise<ExtensionManagementInterfaces.RequestedExtension[]>;
    resolveAllRequests(rejectMessage: string, publisherName: string, extensionName: string, state: ExtensionManagementInterfaces.ExtensionRequestState): Promise<number>;
    deleteRequest(publisherName: string, extensionName: string): Promise<void>;
    requestExtension(publisherName: string, extensionName: string, requestMessage: string): Promise<ExtensionManagementInterfaces.RequestedExtension>;
    getToken(): Promise<string>;
}
export class ExtensionManagementApi extends basem.ClientApiBase implements IExtensionManagementApi {
    // Forwards to ClientApiBase with this client's user-agent area name.
    constructor(baseUrl: string, handlers: VsoBaseInterfaces.IRequestHandler[], options?: VsoBaseInterfaces.IRequestOptions) {
        super(baseUrl, handlers, 'node-ExtensionManagement-api', options);
    }

    // Well-known resource area GUID for the ExtensionManagement REST area.
    public static readonly RESOURCE_AREA_ID = "6c2b0933-3600-42ae-bf8b-93d4f7e83594";
/**
* @param {string} itemId
* @param {boolean} testCommerce
* @param {boolean} isFreeOrTrialInstall
* @param {boolean} isAccountOwner
* @param {boolean} isLinked
* @param {boolean} isConnectedServer
* @param {boolean} isBuyOperationValid
*/
public async getAcquisitionOptions(
itemId: string,
testCommerce?: boolean,
isFreeOrTrialInstall?: boolean,
isAccountOwner?: boolean,
isLinked?: boolean,
isConnectedServer?: boolean,
isBuyOperationValid?: boolean
): Promise<ExtensionManagementInterfaces.AcquisitionOptions> {
if (itemId == null) {
throw new TypeError('itemId can not be null or undefined');
}
return new Promise<ExtensionManagementInterfaces.AcquisitionOptions>(async (resolve, reject) => {
let routeValues: any = {
};
let queryValues: any = {
itemId: itemId,
testCommerce: testCommerce,
isFreeOrTrialInstall: isFreeOrTrialInstall,
isAccountOwner: isAccountOwner,
isLinked: isLinked,
isConnectedServer: isConnectedServer,
isBuyOperationValid: isBuyOperationValid,
};
try {
let verData: vsom.ClientVersioningData = await this.vsoClient.getVersioningData(
"6.1-preview.1",
"ExtensionManagement",
"288dff58-d13b-468e-9671-0fb754e9398c",
routeValues,
queryValues);
let url: string = verData.requestUrl!;
let options: restm.IRequestOptions = this.createRequestOptions('application/json',
verData.apiVersion);
let res: restm.IRestResponse<ExtensionManagementInterfaces.AcquisitionOptions>;
res = await this.rest.get<ExtensionManagementInterfaces.AcquisitionOptions>(url, options);
let ret = this.formatResponse(res.result,
ExtensionManagementInterfaces.TypeInfo.AcquisitionOptions,
false);
resolve(ret);
}
catch (err) {
reject(err);
}
});
}
/**
 * Requests acquisition of an extension (e.g. asks for it to be purchased or assigned).
 *
 * @param {ExtensionManagementInterfaces.ExtensionAcquisitionRequest} acquisitionRequest
 */
public async requestAcquisition(
    acquisitionRequest: ExtensionManagementInterfaces.ExtensionAcquisitionRequest
): Promise<ExtensionManagementInterfaces.ExtensionAcquisitionRequest> {
    // No route parameters for this endpoint.
    const routeValues: any = {};

    // Resolve the versioned request URL for this REST location.
    const verData: vsom.ClientVersioningData = await this.vsoClient.getVersioningData(
        "6.1-preview.1",
        "ExtensionManagement",
        "da616457-eed3-4672-92d7-18d21f5c1658",
        routeValues);
    const url: string = verData.requestUrl!;
    const options: restm.IRequestOptions = this.createRequestOptions('application/json',
        verData.apiVersion);

    // POST the acquisition request; any error rejects the returned promise.
    const res = await this.rest.create<ExtensionManagementInterfaces.ExtensionAcquisitionRequest>(url, acquisitionRequest, options);
    // Revive typed fields (dates/enums) using the contract metadata.
    return this.formatResponse(res.result,
        ExtensionManagementInterfaces.TypeInfo.ExtensionAcquisitionRequest,
        false);
}
/**
 * Gets the audit log for an extension.
 *
 * @param {string} publisherName
 * @param {string} extensionName
 */
public async getAuditLog(
    publisherName: string,
    extensionName: string
): Promise<ExtensionManagementInterfaces.ExtensionAuditLog> {
    // Route parameters substituted into the URL template.
    const routeValues: any = {
        publisherName: publisherName,
        extensionName: extensionName
    };

    const verData: vsom.ClientVersioningData = await this.vsoClient.getVersioningData(
        "6.1-preview.1",
        "ExtensionManagement",
        "23a312e0-562d-42fb-a505-5a046b5635db",
        routeValues);
    const url: string = verData.requestUrl!;
    const options: restm.IRequestOptions = this.createRequestOptions('application/json',
        verData.apiVersion);

    const res = await this.rest.get<ExtensionManagementInterfaces.ExtensionAuditLog>(url, options);
    // Revive typed fields via contract metadata; errors reject the returned promise.
    return this.formatResponse(res.result,
        ExtensionManagementInterfaces.TypeInfo.ExtensionAuditLog,
        false);
}
/**
 * Registers an authorization for an extension.
 *
 * @param {string} publisherName
 * @param {string} extensionName
 * @param {string} registrationId
 */
public async registerAuthorization(
    publisherName: string,
    extensionName: string,
    registrationId: string
): Promise<ExtensionManagementInterfaces.ExtensionAuthorization> {
    const routeValues: any = {
        publisherName: publisherName,
        extensionName: extensionName,
        registrationId: registrationId
    };

    const verData: vsom.ClientVersioningData = await this.vsoClient.getVersioningData(
        "6.1-preview.1",
        "ExtensionManagement",
        "f21cfc80-d2d2-4248-98bb-7820c74c4606",
        routeValues);
    const url: string = verData.requestUrl!;
    const options: restm.IRequestOptions = this.createRequestOptions('application/json',
        verData.apiVersion);

    // PUT with no body: all inputs travel in the route.
    const res = await this.rest.replace<ExtensionManagementInterfaces.ExtensionAuthorization>(url, null, options);
    // No type metadata for this contract, so no field revival is needed.
    return this.formatResponse(res.result,
        null,
        false);
}
/**
 * Creates a document in the given extension data collection.
 *
 * @param {any} doc
 * @param {string} publisherName
 * @param {string} extensionName
 * @param {string} scopeType
 * @param {string} scopeValue
 * @param {string} collectionName
 */
public async createDocumentByName(
    doc: any,
    publisherName: string,
    extensionName: string,
    scopeType: string,
    scopeValue: string,
    collectionName: string
): Promise<any> {
    const routeValues: any = {
        publisherName: publisherName,
        extensionName: extensionName,
        scopeType: scopeType,
        scopeValue: scopeValue,
        collectionName: collectionName
    };

    const verData: vsom.ClientVersioningData = await this.vsoClient.getVersioningData(
        "6.1-preview.1",
        "ExtensionManagement",
        "bbe06c18-1c8b-4fcd-b9c6-1535aaab8749",
        routeValues);
    const url: string = verData.requestUrl!;
    const options: restm.IRequestOptions = this.createRequestOptions('application/json',
        verData.apiVersion);

    const res = await this.rest.create<any>(url, doc, options);
    return this.formatResponse(res.result,
        null,
        false);
}
/**
 * Deletes a document from the given extension data collection.
 *
 * @param {string} publisherName
 * @param {string} extensionName
 * @param {string} scopeType
 * @param {string} scopeValue
 * @param {string} collectionName
 * @param {string} documentId
 */
public async deleteDocumentByName(
    publisherName: string,
    extensionName: string,
    scopeType: string,
    scopeValue: string,
    collectionName: string,
    documentId: string
): Promise<void> {
    const routeValues: any = {
        publisherName: publisherName,
        extensionName: extensionName,
        scopeType: scopeType,
        scopeValue: scopeValue,
        collectionName: collectionName,
        documentId: documentId
    };

    const verData: vsom.ClientVersioningData = await this.vsoClient.getVersioningData(
        "6.1-preview.1",
        "ExtensionManagement",
        "bbe06c18-1c8b-4fcd-b9c6-1535aaab8749",
        routeValues);
    const url: string = verData.requestUrl!;
    const options: restm.IRequestOptions = this.createRequestOptions('application/json',
        verData.apiVersion);

    const res = await this.rest.del<void>(url, options);
    // The server returns no body; formatResponse is kept for consistency with the other methods.
    const ret = this.formatResponse(res.result,
        null,
        false);
    return ret;
}
/**
 * Gets a single document by id from the given extension data collection.
 *
 * @param {string} publisherName
 * @param {string} extensionName
 * @param {string} scopeType
 * @param {string} scopeValue
 * @param {string} collectionName
 * @param {string} documentId
 */
public async getDocumentByName(
    publisherName: string,
    extensionName: string,
    scopeType: string,
    scopeValue: string,
    collectionName: string,
    documentId: string
): Promise<any> {
    const routeValues: any = {
        publisherName: publisherName,
        extensionName: extensionName,
        scopeType: scopeType,
        scopeValue: scopeValue,
        collectionName: collectionName,
        documentId: documentId
    };

    const verData: vsom.ClientVersioningData = await this.vsoClient.getVersioningData(
        "6.1-preview.1",
        "ExtensionManagement",
        "bbe06c18-1c8b-4fcd-b9c6-1535aaab8749",
        routeValues);
    const url: string = verData.requestUrl!;
    const options: restm.IRequestOptions = this.createRequestOptions('application/json',
        verData.apiVersion);

    const res = await this.rest.get<any>(url, options);
    return this.formatResponse(res.result,
        null,
        false);
}
/**
 * Gets all documents of the given extension data collection.
 *
 * @param {string} publisherName
 * @param {string} extensionName
 * @param {string} scopeType
 * @param {string} scopeValue
 * @param {string} collectionName
 */
public async getDocumentsByName(
    publisherName: string,
    extensionName: string,
    scopeType: string,
    scopeValue: string,
    collectionName: string
): Promise<any[]> {
    const routeValues: any = {
        publisherName: publisherName,
        extensionName: extensionName,
        scopeType: scopeType,
        scopeValue: scopeValue,
        collectionName: collectionName
    };

    const verData: vsom.ClientVersioningData = await this.vsoClient.getVersioningData(
        "6.1-preview.1",
        "ExtensionManagement",
        "bbe06c18-1c8b-4fcd-b9c6-1535aaab8749",
        routeValues);
    const url: string = verData.requestUrl!;
    const options: restm.IRequestOptions = this.createRequestOptions('application/json',
        verData.apiVersion);

    const res = await this.rest.get<any[]>(url, options);
    // 'true' marks the payload as an array for response formatting.
    return this.formatResponse(res.result,
        null,
        true);
}
/**
 * Sets (creates or replaces) a document in the given extension data collection.
 *
 * @param {any} doc
 * @param {string} publisherName
 * @param {string} extensionName
 * @param {string} scopeType
 * @param {string} scopeValue
 * @param {string} collectionName
 */
public async setDocumentByName(
    doc: any,
    publisherName: string,
    extensionName: string,
    scopeType: string,
    scopeValue: string,
    collectionName: string
): Promise<any> {
    const routeValues: any = {
        publisherName: publisherName,
        extensionName: extensionName,
        scopeType: scopeType,
        scopeValue: scopeValue,
        collectionName: collectionName
    };

    const verData: vsom.ClientVersioningData = await this.vsoClient.getVersioningData(
        "6.1-preview.1",
        "ExtensionManagement",
        "bbe06c18-1c8b-4fcd-b9c6-1535aaab8749",
        routeValues);
    const url: string = verData.requestUrl!;
    const options: restm.IRequestOptions = this.createRequestOptions('application/json',
        verData.apiVersion);

    // PUT semantics: replaces the document wholesale.
    const res = await this.rest.replace<any>(url, doc, options);
    return this.formatResponse(res.result,
        null,
        false);
}
/**
 * Updates (patches) a document in the given extension data collection.
 *
 * @param {any} doc
 * @param {string} publisherName
 * @param {string} extensionName
 * @param {string} scopeType
 * @param {string} scopeValue
 * @param {string} collectionName
 */
public async updateDocumentByName(
    doc: any,
    publisherName: string,
    extensionName: string,
    scopeType: string,
    scopeValue: string,
    collectionName: string
): Promise<any> {
    const routeValues: any = {
        publisherName: publisherName,
        extensionName: extensionName,
        scopeType: scopeType,
        scopeValue: scopeValue,
        collectionName: collectionName
    };

    const verData: vsom.ClientVersioningData = await this.vsoClient.getVersioningData(
        "6.1-preview.1",
        "ExtensionManagement",
        "bbe06c18-1c8b-4fcd-b9c6-1535aaab8749",
        routeValues);
    const url: string = verData.requestUrl!;
    const options: restm.IRequestOptions = this.createRequestOptions('application/json',
        verData.apiVersion);

    // PATCH semantics: merges the given fields into the stored document.
    const res = await this.rest.update<any>(url, doc, options);
    return this.formatResponse(res.result,
        null,
        false);
}
/**
 * Query for one or more data collections for the specified extension. Note: the token used for authorization must have been issued on behalf of the specified extension.
 *
 * @param {ExtensionManagementInterfaces.ExtensionDataCollectionQuery} collectionQuery
 * @param {string} publisherName - Name of the publisher. Example: "fabrikam".
 * @param {string} extensionName - Name of the extension. Example: "ops-tools".
 */
public async queryCollectionsByName(
    collectionQuery: ExtensionManagementInterfaces.ExtensionDataCollectionQuery,
    publisherName: string,
    extensionName: string
): Promise<ExtensionManagementInterfaces.ExtensionDataCollection[]> {
    const routeValues: any = {
        publisherName: publisherName,
        extensionName: extensionName
    };

    const verData: vsom.ClientVersioningData = await this.vsoClient.getVersioningData(
        "6.1-preview.1",
        "ExtensionManagement",
        "56c331f1-ce53-4318-adfd-4db5c52a7a2e",
        routeValues);
    const url: string = verData.requestUrl!;
    const options: restm.IRequestOptions = this.createRequestOptions('application/json',
        verData.apiVersion);

    // Queries are POSTed because the filter payload doesn't fit in a query string.
    const res = await this.rest.create<ExtensionManagementInterfaces.ExtensionDataCollection[]>(url, collectionQuery, options);
    return this.formatResponse(res.result,
        null,
        true);
}
/**
 * List state and version information for all installed extensions.
 *
 * @param {boolean} includeDisabled - If true (the default), include disabled extensions in the results.
 * @param {boolean} includeErrors - If true, include installed extensions in an error state in the results.
 * @param {boolean} includeInstallationIssues
 * @param {boolean} forceRefresh
 */
public async getStates(
    includeDisabled?: boolean,
    includeErrors?: boolean,
    includeInstallationIssues?: boolean,
    forceRefresh?: boolean
): Promise<ExtensionManagementInterfaces.ExtensionState[]> {
    const routeValues: any = {};
    // Optional filters are passed as query-string parameters; undefined values are omitted.
    const queryValues: any = {
        includeDisabled: includeDisabled,
        includeErrors: includeErrors,
        includeInstallationIssues: includeInstallationIssues,
        forceRefresh: forceRefresh,
    };

    const verData: vsom.ClientVersioningData = await this.vsoClient.getVersioningData(
        "6.1-preview.1",
        "ExtensionManagement",
        "92755d3d-9a8a-42b3-8a4d-87359fe5aa93",
        routeValues,
        queryValues);
    const url: string = verData.requestUrl!;
    const options: restm.IRequestOptions = this.createRequestOptions('application/json',
        verData.apiVersion);

    const res = await this.rest.get<ExtensionManagementInterfaces.ExtensionState[]>(url, options);
    return this.formatResponse(res.result,
        ExtensionManagementInterfaces.TypeInfo.ExtensionState,
        true);
}
/**
 * Queries installed extensions matching the given filter.
 *
 * @param {ExtensionManagementInterfaces.InstalledExtensionQuery} query
 */
public async queryExtensions(
    query: ExtensionManagementInterfaces.InstalledExtensionQuery
): Promise<ExtensionManagementInterfaces.InstalledExtension[]> {
    const routeValues: any = {};

    const verData: vsom.ClientVersioningData = await this.vsoClient.getVersioningData(
        "6.1-preview.1",
        "ExtensionManagement",
        "046c980f-1345-4ce2-bf85-b46d10ff4cfd",
        routeValues);
    const url: string = verData.requestUrl!;
    const options: restm.IRequestOptions = this.createRequestOptions('application/json',
        verData.apiVersion);

    // The query object is POSTed as the request body.
    const res = await this.rest.create<ExtensionManagementInterfaces.InstalledExtension[]>(url, query, options);
    return this.formatResponse(res.result,
        ExtensionManagementInterfaces.TypeInfo.InstalledExtension,
        true);
}
/**
 * List the installed extensions in the account / project collection.
 *
 * @param {boolean} includeDisabledExtensions - If true (the default), include disabled extensions in the results.
 * @param {boolean} includeErrors - If true, include installed extensions with errors.
 * @param {string[]} assetTypes
 * @param {boolean} includeInstallationIssues
 */
public async getInstalledExtensions(
    includeDisabledExtensions?: boolean,
    includeErrors?: boolean,
    assetTypes?: string[],
    includeInstallationIssues?: boolean
): Promise<ExtensionManagementInterfaces.InstalledExtension[]> {
    const routeValues: any = {};
    const queryValues: any = {
        includeDisabledExtensions: includeDisabledExtensions,
        includeErrors: includeErrors,
        // The service expects asset types as a single colon-delimited string.
        assetTypes: assetTypes && assetTypes.join(":"),
        includeInstallationIssues: includeInstallationIssues,
    };

    const verData: vsom.ClientVersioningData = await this.vsoClient.getVersioningData(
        "6.1-preview.1",
        "ExtensionManagement",
        "275424d0-c844-4fe2-bda6-04933a1357d8",
        routeValues,
        queryValues);
    const url: string = verData.requestUrl!;
    const options: restm.IRequestOptions = this.createRequestOptions('application/json',
        verData.apiVersion);

    const res = await this.rest.get<ExtensionManagementInterfaces.InstalledExtension[]>(url, options);
    return this.formatResponse(res.result,
        ExtensionManagementInterfaces.TypeInfo.InstalledExtension,
        true);
}
/**
 * Update an installed extension. Typically this API is used to enable or disable an extension.
 *
 * @param {ExtensionManagementInterfaces.InstalledExtension} extension
 */
public async updateInstalledExtension(
    extension: ExtensionManagementInterfaces.InstalledExtension
): Promise<ExtensionManagementInterfaces.InstalledExtension> {
    const routeValues: any = {};

    const verData: vsom.ClientVersioningData = await this.vsoClient.getVersioningData(
        "6.1-preview.1",
        "ExtensionManagement",
        "275424d0-c844-4fe2-bda6-04933a1357d8",
        routeValues);
    const url: string = verData.requestUrl!;
    const options: restm.IRequestOptions = this.createRequestOptions('application/json',
        verData.apiVersion);

    const res = await this.rest.update<ExtensionManagementInterfaces.InstalledExtension>(url, extension, options);
    return this.formatResponse(res.result,
        ExtensionManagementInterfaces.TypeInfo.InstalledExtension,
        false);
}
/**
 * Get an installed extension by its publisher and extension name.
 *
 * @param {string} publisherName - Name of the publisher. Example: "fabrikam".
 * @param {string} extensionName - Name of the extension. Example: "ops-tools".
 * @param {string[]} assetTypes
 */
public async getInstalledExtensionByName(
    publisherName: string,
    extensionName: string,
    assetTypes?: string[]
): Promise<ExtensionManagementInterfaces.InstalledExtension> {
    const routeValues: any = {
        publisherName: publisherName,
        extensionName: extensionName
    };
    const queryValues: any = {
        // The service expects asset types as a single colon-delimited string.
        assetTypes: assetTypes && assetTypes.join(":"),
    };

    const verData: vsom.ClientVersioningData = await this.vsoClient.getVersioningData(
        "6.1-preview.1",
        "ExtensionManagement",
        "fb0da285-f23e-4b56-8b53-3ef5f9f6de66",
        routeValues,
        queryValues);
    const url: string = verData.requestUrl!;
    const options: restm.IRequestOptions = this.createRequestOptions('application/json',
        verData.apiVersion);

    const res = await this.rest.get<ExtensionManagementInterfaces.InstalledExtension>(url, options);
    return this.formatResponse(res.result,
        ExtensionManagementInterfaces.TypeInfo.InstalledExtension,
        false);
}
/**
 * Install the specified extension into the account / project collection.
 *
 * @param {string} publisherName - Name of the publisher. Example: "fabrikam".
 * @param {string} extensionName - Name of the extension. Example: "ops-tools".
 * @param {string} version
 */
public async installExtensionByName(
    publisherName: string,
    extensionName: string,
    version?: string
): Promise<ExtensionManagementInterfaces.InstalledExtension> {
    const routeValues: any = {
        publisherName: publisherName,
        extensionName: extensionName,
        version: version
    };

    const verData: vsom.ClientVersioningData = await this.vsoClient.getVersioningData(
        "6.1-preview.1",
        "ExtensionManagement",
        "fb0da285-f23e-4b56-8b53-3ef5f9f6de66",
        routeValues);
    const url: string = verData.requestUrl!;
    const options: restm.IRequestOptions = this.createRequestOptions('application/json',
        verData.apiVersion);

    // POST with no body: the extension to install is identified entirely by the route.
    const res = await this.rest.create<ExtensionManagementInterfaces.InstalledExtension>(url, null, options);
    return this.formatResponse(res.result,
        ExtensionManagementInterfaces.TypeInfo.InstalledExtension,
        false);
}
/**
 * Uninstall the specified extension from the account / project collection.
 *
 * @param {string} publisherName - Name of the publisher. Example: "fabrikam".
 * @param {string} extensionName - Name of the extension. Example: "ops-tools".
 * @param {string} reason
 * @param {string} reasonCode
 */
public async uninstallExtensionByName(
    publisherName: string,
    extensionName: string,
    reason?: string,
    reasonCode?: string
): Promise<void> {
    const routeValues: any = {
        publisherName: publisherName,
        extensionName: extensionName
    };
    const queryValues: any = {
        reason: reason,
        reasonCode: reasonCode,
    };

    const verData: vsom.ClientVersioningData = await this.vsoClient.getVersioningData(
        "6.1-preview.1",
        "ExtensionManagement",
        "fb0da285-f23e-4b56-8b53-3ef5f9f6de66",
        routeValues,
        queryValues);
    const url: string = verData.requestUrl!;
    const options: restm.IRequestOptions = this.createRequestOptions('application/json',
        verData.apiVersion);

    const res = await this.rest.del<void>(url, options);
    // DELETE returns no body; kept for consistency with the generated pattern.
    const ret = this.formatResponse(res.result,
        null,
        false);
    return ret;
}
/**
 * Gets the extension policies for the given user.
 *
 * @param {string} userId
 */
public async getPolicies(
    userId: string
): Promise<GalleryInterfaces.UserExtensionPolicy> {
    const routeValues: any = {
        userId: userId
    };

    const verData: vsom.ClientVersioningData = await this.vsoClient.getVersioningData(
        "6.1-preview.1",
        "ExtensionManagement",
        "e5cc8c09-407b-4867-8319-2ae3338cbf6f",
        routeValues);
    const url: string = verData.requestUrl!;
    const options: restm.IRequestOptions = this.createRequestOptions('application/json',
        verData.apiVersion);

    const res = await this.rest.get<GalleryInterfaces.UserExtensionPolicy>(url, options);
    // Note: this contract's type metadata lives in GalleryInterfaces, not ExtensionManagementInterfaces.
    return this.formatResponse(res.result,
        GalleryInterfaces.TypeInfo.UserExtensionPolicy,
        false);
}
/**
 * Resolves (approves or rejects) a single user's request for an extension.
 *
 * @param {string} rejectMessage
 * @param {string} publisherName
 * @param {string} extensionName
 * @param {string} requesterId
 * @param {ExtensionManagementInterfaces.ExtensionRequestState} state
 */
public async resolveRequest(
    rejectMessage: string,
    publisherName: string,
    extensionName: string,
    requesterId: string,
    state: ExtensionManagementInterfaces.ExtensionRequestState
): Promise<number> {
    // 'state' is required; '== null' deliberately catches both null and undefined.
    if (state == null) {
        throw new TypeError('state can not be null or undefined');
    }
    const routeValues: any = {
        publisherName: publisherName,
        extensionName: extensionName,
        requesterId: requesterId
    };
    const queryValues: any = {
        state: state,
    };

    const verData: vsom.ClientVersioningData = await this.vsoClient.getVersioningData(
        "6.1-preview.1",
        "ExtensionManagement",
        "aa93e1f3-511c-4364-8b9c-eb98818f2e0b",
        routeValues,
        queryValues);
    const url: string = verData.requestUrl!;
    const options: restm.IRequestOptions = this.createRequestOptions('application/json',
        verData.apiVersion);

    // The rejection message (may be empty when approving) is the PATCH body.
    const res = await this.rest.update<number>(url, rejectMessage, options);
    return this.formatResponse(res.result,
        null,
        false);
}
/**
 * Gets all pending extension requests.
 */
public async getRequests(
): Promise<ExtensionManagementInterfaces.RequestedExtension[]> {
    const routeValues: any = {};

    const verData: vsom.ClientVersioningData = await this.vsoClient.getVersioningData(
        "6.1-preview.1",
        "ExtensionManagement",
        "216b978f-b164-424e-ada2-b77561e842b7",
        routeValues);
    const url: string = verData.requestUrl!;
    const options: restm.IRequestOptions = this.createRequestOptions('application/json',
        verData.apiVersion);

    const res = await this.rest.get<ExtensionManagementInterfaces.RequestedExtension[]>(url, options);
    return this.formatResponse(res.result,
        ExtensionManagementInterfaces.TypeInfo.RequestedExtension,
        true);
}
/**
 * Resolves (approves or rejects) all pending requests for the given extension.
 *
 * @param {string} rejectMessage
 * @param {string} publisherName
 * @param {string} extensionName
 * @param {ExtensionManagementInterfaces.ExtensionRequestState} state
 */
public async resolveAllRequests(
    rejectMessage: string,
    publisherName: string,
    extensionName: string,
    state: ExtensionManagementInterfaces.ExtensionRequestState
): Promise<number> {
    // 'state' is required; '== null' deliberately catches both null and undefined.
    if (state == null) {
        throw new TypeError('state can not be null or undefined');
    }
    const routeValues: any = {
        publisherName: publisherName,
        extensionName: extensionName
    };
    const queryValues: any = {
        state: state,
    };

    const verData: vsom.ClientVersioningData = await this.vsoClient.getVersioningData(
        "6.1-preview.1",
        "ExtensionManagement",
        "ba93e1f3-511c-4364-8b9c-eb98818f2e0b",
        routeValues,
        queryValues);
    const url: string = verData.requestUrl!;
    const options: restm.IRequestOptions = this.createRequestOptions('application/json',
        verData.apiVersion);

    const res = await this.rest.update<number>(url, rejectMessage, options);
    return this.formatResponse(res.result,
        null,
        false);
}
/**
 * Deletes the current user's request for the given extension.
 *
 * @param {string} publisherName
 * @param {string} extensionName
 */
public async deleteRequest(
    publisherName: string,
    extensionName: string
): Promise<void> {
    const routeValues: any = {
        publisherName: publisherName,
        extensionName: extensionName
    };

    const verData: vsom.ClientVersioningData = await this.vsoClient.getVersioningData(
        "6.1-preview.1",
        "ExtensionManagement",
        "f5afca1e-a728-4294-aa2d-4af0173431b5",
        routeValues);
    const url: string = verData.requestUrl!;
    const options: restm.IRequestOptions = this.createRequestOptions('application/json',
        verData.apiVersion);

    const res = await this.rest.del<void>(url, options);
    // DELETE returns no body; kept for consistency with the generated pattern.
    const ret = this.formatResponse(res.result,
        null,
        false);
    return ret;
}
/**
 * Requests that the given extension be installed for the current user.
 *
 * @param {string} publisherName
 * @param {string} extensionName
 * @param {string} requestMessage
 */
public async requestExtension(
    publisherName: string,
    extensionName: string,
    requestMessage: string
): Promise<ExtensionManagementInterfaces.RequestedExtension> {
    const routeValues: any = {
        publisherName: publisherName,
        extensionName: extensionName
    };

    const verData: vsom.ClientVersioningData = await this.vsoClient.getVersioningData(
        "6.1-preview.1",
        "ExtensionManagement",
        "f5afca1e-a728-4294-aa2d-4af0173431b5",
        routeValues);
    const url: string = verData.requestUrl!;
    const options: restm.IRequestOptions = this.createRequestOptions('application/json',
        verData.apiVersion);

    // The free-form request message is POSTed as the body.
    const res = await this.rest.create<ExtensionManagementInterfaces.RequestedExtension>(url, requestMessage, options);
    return this.formatResponse(res.result,
        ExtensionManagementInterfaces.TypeInfo.RequestedExtension,
        false);
}
/**
 * Gets an authorization token for the extension management service.
 */
public async getToken(
): Promise<string> {
    const routeValues: any = {};

    const verData: vsom.ClientVersioningData = await this.vsoClient.getVersioningData(
        "6.1-preview.1",
        "ExtensionManagement",
        "3a2e24ed-1d6f-4cb2-9f3b-45a96bbfaf50",
        routeValues);
    const url: string = verData.requestUrl!;
    const options: restm.IRequestOptions = this.createRequestOptions('application/json',
        verData.apiVersion);

    const res = await this.rest.get<string>(url, options);
    return this.formatResponse(res.result,
        null,
        false);
}
}
import { untracked } from "mobx";
import { Topic, Partition, Broker, ConfigEntry, BrokerConfig } from "../../../../state/restInterfaces";
import { clone, toJson } from "../../../../utils/jsonUtils";
// Requirements:
// 1. Each replica must be on a different broker (unless replicationFactor < brokerCount makes it impossible).
// 2. Partitions should be balanced as evenly as possible across the brokers
// Optimization:
// If, while we're looking for a broker to assign a replica to, we find there are multiple "good" brokers, we can
// optimize according to the following priority list.
// 1. rack: try to stay within the same rack, maybe even the same broker (is that necessary?)
// 2. partition count, brokers should have roughly the same number of replicas (need to get all topics, all their partitions to even know which broker has which partitions)
// 3. disk space, only if partition count is equal and can't help us decide
// also optimize network traffic: that means trying to stay "inter rack" (bc that traffic is mostly free)
//
// maybe "unassign" all replicas, subtracting used disk space from that broker,
// then, from a fresh start, assign preferably to the original broker, or to a broker in the same rack, or ... (using optimization priorities)
// Input for a reassignment computation. A selection of partitions that should be reassigned.
export type TopicPartitions = {
    topic: Topic // topic the partitions belong to
    partitions: Partition[], // selected partitions of that topic
}
// Result of a reassignment computation. Tells you what brokers to use for each partition in each topic.
export type TopicAssignments = {
    [topicName: string]: {
        [partitionId: number]: PartitionAssignments
    }
};
// Planned placement for one partition.
export type PartitionAssignments = {
    partition: Partition,
    brokers: Broker[] // brokers the replicas are on; meaning length is always equal to the replicationFactor
};
type BrokerReplicaCount = { // track how many replicas were assigned to a broker (over all partitions)
    broker: ExBroker;
    assignedReplicas: number;
};
// Snapshot of cluster data the computation works on; topicPartitions maps topicName -> partitions.
export type ApiData = { brokers: Broker[], topics: Topic[], topicPartitions: Map<string, Partition[]> };
/**
 * Computes a new replica placement for the selected partitions, restricted to the given
 * target brokers. Replicas are spread for balance, then partition leadership is iteratively
 * balanced across brokers. Returns one PartitionAssignments entry per selected partition.
 * Throws (via checkArguments / getExBroker) on inconsistent input.
 */
function computeReassignments(
    apiData: ApiData,
    selectedTopicPartitions: TopicPartitions[],
    targetBrokers: Broker[]
): TopicAssignments {
    checkArguments(apiData, selectedTopicPartitions, targetBrokers);
    // Track information like used disk space per broker, so we extend each broker with some metadata
    const allExBrokers = apiData.brokers.map(b => new ExBroker(b));
    // Only brokers the user selected as targets may receive replicas.
    const targetExBrokers = allExBrokers.filter(exb => targetBrokers.find(b => exb.brokerId == b.brokerId) != undefined);
    // Lookup helper; throws instead of returning undefined so bad ids fail loudly.
    const getExBroker = (brokerId: number): ExBroker => {
        const exBroker = allExBrokers.find(x => x.brokerId == brokerId);
        if (exBroker === undefined) throw new Error("cannot find ExBroker with brokerId " + brokerId);
        return exBroker;
    };
    // Recompute broker stats:
    // For the sake of calculation, it is easier to start with a fresh slate.
    // So we first 'virtually' remove these assignments by going through each replica
    // and subtracting its size(disk space) from broker it is on.
    for (const broker of allExBrokers)
        broker.recompute(apiData, selectedTopicPartitions);
    // Pre-create an (initially empty) assignment entry for every selected partition.
    const resultAssignments: TopicAssignments = {};
    for (const t of selectedTopicPartitions) {
        resultAssignments[t.topic.topicName] = {};
        for (const partition of t.partitions)
            resultAssignments[t.topic.topicName][partition.id] = { partition: partition, brokers: [] };
    }
    // Distribute:
    // Go through each topic, assign the replicas of its partitions to the brokers
    for (const topicPartitions of selectedTopicPartitions) {
        if (topicPartitions.topic.replicationFactor <= 0) continue; // must be an error?
        if (topicPartitions.partitions.length == 0) continue; // no partitions to be reassigned in this topic
        computeTopicAssignments(topicPartitions, targetExBrokers, allExBrokers, resultAssignments[topicPartitions.topic.topicName]);
    }
    // Optimize Leaders:
    // Every broker should have roughly the same number of partitions it leads.
    // 'skew'/'range' here is max-minus-min leader count across brokers; 0 means perfectly even.
    const skewActual = calcRange(allExBrokers, x => x.actualLeader);
    const optimizationLog: { skewBefore: number, skewAfter: number, swaps: number }[] = [];
    // Up to 10 balancing rounds; stop early once perfectly even or no longer improving.
    for (let round = 0; round < 10; round++) {
        const { range: skewBefore } = calcRange(allExBrokers, x => x.plannedLeader);
        const leaderSwitchCount = balanceLeaders(selectedTopicPartitions, resultAssignments, allExBrokers, apiData);
        const { range: skewAfter } = calcRange(allExBrokers, x => x.plannedLeader);
        optimizationLog.push({
            skewBefore,
            skewAfter,
            swaps: leaderSwitchCount
        });
        if (skewAfter == 0 || skewBefore <= skewAfter) {
            break;
        }
    }
    // Diagnostic summary: leader skew before (actual) vs after (planned) optimization.
    const skewPlannedInCluster = calcRange(allExBrokers, x => x.plannedLeader);
    console.debug(`leader skew in cluster`, {
        actual: {
            skew: skewActual.range,
            min: `${skewActual.minValue} partitions (brokerId ${skewActual.min?.brokerId})`,
            max: `${skewActual.maxValue} partitions (brokerId ${skewActual.max?.brokerId})`,
        },
        planned: {
            skew: skewPlannedInCluster.range,
            min: `${skewPlannedInCluster.minValue} partitions (brokerId ${skewPlannedInCluster.min?.brokerId})`,
            max: `${skewPlannedInCluster.maxValue} partitions (brokerId ${skewPlannedInCluster.max?.brokerId})`,
        },
        log: optimizationLog
    });
    // Report (but don't block on) placements that look risky per findRiskyPartitions.
    const criticalBrokers = findRiskyPartitions(allExBrokers, selectedTopicPartitions, resultAssignments, getExBroker);
    reportRiskyPartitions(criticalBrokers);
    return resultAssignments;
}
// Public entry point. Runs the computation outside of MobX tracking, so reading
// observable topic/partition/broker data does not subscribe the caller to changes.
function untrackedCompute(
    apiData: ApiData,
    selectedTopicPartitions: TopicPartitions[],
    targetBrokers: Broker[]
): TopicAssignments {
    return untracked(() => computeReassignments(apiData, selectedTopicPartitions, targetBrokers));
}
export { untrackedCompute as computeReassignments };
// Compute, for the partitions of a single topic, to which brokers their replicas should be assigned to.
function computeTopicAssignments(
topicPartitions: TopicPartitions,
targetBrokers: ExBroker[],
allBrokers: ExBroker[],
resultAssignments: { [partitionId: number]: PartitionAssignments; },
) {
const { topic, partitions } = topicPartitions;
// shouldn't happen, if the user didn't select any partitions, the entry for that topic shouldn't be there either
if (partitions.length == 0) return;
const replicationFactor = topic.replicationFactor;
if (replicationFactor <= 0) return; // normally it shouldn't be possible; every topic must have at least 1 replica for each of its partitions
// Track how many replicas (of this topic!) were assigned to each broker
const brokerReplicaCount: BrokerReplicaCount[] = targetBrokers.map(b => ({ broker: b, assignedReplicas: 0 }));
// Find the most suitable brokers for each partition
partitions.sort((a, b) => a.id - b.id);
for (const partition of partitions) {
// Find brokers to host the replicas (order is not important, leader is determined later)
const replicaAssignments = computeReplicaAssignments(partition, replicationFactor, brokerReplicaCount, allBrokers);
resultAssignments[partition.id].brokers = replicaAssignments;
}
}
// todo:
// The current approach is "greedy" in that it just wants to assign a replica to whatever broker.
// But because of 'example#2' it is possible that we'll end up causing a lot of network traffic that
// could have been avoided if we had just the two brokers.
// A better approach would be to find the best broker for each replica, but instead of committing to that immediately,
// we'd first save that as a "pending" assignment, along with a score of how much work that assignment would be.
// That'd give us a list of pending assignments, which we can sort by their score.
// To determine that score we'd just re-use the first two very simple metrics (same broker is best: score=2, and same rack is almost as good: score=1)
/**
 * Picks `replicas` brokers to host the replicas of a single partition.
 * Greedy: for each replica slot, the comparator below ranks all candidates and
 * the best one is taken. The rules are ordered by priority (no duplicate broker
 * > rack spread > per-topic balance > cluster-wide balance > avoid data
 * movement > disk balance); the order of the comparisons is significant.
 * Side effects: sorts and bumps counters in `brokerReplicaCount` (per-topic
 * tracking) and increments the chosen brokers' assigned* tracking fields.
 */
function computeReplicaAssignments(partition: Partition, replicas: number, brokerReplicaCount: BrokerReplicaCount[], allBrokers: ExBroker[]): ExBroker[] {
    const resultBrokers: ExBroker[] = []; // result
    // Brokers that currently host this partition's replicas (used for the "avoid movement" rules)
    const sourceBrokers = partition.replicas.map(id => allBrokers.first(b => b.brokerId == id)!);
    // (the '!' above is only a type-level assertion; this runtime check catches missing brokers)
    if (sourceBrokers.any(x => x == null)) throw new Error(`replicas of partition ${partition.id} (${toJson(partition.replicas)}) define a brokerId which can't be found in 'allBrokers': ${toJson(allBrokers.map(b => ({ id: b.brokerId, address: b.address, rack: b.rack })))}`);
    const sourceRacks = sourceBrokers.map(b => b.rack).distinct();
    // Track brokers we've used so far (trying to not use any broker twice)
    const consumedBrokers: ExBroker[] = [];
    // Number of replicas (of this topic) already assigned to brokers in the given rack
    const replicasInRack = (rack: string) => brokerReplicaCount.filter(x => x.broker.rack == rack).sum(x => x.assignedReplicas);
    for (let i = 0; i < replicas; i++) {
        // Sort to find the best broker (better brokers first)
        brokerReplicaCount.sort((a, b) => {
            // Precondition
            // Each broker can't host more than 1 replica of a partition
            const aConsumed = consumedBrokers.includes(a.broker);
            const bConsumed = consumedBrokers.includes(b.broker);
            if (aConsumed && !bConsumed) return 1;
            if (!aConsumed && bConsumed) return -1;
            // 1. Prefer distribution across different racks.
            //    If we already assigned a replica to one broker, we'd like to
            //    assign the next replica to a broker in a different rack.
            //    (-> robustness against outages of a whole rack)
            const replicasInRackA = replicasInRack(a.broker.rack);
            const replicasInRackB = replicasInRack(b.broker.rack);
            if (replicasInRackA < replicasInRackB) return -1;
            if (replicasInRackB < replicasInRackA) return 1;
            // 2. Prefer the broker hosting the fewest replicas (for *this* topic).
            //    (-> balanced replicas across user-selected brokers)
            const topicReplicasOnA = a.assignedReplicas;
            const topicReplicasOnB = b.assignedReplicas;
            if (topicReplicasOnA < topicReplicasOnB) return -1;
            if (topicReplicasOnB < topicReplicasOnA) return 1;
            // 3. Prefer the broker hosting the fewest replicas (over *all* topics).
            //    (-> balanced replica count across cluster)
            const replicasOnA = a.broker.plannedReplicas;
            const replicasOnB = b.broker.plannedReplicas;
            if (replicasOnA < replicasOnB) return -1;
            if (replicasOnB < replicasOnA) return 1;
            // 4. Prefer using the same brokers as before.
            //    (-> no network traffic)
            const aIsSource = sourceBrokers.includes(a.broker);
            const bIsSource = sourceBrokers.includes(b.broker);
            if (aIsSource && !bIsSource) return -1;
            if (bIsSource && !aIsSource) return 1;
            // 5. Prefer using the same rack as before.
            //    (-> less expensive network traffic)
            const aIsSameRack = sourceRacks.includes(a.broker.rack);
            const bIsSameRack = sourceRacks.includes(b.broker.rack);
            if (aIsSameRack && !bIsSameRack) return -1;
            if (bIsSameRack && !aIsSameRack) return 1;
            // 6. Prefer brokers with free disk space
            //    (-> balanced disk-space across cluster)
            const diskOnA = a.broker.plannedSize;
            const diskOnB = b.broker.plannedSize;
            if (diskOnA < diskOnB) return -1;
            if (diskOnB < diskOnA) return 1;
            // They're identical, so it doesn't matter which one we use.
            return 0;
        });
        // Take the best broker
        const bestTrackedBroker = brokerReplicaCount[0];
        const bestBroker = bestTrackedBroker.broker;
        bestTrackedBroker.assignedReplicas++; // increase temporary counter (which only tracks assignments within the topic)
        resultBrokers.push(bestBroker);
        consumedBrokers.push(bestBroker);
        // Increase total number of assigned replicas
        bestBroker.assignedReplicas++;
        // The new assignment will take up disk space, which must be tracked as well.
        bestBroker.assignedSize += partition.replicaSize;
    }
    // Count the first broker in the list as the leader
    resultBrokers[0].assignedLeader++;
    return resultBrokers;
}
/**
 * One optimization pass over all selected partitions: for each partition, if
 * another of its (newly assigned) replica brokers currently leads fewer
 * partitions than the planned leader, move it to the front of the replica list
 * (the first entry is treated as the leader). Returns the number of swaps made.
 */
function balanceLeaders(selectedTopicPartitions: TopicPartitions[], resultAssignments: TopicAssignments, allExBrokers: ExBroker[], apiData: ApiData) {
    let leaderSwitchCount = 0;
    for (const t of selectedTopicPartitions) {
        for (const p of t.partitions) {
            // map plain brokers to extended brokers (those with attached tracking data)
            const newBrokers = resultAssignments[t.topic.topicName][p.id].brokers.map(b => allExBrokers.first(e => e.brokerId == b.brokerId)!);
            const plannedLeader = newBrokers[0];
            // from all the brokers that will soon be the ones hosting this partitions replicas,
            // is there one that would be better suited to be the leader?
            // Sort them by ascending leader count (number of partitions they lead)
            // We must make a copy of the newBrokers array because we don't want to modify it (yet)
            const sortedBrokers = newBrokers.slice(0);
            sortedBrokers.sort((a, b) => a.plannedLeader - b.plannedLeader);
            const betterLeader = sortedBrokers[0];
            if (betterLeader != plannedLeader) {
                // We found a better leader, swap the two and adjust their tracking info
                const betterLeaderIndex = newBrokers.indexOf(betterLeader);
                if (betterLeaderIndex < 0)
                    throw new Error('cannot find new/best leader in exBroker array');
                // Swap the two brokers
                newBrokers[0] = betterLeader;
                newBrokers[betterLeaderIndex] = plannedLeader;
                // Adjust tracking info for leaders and consumed disk size
                // NOTE(review): both brokers still host one replica of the partition after the swap,
                // so it is not obvious why `assignedSize` should change here — verify against how
                // assignedSize is consumed elsewhere before relying on these numbers.
                plannedLeader.assignedLeader--;
                plannedLeader.assignedSize -= p.replicaSize;
                betterLeader.assignedLeader++;
                betterLeader.assignedSize += p.replicaSize;
                // Update final assignments
                // mapping our extendedBrokers back to the "simple" brokers
                resultAssignments[t.topic.topicName][p.id].brokers = newBrokers.map(exBroker => apiData.brokers.first(b => b.brokerId == exBroker.brokerId)!);
                leaderSwitchCount++;
            }
        }
    }
    return leaderSwitchCount;
}
// A partition whose availability hinges on a single broker or a single rack staying online.
type RiskyPartition = {
    topic: Topic,
    partition: Partition,
    criticalRacks: string[], // if this rack is offline, the whole partition will be unavailable
    criticalBrokers: ExBroker[], // if this broker is offline, the whole partition will be unavailable
};
// For every selected partition, determine whether taking any single target broker
// (or any whole rack) offline would leave the partition with zero online replicas.
function findRiskyPartitions(
    targetBrokers: ExBroker[],
    selectedTopicPartitions: TopicPartitions[],
    resultAssignments: TopicAssignments,
    getExBroker: (id: number) => ExBroker
): RiskyPartition[] {
    const targetRacks = targetBrokers.map(b => b.rack).distinct();
    const result: RiskyPartition[] = [];

    for (const { topic, partitions } of selectedTopicPartitions) {
        const assignmentsForTopic = resultAssignments[topic.topicName];

        for (const partition of partitions) {
            // Resolve the planned replica brokers (with tracking data) for this partition
            const hosts = assignmentsForTopic[partition.id].brokers.map(b => getExBroker(b.brokerId));

            // A broker is critical when its outage would leave no replica online;
            // a rack is critical when the outage of all its brokers would do the same.
            const criticalBrokers = targetBrokers.filter(b => hosts.except([b]).length == 0);
            const criticalRacks = targetRacks.filter(rack => hosts.filter(h => h.rack !== rack).length == 0);

            if (criticalBrokers.length > 0 || criticalRacks.length > 0)
                result.push({ topic, partition, criticalBrokers, criticalRacks });
        }
    }
    return result;
}
// If there are any critical brokers or racks, this might indicate a bug in the distribution algorithm.
// Except for situations where a topic has a replication factor of 1 (each partition will always be on only one broker),
// or when there is only a single rack available (happens when brokers have null or empty string set as their rack).
// If there are any critical brokers or racks, this might indicate a bug in the distribution algorithm.
// Except for situations where a topic has a replication factor of 1 (each partition will always be on only one broker),
// or when there is only a single rack available (happens when brokers have null or empty string set as their rack).
function reportRiskyPartitions(riskyPartitions: RiskyPartition[]) {
    for (const group of riskyPartitions.groupInto(x => x.topic)) {
        const topic = group.key;

        // Topics with rf<=1 are inherently at risk; not worth reporting
        if (topic.replicationFactor <= 1) continue;

        // Only report partitions that actually have critical brokers/racks
        const issues = group.items.filter(p => p.criticalBrokers.length > 0 || p.criticalRacks.length > 0);
        if (issues.length == 0) continue;

        // At least one partition with issues: report them as one table per topic
        console.error(`Issues in topic "${topic.topicName}" (RF: ${topic.replicationFactor}) (${issues.length} issues).`);
        console.table(issues.map(p => ({
            partitionId: p.partition.id,
            criticalBrokers: p.criticalBrokers.map(b => String(b.brokerId)).join(', '),
            criticalRacks: p.criticalRacks.join(', '),
        })));
    }
}
// Broker extended with tracking information.
// Used to quickly determine which is the best broker for a given replica, without having to recompute the tracked information all the time
// (Otherwise we'd have to iterate over every topic/partition/replica all the time)
class ExBroker implements Broker {
    // Plain Broker fields, copied from the source broker in the constructor
    brokerId: number;
    logDirSize: number;
    address: string;
    rack: string;
    config: BrokerConfig;
    // Values as they actually are currently in the cluster
    actualReplicas: number = 0; // number of all replicas (no matter from which topic) assigned to this broker
    actualSize: number = 0; // total size used by all the replicas assigned to this broker
    actualLeader: number = 0; // for how many partitions is this broker the leader?
    // 'actual' values minus everything that is to be reassigned
    // in other words: the state of the broker without counting anything we're about to reassign
    initialReplicas: number = 0;
    initialSize: number = 0;
    initialLeader: number = 0;
    // values of the current assignments
    // counting only whenever we assign something to this broker.
    assignedReplicas: number = 0;
    assignedSize: number = 0;
    assignedLeader: number = 0;
    // 'planned' = what the broker will look like once the computed assignments are applied
    get plannedReplicas(): number { return this.initialReplicas + this.assignedReplicas; }
    get plannedSize(): number { return this.initialSize + this.assignedSize; }
    get plannedLeader(): number { return this.initialLeader + this.assignedLeader; }
    constructor(sourceBroker: Broker) {
        // copy all plain Broker fields onto this instance
        Object.assign(this, sourceBroker);
    }
    // Recompute all tracked values from scratch: actual cluster state, then the 'initial' baseline
    recompute(apiData: ApiData, selectedTopicPartitions: TopicPartitions[]) {
        this.recomputeActual(apiData);
        this.recomputeInitial(selectedTopicPartitions);
    }
    // Count this broker's replicas, led partitions, and used log-dir size across the whole cluster
    private recomputeActual(apiData: ApiData) {
        this.actualReplicas = 0;
        this.actualSize = 0;
        this.actualLeader = 0;
        if (apiData.topicPartitions == null)
            throw new Error(`cannot recompute actual usage of broker '${this.brokerId}' because 'api.topicPartitions == null' (no permissions?)`);
        for (const [topic, partitions] of apiData.topicPartitions) {
            if (partitions == null) throw new Error(`cannot recompute actual usage of broker '${this.brokerId}' for topic '${topic}', because 'partitions == null' (no permissions?)`);
            for (const p of partitions) {
                // replicas
                const replicasAssignedToThisBroker = p.replicas.count(x => x == this.brokerId);
                this.actualReplicas += replicasAssignedToThisBroker;
                this.actualLeader += p.leader == this.brokerId ? 1 : 0;
                if (replicasAssignedToThisBroker > 0) {
                    // This broker hosts a replica of this partition
                    // find size of the logdir for the partition on this broker
                    const logDirEntry = p.partitionLogDirs.first(x => !x.error && x.brokerId == this.brokerId);
                    if (logDirEntry !== undefined)
                        this.actualSize += logDirEntry.size;
                    else {
                        // todo:
                        // - fallback to another entry? (using maximum size we find)
                        // - throw error?
                        // NOTE(review): recomputeInitial() falls back to p.replicaSize in this same
                        // situation; silently skipping here can make 'initial' exceed 'actual' —
                        // confirm which behavior is intended and make the two consistent.
                    }
                }
            }
        }
    }
    // Compute the 'initial' baseline: actual values minus everything contributed by the selected partitions
    private recomputeInitial(selectedTopicPartitions: TopicPartitions[]) {
        // Subtract the stats of the selected partitions
        let selectedReplicas = 0;
        let selectedSize = 0;
        let selectedLeader = 0;
        for (const topic of selectedTopicPartitions)
            for (const p of topic.partitions) {
                const replicasOnBroker = p.replicas.count(x => x == this.brokerId);
                selectedReplicas += replicasOnBroker;
                selectedLeader += (p.leader == this.brokerId) ? 1 : 0;
                if (replicasOnBroker > 0) {
                    // using 'first()' because each broker has exactly one logDirEntry (or maybe zero if broker is offline)
                    const logDirEntry = p.partitionLogDirs.first(x => !x.error && x.brokerId == this.brokerId);
                    if (logDirEntry) {
                        // direct match
                        selectedSize += logDirEntry.size;
                    } else {
                        // broker offline, use fallback
                        selectedSize += p.replicaSize;
                    }
                }
            }
        // Since at the start we'll pretend the selected partitions are not assigned to any broker
        // we'll get our "initial" from actual minus selected.
        this.initialReplicas = this.actualReplicas - selectedReplicas;
        this.initialSize = this.actualSize - selectedSize;
        this.initialLeader = this.actualLeader - selectedLeader;
    }
}
// Validates the inputs to computeReassignments, throwing a descriptive error for any bad data.
function checkArguments(
    apiData: ApiData,
    selectedTopicPartitions: TopicPartitions[],
    targetBrokers: Broker[]) {

    // Check for missing or invalid api data
    throwIfNullOrEmpty("apiData.brokers", apiData.brokers);
    throwIfNullOrEmpty("apiData.topics", apiData.topics);
    throwIfNullOrEmpty("apiData.topicPartitions", apiData.topicPartitions);

    // Every known topic must have partition data available
    const missingPartitionData = apiData.topics.filter(t => apiData.topicPartitions.get(t.topicName) == null);
    if (missingPartitionData.length > 0)
        throw new Error("apiData is missing topicPartitions for these topics: " + missingPartitionData.map(t => t.topicName).join(', '));

    // Require at least one selected partition
    if (selectedTopicPartitions.sum(x => x.partitions.length) == 0)
        throw new Error("No partitions selected");

    // Require at least as many brokers as the highest replication factor of any selected topic
    const rfGroups = selectedTopicPartitions.groupInto(t => t.topic.replicationFactor);
    const maxRf = rfGroups.sort((a, b) => b.key - a.key)[0]; // sort descending, take the highest-rf group
    if (maxRf.key > targetBrokers.length)
        throw new Error(`You selected ${targetBrokers.length} target brokers, but the following topics have a replicationFactor of ${maxRf.key}, so at least ${maxRf.key} target brokers are required: ${toJson(maxRf.items.map(t => t.topic.topicName))}`);
}
// Throws when the given array or Map is null/undefined or has no entries;
// `name` identifies the offending value in the error message.
function throwIfNullOrEmpty(name: string, obj: any[] | Map<any, any>) {
    if (obj == null)
        throw new Error(name + " is null");

    // Arrays report their length, Maps their size — either way it must be non-zero
    const entryCount = Array.isArray(obj) ? obj.length : obj.size;
    if (entryCount == 0)
        throw new Error(name + " is empty");
}
/**
 * Returns the spread (max - min) of `selector(item)` over `ar`, along with the
 * extreme items and their values. For an empty array all values are 0 and the
 * items are undefined. Ties resolve to the first occurrence.
 */
function calcRange<T>(ar: T[], selector: (item: T) => number): {
    range: number,
    min: T | undefined,
    minValue: number,
    max: T | undefined,
    maxValue: number,
} {
    if (ar.length == 0) return {
        range: 0,
        min: undefined,
        minValue: 0,
        max: undefined,
        maxValue: 0
    };

    // Single pass; the selector is evaluated exactly once per element.
    // (The previous implementation scanned twice via minBy/maxBy and then
    // re-evaluated the selector for both extremes.)
    let min = ar[0];
    let max = ar[0];
    let minValue = selector(ar[0]);
    let maxValue = minValue;
    for (let i = 1; i < ar.length; i++) {
        const v = selector(ar[i]);
        if (v < minValue) { minValue = v; min = ar[i]; }
        if (v > maxValue) { maxValue = v; max = ar[i]; }
    }

    return {
        range: maxValue - minValue,
        min: min,
        minValue: minValue,
        max: max,
        maxValue: maxValue,
    };
}
function dumpBrokerInfo(title: string, brokers: ExBroker[]) {
console.log(title);
console.table(brokers.map((x: ExBroker) => ({
id: x.brokerId,
address: x.address,
actualLeader: x.actualLeader,
plannedLeader: x.plannedLeader,
actualReplicas: x.actualReplicas,
plannedReplicas: x.plannedReplicas,
actualSize: x.actualSize,
plannedSize: x.plannedSize,
full: {
...x
}
})), [
'address',
'actualLeader',
'plannedLeader',
'actualReplicas',
'plannedReplicas',
'actualSize',
'plannedSize'
]);
} | the_stack |
import * as jsonschema from "jsonschema";
import { Inputs, OptionItem } from "../types";
/**
 * A user-supplied validation callback: returns a human-readable error message
 * when `input` is invalid, or `undefined` when it is valid. May be async.
 */
export type ValidateFunc<T> = (
  input: T,
  inputs?: Inputs
) => string | undefined | Promise<string | undefined>;
/**
 * Validation for Any Instance Type
 * JSON Schema Validation reference: http://json-schema.org/draft/2019-09/json-schema-validation.html
 */
export interface StaticValidation {
  /**
   * whether the value is required or not, default value is true if it is undefined
   * (NOTE(review): `validate()` in this module only rejects missing values when
   * `required === true`, so an undefined `required` is effectively treated as
   * NOT required there — confirm the intended default)
   */
  required?: boolean;
  /**
   * An instance validates successfully against this keyword if its value is equal to the value of the keyword.
   */
  equals?: unknown;
}
/**
* Validation for Strings
*/
export interface StringValidation extends StaticValidation {
/**
* A string instance is valid against this keyword if its length is less than, or equal to, the value of this keyword.
*/
maxLength?: number;
/**
* A string instance is valid against this keyword if its length is greater than, or equal to, the value of this keyword.
*/
minLength?: number;
/**
* A string instance is considered valid if the regular expression matches the instance successfully.
*/
pattern?: string;
/**
* A string instance validates successfully against this keyword if its value is equal to one of the elements in this keyword's array value.
*/
enum?: string[]; // the value must be contained in this list
/**
* A string instance is valid against this keyword if the string starts with the value of this keyword.
*/
startsWith?: string;
/**
* A string instance is valid against this keyword if the string ends with the value of this keyword.
*/
endsWith?: string;
/**
* A string instance is valid against this keyword if the string contains the value of this keyword.
*/
includes?: string;
/**
* An instance validates successfully against this keyword if its value is equal to the value of the keyword.
*/
equals?: string;
}
/**
* Validation for String Arrays
*/
export interface StringArrayValidation extends StaticValidation {
/**
* The value of this keyword MUST be a non-negative integer.
* An array instance is valid against "maxItems" if its size is less than, or equal to, the value of this keyword.
*/
maxItems?: number;
/**
* The value of this keyword MUST be a non-negative integer.
* An array instance is valid against "minItems" if its size is greater than, or equal to, the value of this keyword.
*/
minItems?: number;
/**
* If this keyword has boolean value false, the instance validates successfully. If it has boolean value true, the instance validates successfully if all of its elements are unique.
*/
uniqueItems?: boolean;
/**
   * An array instance validates successfully against this keyword if it has the same set of elements as this array (mutual containment; order and duplicates are not considered).
*/
equals?: string[];
/**
* An array instance is valid against "enum" array if all of the elements of the array is contained in the `enum` array.
*/
enum?: string[];
/**
* An array instance is valid against "contains" if it contains the value of `contains`
*/
contains?: string;
/**
* An array instance is valid against "containsAll" array if it contains all of the elements of `containsAll` array.
*/
containsAll?: string[];
/**
* An array instance is valid against "containsAny" array if it contains any one of the elements of `containsAny` array.
*/
containsAny?: string[]; ///non-standard, the values must contains any one in the array
}
/**
* The validation is checked by a validFunc provided by user
*/
export interface FuncValidation<
T extends string | string[] | OptionItem | OptionItem[] | undefined
> {
/**
* A function that will be called to validate input and to give a hint to the user.
*
* @param input The current value of the input to be validated.
* @return A human-readable string which is presented as diagnostic message.
* Return `undefined` when 'value' is valid.
*/
validFunc: ValidateFunc<T>;
}
/**
* Definition of validation schema, which is a union of `StringValidation`, `StringArrayValidation` and `FuncValidation<any>`
*/
export type ValidationSchema = StringValidation | StringArrayValidation | FuncValidation<any>;
/**
 * Builds a reusable validation function for the given schema, closing over `inputs`.
 * @param validation validation schema
 * @param inputs object to carry all user inputs
 * @returns a function that validates a single input value against the schema
 */
export function getValidationFunction<T extends string | string[] | undefined>(
  validation: ValidationSchema,
  inputs: Inputs
): (input: T) => string | undefined | Promise<string | undefined> {
  return (input: T) => validate(validation, input, inputs);
}
/**
* Implementation of validation function
* @param validSchema validation schema
* @param value value to validate
* @param inputs user inputs object, which works as the context of the validation
* @returns A human-readable string which is presented as diagnostic message.
* Return `undefined` when 'value' is valid.
*/
export async function validate<T extends string | string[] | OptionItem | OptionItem[] | undefined>(
validSchema: ValidationSchema,
value: T,
inputs?: Inputs
): Promise<string | undefined> {
{
//FuncValidation
const funcValidation: FuncValidation<T> = validSchema as FuncValidation<T>;
if (funcValidation.validFunc) {
const res = await funcValidation.validFunc(value, inputs);
return res as string;
}
}
if (!value) {
if ((validSchema as StaticValidation).required === true) return `input value is required.`;
}
{
// StringValidation
const stringValidation: StringValidation = validSchema as StringValidation;
const strToValidate = value as string;
if (typeof strToValidate === "string") {
const schema: any = {};
if (stringValidation.equals && typeof stringValidation.equals === "string")
schema.const = stringValidation.equals;
if (
stringValidation.enum &&
stringValidation.enum.length > 0 &&
typeof stringValidation.enum[0] === "string"
)
schema.enum = stringValidation.enum;
if (stringValidation.minLength) schema.minLength = stringValidation.minLength;
if (stringValidation.maxLength) schema.maxLength = stringValidation.maxLength;
if (stringValidation.pattern) schema.pattern = stringValidation.pattern;
if (Object.keys(schema).length > 0) {
const validateResult = jsonschema.validate(strToValidate, schema);
if (validateResult.errors && validateResult.errors.length > 0) {
return `'${strToValidate}' ${validateResult.errors[0].message}`;
}
}
if (stringValidation.startsWith) {
if (!strToValidate.startsWith(stringValidation.startsWith)) {
return `'${strToValidate}' does not meet startsWith:'${stringValidation.startsWith}'`;
}
}
if (stringValidation.endsWith) {
if (!strToValidate.endsWith(stringValidation.endsWith)) {
return `'${strToValidate}' does not meet endsWith:'${stringValidation.endsWith}'`;
}
}
if (stringValidation.includes && typeof strToValidate === "string") {
if (!strToValidate.includes(stringValidation.includes)) {
return `'${strToValidate}' does not meet includes:'${stringValidation.includes}'`;
}
}
}
}
//StringArrayValidation
{
const stringArrayValidation: StringArrayValidation = validSchema as StringArrayValidation;
const arrayToValidate = value as string[];
if (arrayToValidate instanceof Array) {
const schema: any = {};
if (stringArrayValidation.maxItems) schema.maxItems = stringArrayValidation.maxItems;
if (stringArrayValidation.minItems) schema.minItems = stringArrayValidation.minItems;
if (stringArrayValidation.uniqueItems) schema.uniqueItems = stringArrayValidation.uniqueItems;
if (Object.keys(schema).length > 0) {
const validateResult = jsonschema.validate(arrayToValidate, schema);
if (validateResult.errors && validateResult.errors.length > 0) {
return `'${arrayToValidate}' ${validateResult.errors[0].message}`;
}
}
if (stringArrayValidation.equals) {
if (stringArrayValidation.equals instanceof Array) {
stringArrayValidation.enum = stringArrayValidation.equals;
stringArrayValidation.containsAll = stringArrayValidation.equals;
} else {
return `Array '${arrayToValidate}' does not equals to string:'${stringArrayValidation.equals}'`;
}
}
if (stringArrayValidation.enum) {
for (const item of arrayToValidate) {
if (!stringArrayValidation.enum.includes(item)) {
return `'${arrayToValidate}' does not meet with enum:'${stringArrayValidation.enum}'`;
}
}
}
if (stringArrayValidation.contains) {
if (!arrayToValidate.includes(stringArrayValidation.contains)) {
return `'${arrayToValidate}' does not meet with contains:'${stringArrayValidation.contains}'`;
}
}
if (stringArrayValidation.containsAll) {
const containsAll: string[] = stringArrayValidation.containsAll;
if (containsAll.length > 0) {
for (const i of containsAll) {
if (!arrayToValidate.includes(i)) {
return `'${arrayToValidate}' does not meet with containsAll:'${containsAll}'`;
}
}
}
}
if (stringArrayValidation.containsAny) {
const containsAny: string[] = stringArrayValidation.containsAny;
if (containsAny.length > 0) {
// let array = valueToValidate as string[];
let found = false;
for (const i of containsAny) {
if (arrayToValidate.includes(i)) {
found = true;
break;
}
}
if (!found) {
return `'${arrayToValidate}' does not meet containsAny:'${containsAny}'`;
}
}
}
}
}
return undefined;
} | the_stack |
export = SystemJSLoader;
export as namespace SystemJSLoader;
declare global {
const SystemJS: typeof SystemJSLoader;
/**
* @deprecated use SystemJS https://github.com/systemjs/systemjs/releases/tag/0.19.10
*/
const System: typeof SystemJSLoader;
const __moduleName: string;
}
declare const SystemJSLoader: SystemJSLoader.System;
declare namespace SystemJSLoader {
    /**
     * Maps a bundle name to the list of module names it provides.
     */
    interface ModulesList {
        [bundleName: string]: string[];
    }
    /**
     * Generic map keyed by package name.
     */
    interface PackageList<T> {
        [packageName: string]: T;
    }
/**
* The following module formats are supported:
*
* - esm: ECMAScript Module (previously referred to as es6)
* - cjs: CommonJS
* - amd: Asynchronous Module Definition
* - global: Global shim module format
* - register: System.register or System.registerDynamic compatibility module format
*
*/
type ModuleFormat = "esm" | "cjs" | "amd" | "global" | "register";
/**
* Sets the module name of the transpiler to be used for loading ES6 modules.
* Represents a module name for System.import that must resolve to either Traceur, Babel or TypeScript.
* When set to traceur, babel or typescript, loading will be automatically configured as far as possible.
*/
type Transpiler = "plugin-traceur" | "plugin-babel" | "plugin-typescript" | "traceur" | "babel" | "typescript" | false;
    /** Map configuration: module alias → location/package name, or a scoped sub-map. */
    type ConfigMap = PackageList<string | PackageList<string>>;
    /** Meta configuration keyed by module name or wildcard pattern. */
    type ConfigMeta = PackageList<MetaConfig>;
interface MetaConfig {
/**
* Sets in what format the module is loaded.
*/
format?: ModuleFormat | undefined;
/**
* For the global format, when automatic detection of exports is not enough, a custom exports meta value can be set.
* This tells the loader what global name to use as the module's export value.
*/
exports?: string | undefined;
/**
* Dependencies to load before this module. Goes through regular paths and map normalization.
* Only supported for the cjs, amd and global formats.
*/
deps?: string[] | undefined;
        /**
         * A map of global names to module names that should be defined only for the execution of this module.
         * Enables use of legacy code that expects certain globals to be present.
         * Referenced modules automatically become dependencies. Only supported for the cjs and global formats.
         * (NOTE(review): the property below is declared as `string`, which conflicts with the
         * "map of global names" description — verify against the SystemJS documentation.)
         */
globals?: string | undefined;
/**
* Set a loader for this meta path.
*/
loader?: string | undefined;
/**
* For plugin transpilers to set the source map of their transpilation.
*/
sourceMap?: any;
/**
* Load the module using <script> tag injection.
*/
scriptLoad?: boolean | undefined;
/**
* The nonce attribute to use when loading the script as a way to enable CSP.
* This should correspond to the "nonce-" attribute set in the Content-Security-Policy header.
*/
nonce?: string | undefined;
/**
* The subresource integrity attribute corresponding to the script integrity,
* describing the expected hash of the final code to be executed.
* For example, System.config({ meta: { 'src/example.js': { integrity: 'sha256-e3b0c44...' }});
* would throw an error if the translated source of src/example.js doesn't match the expected hash.
*/
integrity?: string | undefined;
/**
* When scripts are loaded from a different domain (e.g. CDN) the global error handler (window.onerror)
* has very limited information about errors to prevent unintended leaking. In order to mitigate this,
* the <script> tags need to set crossorigin attribute and the server needs to enable CORS.
* The valid values are "anonymous" and "use-credentials".
*/
crossOrigin?: string | undefined;
/**
* When loading a module that is not an ECMAScript Module, we set the module as the default export,
* but then also iterate the module object and copy named exports for it a well.
* Use this option to disable this iteration and copying of the exports.
*/
esmExports?: boolean | undefined;
/**
* To ignore resources that shouldn't be traced as part of the build.
* Use with the SystemJS Builder. (https://github.com/systemjs/builder#ignore-resources)
*/
build?: boolean | undefined;
/**
* A truthy value enables sending credentials to the server on every request. Additionally, a string value adds
* an "Authorization" header with that value to all requests.
*/
authorization?: string | boolean | undefined;
}
interface PackageConfig {
/**
* The main entry point of the package (so import 'local/package' is equivalent to import 'local/package/index.js')
*/
main?: string | undefined;
/**
* The module format of the package. See Module Formats.
*/
format?: ModuleFormat | undefined;
/**
* The default extension to add to modules requested within the package. Takes preference over defaultJSExtensions.
* Can be set to defaultExtension: false to optionally opt-out of extension-adding when defaultJSExtensions is enabled.
*/
defaultExtension?: boolean | string | undefined;
/**
* Local and relative map configurations scoped to the package. Apply for subpaths as well.
*/
map?: ConfigMap | undefined;
/**
* Module meta provides an API for SystemJS to understand how to load modules correctly.
* Package-scoped meta configuration with wildcard support. Modules are subpaths within the package path.
* This also provides an opt-out mechanism for defaultExtension, by adding modules here that should skip extension adding.
*/
meta?: ConfigMeta | undefined;
}
    /**
     * Feature flags forwarded to the Traceur compiler (see `Config.traceurOptions`).
     */
    interface TraceurOptions {
        properTailCalls?: boolean | undefined;
        symbols?: boolean | undefined;
        arrayComprehension?: boolean | undefined;
        asyncFunctions?: boolean | undefined;
        asyncGenerators?: any;
        forOn?: boolean | undefined;
        generatorComprehension?: boolean | undefined;
    }
/**
 * SystemJS loader configuration, as accepted by System.config().
 */
interface Config {
    /**
     * For custom config names
     */
    [customName: string]: any;
    /**
     * The baseURL provides a special mechanism for loading modules relative to a standard reference URL.
     */
    baseURL?: string | undefined;
    /**
     * Set the Babel transpiler options when System.transpiler is set to babel.
     */
    // TODO: Import BabelCore.TransformOptions
    babelOptions?: any;
    /**
     * Bundles allow a collection of modules to be downloaded together as a package whenever any module from that collection is requested.
     * Useful for splitting an application into sub-modules for production. Use with the SystemJS Builder.
     */
    bundles?: ModulesList | undefined;
    /**
     * Backwards-compatibility mode for the loader to automatically add '.js' extensions when not present to module requests.
     * This allows code written for SystemJS 0.16 or less to work easily in the latest version:
     */
    defaultJSExtensions?: boolean | undefined;
    /**
     * An alternative to bundling providing a solution to the latency issue of progressively loading dependencies.
     * When a module specified in depCache is loaded, asynchronous loading of its pre-cached dependency list begins in parallel.
     */
    depCache?: ModulesList | undefined;
    /**
     * The map option is similar to paths, but acts very early in the normalization process.
     * It allows you to map a module alias to a location or package:
     */
    map?: ConfigMap | undefined;
    /**
     * Module meta provides an API for SystemJS to understand how to load modules correctly.
     * Meta is how we set the module format of a module, or know how to shim dependencies of a global script.
     */
    meta?: ConfigMeta | undefined;
    /**
     * Packages provide a convenience for setting meta and map configuration that is specific to a common path.
     * In addition packages allow for setting contextual map configuration which only applies within the package itself.
     * This allows for full dependency encapsulation without always needing to have all dependencies in a global namespace.
     */
    packages?: PackageList<PackageConfig> | undefined;
    /**
     * The ES6 Module Loader paths implementation, applied after normalization and supporting subpaths via wildcards.
     * It is usually advisable to use map configuration over paths unless you need strict control over normalized module names.
     */
    paths?: PackageList<string> | undefined;
    /**
     * Set the Traceur compilation options.
     */
    traceurOptions?: TraceurOptions | undefined;
    /**
     * Sets the module name of the transpiler to be used for loading ES6 modules.
     */
    transpiler?: Transpiler | undefined;
    /**
     * Enables load tracing; when true, load records are made available on System.loads.
     */
    trace?: boolean | undefined;
    /**
     * Sets the TypeScript transpiler options.
     */
    // TODO: Import Typescript.CompilerOptions
    typescriptOptions?: {
        /**
         * A boolean flag which instructs the plugin to load configuration from "tsconfig.json".
         * To override the location of the file set this option to the path of the configuration file,
         * which will be resolved using normal SystemJS resolution.
         * Note: This setting is specific to plugin-typescript.
         */
        tsconfig?: boolean | string | undefined,
        [key: string]: any
    } | undefined;
}
/**
 * The SystemJS loader instance: registry access, module loading, and
 * configuration, plus all Config properties readable directly.
 */
interface System extends Config {
    /**
     * For backwards-compatibility with AMD environments, set window.define = System.amdDefine.
     */
    amdDefine(...args: any[]): void;
    /**
     * For backwards-compatibility with AMD environments, set window.require = System.amdRequire.
     */
    amdRequire(deps: string[], callback: (...modules: any[]) => void): void;
    /**
     * SystemJS configuration helper function.
     * Once SystemJS has loaded, configuration can be set on SystemJS by using the configuration function System.config.
     */
    config(config: Config): void;
    /**
     * This represents the System base class, which can be extended or reinstantiated to create a custom System instance.
     */
    constructor: new () => System;
    /**
     * Deletes a module from the registry by normalized name.
     */
    delete(moduleName: string): void;
    /**
     * Returns a module from the registry by normalized name.
     */
    get(moduleName: string): any;
    /**
     * Returns a clone of the internal SystemJS configuration in use.
     */
    getConfig(): Config;
    /**
     * Returns whether a given module exists in the registry by normalized module name.
     */
    has(moduleName: string): boolean;
    /**
     * Loads a module by name taking an optional normalized parent name argument.
     * Promise resolves to the module value.
     */
    import(moduleName: string, normalizedParentName?: string): Promise<any>;
    /**
     * Given any object, returns true if the object is either a SystemJS module or native JavaScript module object, and false otherwise.
     * Useful for interop scenarios.
     */
    isModule(object: any): boolean;
    /**
     * Given a plain JavaScript object, return an equivalent Module object.
     * Useful when writing a custom instantiate hook or using System.set.
     */
    newModule(object: any): any;
    /**
     * Declaration function for defining modules of the System.register polyfill module format.
     */
    register(name: string, deps: string[], declare: (...modules: any[]) => any): void;
    register(deps: string[], declare: (...modules: any[]) => any): void;
    /**
     * Companion module format to System.register for non-ES6 modules.
     * Provides a <script>-injection-compatible module format that any CommonJS or Global module can be converted into for CSP compatibility.
     */
    registerDynamic(name: string, deps: string[], executingRequire: boolean, declare: (...modules: any[]) => any): void;
    registerDynamic(deps: string[], executingRequire: boolean, declare: (...modules: any[]) => any): void;
    /**
     * Sets a module into the registry directly and synchronously.
     * Typically used along with System.newModule to create a valid Module object:
     */
    set(moduleName: string, module: any): void;
    /**
     * Asynchronously resolves a module name to its normalized URL.
     */
    resolve(moduleName: string, parentName?: string): Promise<string>;
    /**
     * Resolves module name to normalized URL.
     * Synchronous alternative to `SystemJS.resolve`.
     */
    resolveSync(moduleName: string, parentName?: string): string;
    /**
     * In CommonJS environments, SystemJS will substitute the global require as needed by the module format being
     * loaded to ensure the correct detection paths in loaded code.
     * The CommonJS require can be recovered within these modules from System._nodeRequire.
     */
    _nodeRequire(dep: string): any;
    /**
     * Modules list available only with trace=true
     */
    loads: PackageList<any>;
    // NOTE(review): undocumented upstream — presumably the detected runtime
    // environment; confirm against the SystemJS documentation.
    env: string;
    // NOTE(review): presumably enables full loader stack traces on errors;
    // confirm against the SystemJS documentation.
    loaderErrorStack: boolean;
    /**
     * Paths matching package configuration files (e.g. package.json) that
     * SystemJS loads lazily to configure packages.
     */
    packageConfigPaths: string[];
    /**
     * Specify a value of true to have SystemJS conform to the AMD-style plugin syntax, e.g. "text!some/file.txt", over the default of "some/file.txt!text".
     */
    pluginFirst: boolean;
    version: string;
    /**
     * Enables the output of warnings to the console, including deprecation messages.
     */
    warnings: boolean;
}
} | the_stack |
/// <reference path="../../elements.d.ts" />
import { EditPage } from '../../elements';
import { Polymer } from '../../../../tools/definitions/polymer';
import { I18NKeys } from '../../../_locales/i18n-keys';
declare const browserAPI: browserAPI;
namespace CrmEditPageElement {
// Polymer property declarations for the crm-edit-page element.
// NOTE(review): the annotation describes the *runtime* values of the
// properties (CRM.Node etc.) while the initializer holds Polymer property
// descriptors ({type, value, notify}); the mismatch is silenced with
// `as any` — presumably intentional so bindings get the runtime types.
export const crmEditPageProperties: {
    item: CRM.Node;
    nodeInfo: CRM.NodeInfo;
    hideUpdateMessage: boolean;
} = {
    /**
     * The item to edit
     */
    item: {
        type: Object,
        value: null,
        notify: true
    },
    /**
     * The nodeInfo to display
     */
    nodeInfo: {
        type: Object,
        value: {},
        notify: true
    },
    /**
     * Whether to hide the update message
     */
    hideUpdateMessage: {
        type: Boolean,
        value: true,
        notify: true
    }
} as any;
/**
 * Behavior class backing the crm-edit-page Polymer element: shows the
 * edit dialog for a CRM node, animating it in/out over a backdrop and
 * dispatching to the type-specific edit page (link/script/stylesheet/
 * menu/divider).
 */
export class CEP {
    static is: string = 'crm-edit-page';
    /**
     * Whether the item is a link
     */
    static isLink: boolean = false;
    /**
     * Whether the item is a script
     */
    static isScript: boolean = false;
    /**
     * Whether the item is a divider
     */
    static isDivider: boolean = false;
    /**
     * Whether the item is a menu
     */
    static isMenu: boolean = false;
    /**
     * Whether the item is a stylesheet
     */
    static isStylesheet: boolean = false;
    /**
     * The link item
     */
    static linkItem: CRM.LinkNode = {} as any;
    /**
     * The script item
     */
    static scriptItem: CRM.ScriptNode = {} as any;
    /**
     * The divider item
     */
    static dividerItem: CRM.DividerNode = {} as any;
    /**
     * The menu item
     */
    static menuItem: CRM.MenuNode = {} as any;
    /**
     * The stylesheet item
     */
    static stylesheetItem: CRM.StylesheetNode = {} as any;
    /**
     * The backdrop element associated with the current dialog
     */
    private static _backdropEl: HTMLElement;
    static properties = crmEditPageProperties;
    // Event handler is resolved by name on the element at runtime.
    static listeners = {
        "neon-animation-finish": '_onNeonAnimationFinish'
    };
    /**
     * Localized "created by you" HTML snippet for the node info section.
     */
    static getCreatedBy(this: CrmEditPage) {
        return this.___(I18NKeys.options.crmEditPage.createdByYou,
            `<b id="nodeInfoByYou" title="${
                this.___(I18NKeys.options.crmEditPage.hasAllPermissions)
            }">You</b>`)
    }
    /**
     * Localized "created on"/"installed on" HTML snippet, depending on
     * whether the node originated locally.
     */
    static nodeInfoSource(this: CrmEditPage) {
        return this.___(this.isLocal(this.nodeInfo.source) ?
            I18NKeys.options.crmEditPage.createdOn :
            I18NKeys.options.crmEditPage.installedOn,
            `<b title="${
                this.getInstallDate()
            }">${this.nodeInfo.installDate}</b>`)
    }
    /**
     * Whether the node's source denotes a locally created node.
     * NOTE(review): the `source is string` predicate can also hold for
     * non-string sources when `this.item.isLocal` is set — unchanged to
     * preserve caller-visible narrowing behavior.
     */
    static isLocal(this: CrmEditPage, source: {
        updateURL?: string;
        downloadURL?: string;
        url?: string;
        author?: string;
    }|string): source is string {
        if (!source) {
            return true;
        }
        return source === 'local' || this.item.isLocal;
    };
    // True when a nodeInfo object is present at all.
    private static _nodeInfoExists(nodeInfo: CRM.NodeInfo): boolean {
        return !!nodeInfo;
    };
    /**
     * Whether the node info section should be hidden: no nodeInfo, or a
     * local node that has no install date to show.
     */
    static hideNodeInfo(this: CrmEditPage, nodeInfo: CRM.NodeInfo): boolean {
        return !this._nodeInfoExists(nodeInfo) ||
            (this.isLocal(nodeInfo.source) && !this.hasInstallDate(nodeInfo));
    };
    /**
     * Whether the node info carries an install date.
     */
    static hasInstallDate(nodeInfo: CRM.NodeInfo): boolean {
        return this._nodeInfoExists(nodeInfo) && !!nodeInfo.installDate;
    };
    /**
     * Cleanup after the close animation: hides backdrop and overlay,
     * restores page scrolling, and clears the edited item state.
     */
    private static _onAnimationDone(this: CrmEditPage) {
        this._backdropEl.classList.remove('visible');
        this._backdropEl.classList.remove('clickthrough');
        this.$.overlayCont.style.display = 'none';
        document.body.style.overflow = 'auto';
        document.body.style.marginRight = '0';
        window.app.show = false;
        window.app.item = null;
        this._unassignItems();
    };
    /**
     * Resets all node-type flags and per-type item holders.
     */
    private static _unassignItems(this: CrmEditPage) {
        this.isLink = this.isScript = this.isStylesheet = this.isMenu = this.isDivider = false;
        this.linkItem = this.scriptItem = this.stylesheetItem = this.menuItem = this.dividerItem = {} as any;
    };
    /**
     * Shows the backdrop, locks page scrolling (compensating for the
     * scrollbar width) and scales the dialog in. Returns the animation.
     */
    private static _animateIn(this: CrmEditPage) {
        this._backdropEl.classList.add('visible');
        this._backdropEl.classList.add('animateIn');
        document.body.style.overflow = 'hidden';
        document.body.style.marginRight = '17px';
        window.app.show = true;
        this.$.overlayCont.style.display = 'block';
        return window.animateTransform(this.$.overlayCont, {
            propName: 'scale',
            postfix: '',
            from: 0,
            to: 1
        }, {
            duration: 300,
            easing: 'cubic-bezier(0.215, 0.610, 0.355, 1.000)',
            fill: 'both'
        });
    };
    /**
     * Scales the dialog out and runs the close cleanup when finished.
     */
    static animateOut(this: CrmEditPage) {
        this._backdropEl.classList.remove('animateIn');
        //Make it clickthrough-able already
        this._backdropEl.classList.add('clickthrough');
        const animation = window.animateTransform(this.$.overlayCont, {
            propName: 'scale',
            postfix: '',
            from: 1,
            to: 0
        }, {
            duration: 500,
            easing: 'cubic-bezier(0.215, 0.610, 0.355, 1.000)',
            fill: 'both'
        });
        animation.onfinish = () => {
            this._onAnimationDone();
        }
        document.body.parentElement.style.overflow = 'auto';
    };
    /**
     * Propagates a name change to the bound item.
     */
    static updateName(this: CrmEditPage, value: any) {
        this.notifyPath('item.name', value);
    };
    /**
     * Whether the "script was updated" notice should show: only for
     * script nodes flagged with updateNotice, while not dismissed.
     */
    static showUpgradeNotice(hideUpdateMessage: boolean, node: CRM.Node): boolean {
        // Coerced with !! so the declared boolean return type is honored even
        // when short-circuit evaluation would yield undefined/null.
        return !!(!hideUpdateMessage && (node && node.type === 'script' && node.value && node.value.updateNotice));
    };
    /**
     * Status message describing whether the last script update produced
     * errors for this node.
     */
    static getScriptUpdateStatus(node: CRM.Node): string {
        if (node) {
            if (window.app.storageLocal.upgradeErrors) {
                if (window.app.storageLocal.upgradeErrors[node.id]) {
                    return 'Some errors have occurred in updating this script. Please resolve them by clicking the link and replace any chrome ' +
                        'calls on error lines with their CRM API equivalent.';
                }
            }
            return 'No errors have been detected in updating this script but this is no guarantee it will work, be sure to test it at least once.';
        }
        return '';
    };
    /**
     * Slides the update notice out of view (if visible) and clears the
     * updateNotice flag on the script being edited.
     */
    static hideUpdateMergeDialog(this: CrmEditPage) {
        if (this.showUpgradeNotice(this.hideUpdateMessage, this.item)) {
            const height = this.$.scriptUpdateNotice.getBoundingClientRect().height;
            const marginBot = '-' + height + 'px';
            this.$.scriptUpdateNotice.animate([
                {
                    marginBottom: '0px'
                }, {
                    marginBottom: marginBot
                }
            ], {
                duration: 350,
                easing: 'cubic-bezier(0.215, 0.610, 0.355, 1.000)'
            }).onfinish = () => {
                this.$.scriptUpdateNotice.style.marginBottom = marginBot;
                this.hideUpdateMessage = true;
            };
        }
        window.scriptEdit.newSettings.value.updateNotice = false;
    };
    /**
     * Opens the merge dialog comparing the pre-update and post-update
     * script, applying the user's chosen version to the editor and
     * clearing any recorded upgrade errors for this node.
     */
    static showScriptUpdateDiff(this: CrmEditPage) {
        const oldScript = (this.item as CRM.ScriptNode).value.oldScript;
        const newScript = (this.item as CRM.ScriptNode).value.script;
        const chooseDialog = window.doc.externalEditorChooseFile;
        chooseDialog.init(oldScript, newScript, (chosenScript: string) => {
            if (window.app.storageLocal.upgradeErrors) {
                delete window.app.storageLocal.upgradeErrors[this.item.id];
            }
            const editor = window.scriptEdit.editorManager.editor;
            if (!window.scriptEdit.editorManager.isDiff(editor)) {
                editor.setValue(chosenScript);
            }
            setTimeout(() => {
                this.hideUpdateMergeDialog();
            }, 250);
            browserAPI.storage.local.set({
                upgradeErrors: window.app.storageLocal.upgradeErrors || {}
            } as any);
        }, true, window.app.storageLocal.upgradeErrors && window.app.storageLocal.upgradeErrors[this.item.id]);
        window.externalEditor.showMergeDialog(oldScript, newScript);
        chooseDialog.open();
    };
    /**
     * Human-readable "N days ago" string for the node's install date, or
     * null when Intl or the install date is unavailable. The EU/NA date
     * format is detected by probing toLocaleDateString with a known date.
     */
    static getInstallDate(this: CrmEditPage) {
        if (window.Intl && typeof window.Intl === 'object' && this.nodeInfo &&
            this.nodeInfo.installDate) {
            const format = (new Date(2016, 1, 13).toLocaleDateString() === '13-2-2016' ? 'eu' : 'na');
            let date;
            if (format === 'eu') {
                // Re-order day-month-year into month-day-year for Date parsing.
                date = this.nodeInfo.installDate.split('-');
                date = date[1] + '-' + date[0] + '-' + date[2];
            } else {
                date = this.nodeInfo.installDate;
            }
            date = new Date(date);
            // Fixed: previously used getMilliseconds(), which returns only the
            // 0-999ms component (not the epoch timestamp), so the elapsed-days
            // computation always yielded 0.
            const msPerDay = 1000 * 60 * 60 * 24;
            return Math.floor((Date.now() - date.getTime()) / msPerDay) + ' days ago';
        }
        return null;
    };
    /**
     * Element ready hook: swallows clicks inside the dialog, wires the
     * backdrop and version-editing listener once the app exists.
     */
    static ready(this: CrmEditPage) {
        $(this.$$('.popupCont')).click(function(e) {
            e.stopPropagation();
        });
        window.onExists('app').then(() => {
            this._backdropEl = window.app.$$('.backdropCont');
            window.crmEditPage = this;
            // Reset every type flag (isStylesheet was previously missed here,
            // unlike in _unassignItems).
            this.isLink = this.isMenu = this.isScript = this.isDivider = this.isStylesheet = false;
            this.$.nodeInfoVersion.addEventListener('input', () => {
                this.item.nodeInfo.version = this.$.nodeInfoVersion.innerText.length > 0 ?
                    this.$.nodeInfoVersion.innerText : '1.0';
            });
        });
    };
    /**
     * Prepares the dialog for the current item: assigns the type-specific
     * item holder, then (after a short delay) flips the type flags, inits
     * the visible type-specific edit page and animates the dialog in.
     */
    static init(this: CrmEditPage) {
        const valueStorer: {
            isScript: boolean;
            isLink: boolean;
            isMenu: boolean;
            isDivider: boolean;
            isStylesheet: boolean;
        } = {
            isScript: false,
            isLink: false,
            isDivider: false,
            isMenu: false,
            isStylesheet: false
        };
        this.hideUpdateMessage = false;
        this.scriptItem = this.linkItem = this.dividerItem = this.menuItem = this.stylesheetItem = {} as any;
        const node = this.item;
        // Assignments inside the conditions intentionally record which
        // branch matched in valueStorer.
        if ((valueStorer.isScript = node.type === 'script')) {
            this.scriptItem = node as CRM.ScriptNode;
            valueStorer.isLink = valueStorer.isMenu = valueStorer.isDivider = valueStorer.isStylesheet = false;
        } else if ((valueStorer.isLink = node.type === 'link')) {
            this.linkItem = node as CRM.LinkNode;
            valueStorer.isMenu = valueStorer.isDivider = valueStorer.isStylesheet = false;
        } else if ((valueStorer.isStylesheet = node.type === 'stylesheet')) {
            this.stylesheetItem = node as CRM.StylesheetNode;
            valueStorer.isMenu = valueStorer.isDivider = false;
        } else if ((valueStorer.isMenu = node.type === 'menu')) {
            this.menuItem = node as CRM.MenuNode;
            valueStorer.isDivider = false;
        } else {
            valueStorer.isDivider = true;
            this.dividerItem = node as CRM.DividerNode;
        }
        setTimeout(() => {
            window.app.show = true;
            this.isScript = valueStorer.isScript;
            this.isLink = valueStorer.isLink;
            this.isMenu = valueStorer.isMenu;
            this.isDivider = valueStorer.isDivider;
            this.isStylesheet = valueStorer.isStylesheet;
            const page = this.shadowRoot.querySelector('#editPageCont > :not([hidden])') as EditPage;
            page.init.apply(page);
            this._animateIn();
        }, 300);
    }
}
// Register the element immediately when the registration machinery is
// ready; otherwise defer registration until the RegisterReady event fires.
const doRegister = () => window.register(CEP);
if (window.objectify) {
    doRegister();
} else {
    window.addEventListener('RegisterReady', doRegister);
}
}
/**
 * The crm-edit-page Polymer element type: the CEP behavior class combined
 * with its declared properties.
 */
export type CrmEditPage = Polymer.El<'crm-edit-page',
    typeof CrmEditPageElement.CEP & typeof CrmEditPageElement.crmEditPageProperties
>;
import BigNumber from 'bignumber.js';
import { isAddress } from '@parity/abi/lib/util/address';
import { FilterOptions, Options } from '../types';
import {
inAddress,
inAddresses,
inBlockNumber,
inData,
inFilter,
inHash,
inHex,
inNumber10,
inNumber16,
inOptions,
inTraceType,
inDeriveHash,
inDeriveIndex,
inTopics
} from './input';
describe('format/input', () => {
const address = '0x63cf90d3f0410092fc0fca41846f596223979195';
    // inAddress: 0x-prefixing and lowercasing of single addresses.
    describe('inAddress', () => {
        const address = '63cf90d3f0410092fc0fca41846f596223979195';
        it('adds the leading 0x as required', () => {
            expect(inAddress(address)).toEqual(`0x${address}`);
        });
        it('returns verified addresses as-is', () => {
            expect(inAddress(`0x${address}`)).toEqual(`0x${address}`);
        });
        it('returns lowercase equivalents', () => {
            expect(inAddress(address.toUpperCase())).toEqual(`0x${address}`);
        });
        it('returns 0x on null addresses', () => {
            expect(inAddress()).toEqual('0x');
        });
    });
    // inAddresses: maps inAddress over an array, tolerating missing entries.
    describe('inAddresses', () => {
        it('handles empty values', () => {
            expect(inAddresses()).toEqual([]);
        });
        it('handles mapping of all addresses in array', () => {
            const address = '63cf90d3f0410092fc0fca41846f596223979195';
            expect(inAddresses([undefined, address])).toEqual(['0x', `0x${address}`]);
        });
    });
    // inBlockNumber: named block tags pass through; numeric forms hex-encode.
    describe('inBlockNumber()', () => {
        it('returns earliest as-is', () => {
            expect(inBlockNumber('earliest')).toEqual('earliest');
        });
        it('returns latest as-is', () => {
            expect(inBlockNumber('latest')).toEqual('latest');
        });
        it('returns pending as-is', () => {
            expect(inBlockNumber('pending')).toEqual('pending');
        });
        it('formats existing BigNumber into hex', () => {
            expect(inBlockNumber(new BigNumber(0x123456))).toEqual('0x123456');
        });
        it('formats hex strings into hex', () => {
            expect(inBlockNumber('0x123456')).toEqual('0x123456');
        });
        it('formats numbers into hex', () => {
            expect(inBlockNumber(0x123456)).toEqual('0x123456');
        });
    });
    // inData: arbitrary strings become 0x-prefixed hex byte data.
    describe('inData', () => {
        it('formats to hex', () => {
            expect(inData('123456')).toEqual('0x123456');
        });
        it('converts a string to a hex representation', () => {
            expect(inData('jaco')).toEqual('0x6a61636f');
        });
    });
    // inHex: normalises to a lowercase 0x-prefixed hex string.
    describe('inHex', () => {
        it('leaves leading 0x as-is', () => {
            expect(inHex('0x123456')).toEqual('0x123456');
        });
        it('adds a leading 0x', () => {
            expect(inHex('123456')).toEqual('0x123456');
        });
        it('returns uppercase as lowercase (leading 0x)', () => {
            expect(inHex('0xABCDEF')).toEqual('0xabcdef');
        });
        it('returns uppercase as lowercase (no leading 0x)', () => {
            expect(inHex('ABCDEF')).toEqual('0xabcdef');
        });
        it('handles empty & null', () => {
            expect(inHex()).toEqual('0x');
            expect(inHex('')).toEqual('0x');
        });
    });
    // inHash: delegates to hex normalisation for hashes.
    describe('inHash', () => {
        it('leaves leading 0x as-is', () => {
            expect(inHash('0x123456')).toEqual('0x123456');
        });
    });
    // inFilter: per-key conversion of log filter options.
    describe('inFilter', () => {
        ['address' as keyof FilterOptions].forEach(input => {
            it(`formats ${input} address as address`, () => {
                const block: FilterOptions = {};
                // @ts-ignore
                block[input] = address;
                const formatted = inFilter(block)[input];
                expect(isAddress(formatted as string)).toBe(true);
                expect(formatted).toEqual(address);
            });
        });
        (['fromBlock', 'toBlock'] as (keyof FilterOptions)[]).forEach(input => {
            it(`formats ${input} number as blockNumber`, () => {
                const block: FilterOptions = {};
                // @ts-ignore
                block[input] = 0x123;
                const formatted = inFilter(block)[input];
                expect(formatted).toEqual('0x123');
            });
        });
        it('ignores and passes through unknown keys', () => {
            expect(inFilter({ someRandom: 'someRandom' } as any)).toEqual({
                someRandom: 'someRandom'
            });
        });
        it('formats an filter options object with relevant entries converted', () => {
            expect(
                inFilter({
                    address: address,
                    fromBlock: 'latest',
                    toBlock: 0x101,
                    extraData: 'someExtraStuffInHere',
                    limit: 0x32
                })
            ).toEqual({
                address: address,
                fromBlock: 'latest',
                toBlock: '0x101',
                extraData: 'someExtraStuffInHere',
                limit: 50
            });
        });
    });
    // inNumber10: any numeric form becomes a base-10 number.
    describe('inNumber10()', () => {
        it('formats existing BigNumber into number', () => {
            expect(inNumber10(new BigNumber(123))).toEqual(123);
        });
        it('formats hex strings into decimal', () => {
            expect(inNumber10('0x0a')).toEqual(10);
        });
        it('formats numbers into number', () => {
            expect(inNumber10(123)).toEqual(123);
        });
        it('formats undefined into 0', () => {
            expect(inNumber10()).toEqual(0);
        });
    });
    // inNumber16: any numeric form becomes a 0x-prefixed hex string.
    describe('inNumber16()', () => {
        it('formats existing BigNumber into hex', () => {
            expect(inNumber16(new BigNumber(0x123456))).toEqual('0x123456');
        });
        it('formats hex strings into hex', () => {
            expect(inNumber16('0x123456')).toEqual('0x123456');
        });
        it('formats numbers into hex', () => {
            expect(inNumber16(0x123456)).toEqual('0x123456');
        });
        it('formats undefined into 0', () => {
            expect(inNumber16()).toEqual('0x0');
        });
    });
    // inOptions: per-key conversion of transaction options.
    describe('inOptions', () => {
        ['data' as keyof Options].forEach(input => {
            it(`converts ${input} to hex data`, () => {
                const block: Options = {};
                block[input] = '1234';
                const formatted = inData(block[input]);
                expect(formatted).toEqual('0x1234');
            });
        });
        (['from', 'to'] as (keyof Options)[]).forEach(input => {
            it(`formats ${input} address as address`, () => {
                const block: Options = {};
                block[input] = address;
                const formatted = inOptions(block)[input] as string;
                expect(isAddress(formatted)).toBe(true);
                expect(formatted).toEqual(address);
            });
        });
        it('does not encode an empty `to` value', () => {
            const options = { to: '' };
            const formatted = inOptions(options);
            expect(formatted.to).toEqual(undefined);
        });
        (['gas', 'gasPrice', 'value', 'nonce'] as (keyof Options)[]).forEach(
            input => {
                it(`formats ${input} number as hexnumber`, () => {
                    const block: Options = {};
                    block[input] = 0x123;
                    const formatted = inOptions(block)[input];
                    expect(formatted).toEqual('0x123');
                });
            }
        );
        it('passes condition as null when specified as such', () => {
            expect(inOptions({ condition: null })).toEqual({ condition: null });
        });
        it('ignores and passes through unknown keys', () => {
            expect(inOptions({ someRandom: 'someRandom' } as any)).toEqual({
                someRandom: 'someRandom'
            });
        });
        it('formats an options object with relevant entries converted', () => {
            // Mixed input forms (BigNumber, number, hex string) all normalise
            // to hex strings; 258 === 0x102.
            expect(
                inOptions({
                    from: address,
                    to: address,
                    gas: new BigNumber('0x100'),
                    gasPrice: 0x101,
                    value: 258,
                    nonce: '0x104',
                    data: '0123456789',
                    extraData: 'someExtraStuffInHere'
                })
            ).toEqual({
                from: address,
                to: address,
                gas: '0x100',
                gasPrice: '0x101',
                value: '0x102',
                nonce: '0x104',
                data: '0x0123456789',
                extraData: 'someExtraStuffInHere'
            });
        });
    });
    // inTraceType: single type strings are wrapped into arrays.
    describe('inTraceType', () => {
        it('returns array of types as is', () => {
            const types = ['vmTrace', 'trace', 'stateDiff'];
            expect(inTraceType(types)).toEqual(types);
        });
        it('formats single string type into array', () => {
            const type = 'vmTrace';
            expect(inTraceType(type)).toEqual([type]);
        });
    });
    // inDeriveHash: normalises hash derivation inputs to {hash, type},
    // defaulting the derivation type to 'soft'.
    describe('inDeriveHash', () => {
        it('returns derive hash', () => {
            expect(inDeriveHash(1)).toEqual({
                hash: '0x1',
                type: 'soft'
            });
            expect(inDeriveHash(null)).toEqual({
                hash: '0x',
                type: 'soft'
            });
            expect(
                inDeriveHash({
                    hash: 5
                })
            ).toEqual({
                hash: '0x5',
                type: 'soft'
            });
            expect(
                inDeriveHash({
                    hash: 5,
                    type: 'hard'
                })
            ).toEqual({
                hash: '0x5',
                type: 'hard'
            });
        });
    });
    // inDeriveIndex: normalises index derivation inputs to an array of
    // {index, type}, defaulting the derivation type to 'soft'.
    describe('inDeriveIndex', () => {
        it('returns derive hash', () => {
            expect(inDeriveIndex(null)).toEqual([]);
            expect(inDeriveIndex([])).toEqual([]);
            expect(inDeriveIndex([1])).toEqual([
                {
                    index: 1,
                    type: 'soft'
                }
            ]);
            expect(
                inDeriveIndex({
                    index: 1
                })
            ).toEqual([
                {
                    index: 1,
                    type: 'soft'
                }
            ]);
            expect(
                inDeriveIndex([
                    {
                        index: 1,
                        type: 'hard'
                    },
                    5
                ])
            ).toEqual([
                {
                    index: 1,
                    type: 'hard'
                },
                {
                    index: 5,
                    type: 'soft'
                }
            ]);
        });
    });
    // inTopics: topics are zero-padded to 32 bytes; null wildcards and
    // nested OR-arrays are preserved.
    describe('inTopics', () => {
        it('returns empty array when no inputs provided', () => {
            expect(inTopics()).toEqual([]);
        });
        it('keeps null topic as null', () => {
            expect(inTopics([null])).toEqual([null]);
        });
        it('pads topics as received', () => {
            expect(inTopics(['123'])).toEqual([
                '0x0000000000000000000000000000000000000000000000000000000000000123'
            ]);
        });
        it('handles nested arrays', () => {
            expect(inTopics([null, '123', ['456', null, '789']])).toEqual([
                null,
                '0x0000000000000000000000000000000000000000000000000000000000000123',
                [
                    '0x0000000000000000000000000000000000000000000000000000000000000456',
                    null,
                    '0x0000000000000000000000000000000000000000000000000000000000000789'
                ]
            ]);
        });
    });
}); | the_stack |
import * as chai from 'chai';
import * as sinon from 'sinon';
import AudioVideoController from '../../src/audiovideocontroller/AudioVideoController';
import AudioVideoControllerState from '../../src/audiovideocontroller/AudioVideoControllerState';
import NoOpAudioVideoController from '../../src/audiovideocontroller/NoOpAudioVideoController';
import FullJitterBackoff from '../../src/backoff/FullJitterBackoff';
import BrowserBehavior from '../../src/browserbehavior/BrowserBehavior';
import DefaultBrowserBehavior from '../../src/browserbehavior/DefaultBrowserBehavior';
import ConnectionMonitor from '../../src/connectionmonitor/ConnectionMonitor';
import Logger from '../../src/logger/Logger';
import NoOpMediaStreamBroker from '../../src/mediastreambroker/NoOpMediaStreamBroker';
import DefaultReconnectController from '../../src/reconnectcontroller/DefaultReconnectController';
import TimeoutScheduler from '../../src/scheduler/TimeoutScheduler';
import DefaultSignalingClient from '../../src/signalingclient/DefaultSignalingClient';
import SignalingClient from '../../src/signalingclient/SignalingClient';
import SignalingClientConnectionRequest from '../../src/signalingclient/SignalingClientConnectionRequest';
import DefaultStatsCollector from '../../src/statscollector/DefaultStatsCollector';
import CleanStoppedSessionTask from '../../src/task/CleanStoppedSessionTask';
import Task from '../../src/task/Task';
import DefaultTransceiverController from '../../src/transceivercontroller/DefaultTransceiverController';
import { wait as delay } from '../../src/utils/Utils';
import NoVideoDownlinkBandwidthPolicy from '../../src/videodownlinkbandwidthpolicy/NoVideoDownlinkBandwidthPolicy';
import VideoAdaptiveProbePolicy from '../../src/videodownlinkbandwidthpolicy/VideoAdaptiveProbePolicy';
import VideoTile from '../../src/videotile/VideoTile';
import DefaultVideoTileController from '../../src/videotilecontroller/DefaultVideoTileController';
import DefaultVideoTileFactory from '../../src/videotilefactory/DefaultVideoTileFactory';
import NoVideoUplinkBandwidthPolicy from '../../src/videouplinkbandwidthpolicy/NoVideoUplinkBandwidthPolicy';
import NScaleVideoUplinkBandwidthPolicy from '../../src/videouplinkbandwidthpolicy/NScaleVideoUplinkBandwidthPolicy';
import DefaultWebSocketAdapter from '../../src/websocketadapter/DefaultWebSocketAdapter';
import DOMMockBehavior from '../dommock/DOMMockBehavior';
import DOMMockBuilder from '../dommock/DOMMockBuilder';
describe('CleanStoppedSessionTask', () => {
    const expect: Chai.ExpectStatic = chai.expect;
    // Reconnect controller tuning shared by every test's controller state.
    const RECONNECT_TIMEOUT_MS = 120 * 1000;
    const RECONNECT_FIXED_WAIT_MS = 0;
    const RECONNECT_SHORT_BACKOFF_MS = 1 * 1000;
    const RECONNECT_LONG_BACKOFF_MS = 5 * 1000;
    const behavior = new DOMMockBehavior();
    const browserBehavior = new DefaultBrowserBehavior();
    // Re-created for every test in beforeEach.
    let context: AudioVideoControllerState;
    let domMockBuilder: DOMMockBuilder | null = null;
    let task: Task;
    let webSocketAdapter: DefaultWebSocketAdapter;
    let signalingClient: SignalingClient;
    let request: SignalingClientConnectionRequest;
class TestStatsCollector extends DefaultStatsCollector {
constructor(
audioVideoController: AudioVideoController,
logger: Logger,
browser: BrowserBehavior
) {
super(audioVideoController, logger, browser);
}
start(): boolean {
return false;
}
stop(): void {}
}
    // Connection monitor stub: satisfies the interface without doing any
    // real monitoring.
    class TestConnectionMonitor implements ConnectionMonitor {
        start(): void {}
        stop(): void {}
    }
    // Builds a fully-populated controller state: mocked DOM, stub stats
    // collector and connection monitor, active audio/video inputs, an open
    // signaling connection, and a fresh CleanStoppedSessionTask under test.
    beforeEach(async () => {
        domMockBuilder = new DOMMockBuilder(behavior);
        context = new AudioVideoControllerState();
        context.audioVideoController = new NoOpAudioVideoController();
        context.logger = context.audioVideoController.logger;
        context.realtimeController = context.audioVideoController.realtimeController;
        context.videoTileController = context.audioVideoController.videoTileController;
        context.mediaStreamBroker = new NoOpMediaStreamBroker();
        context.statsCollector = new TestStatsCollector(
            context.audioVideoController,
            context.logger,
            browserBehavior
        );
        context.connectionMonitor = new TestConnectionMonitor();
        context.transceiverController = new DefaultTransceiverController(
            context.logger,
            browserBehavior
        );
        context.videoUplinkBandwidthPolicy = new NoVideoUplinkBandwidthPolicy();
        context.videoDownlinkBandwidthPolicy = new NoVideoDownlinkBandwidthPolicy();
        context.videoTileController = new DefaultVideoTileController(
            new DefaultVideoTileFactory(),
            context.audioVideoController,
            context.logger
        );
        // The mocked MediaStreamTrack constructor accepts (id, kind).
        // @ts-ignore
        const audioTrack = new MediaStreamTrack('attach-media-input-task-audio-track-id', 'audio');
        // @ts-ignore
        const videoTrack = new MediaStreamTrack('attach-media-input-task-video-track-id', 'video');
        context.activeAudioInput = new MediaStream();
        context.activeAudioInput.addTrack(audioTrack);
        context.activeVideoInput = new MediaStream();
        context.activeVideoInput.addTrack(videoTrack);
        context.reconnectController = new DefaultReconnectController(
            RECONNECT_TIMEOUT_MS,
            new FullJitterBackoff(
                RECONNECT_FIXED_WAIT_MS,
                RECONNECT_SHORT_BACKOFF_MS,
                RECONNECT_LONG_BACKOFF_MS
            )
        );
        class Observer {
            removeObserver(): void {}
        }
        context.removableObservers = [new Observer()];
        webSocketAdapter = new DefaultWebSocketAdapter(context.logger);
        signalingClient = new DefaultSignalingClient(webSocketAdapter, context.logger);
        context.signalingClient = signalingClient;
        context.peer = new RTCPeerConnection();
        request = new SignalingClientConnectionRequest('ws://localhost:9999/control', 'test-auth');
        context.signalingClient.openConnection(request);
        // Let the mocked WebSocket finish opening before the test body runs.
        await delay(behavior.asyncWaitMs + 10);
        task = new CleanStoppedSessionTask(context);
    });
afterEach(() => {
domMockBuilder.cleanup();
});
    describe('run', () => {
        it('closes the connection', done => {
            expect(context.signalingClient.ready()).to.equal(true);
            task.run().then(() => {
                expect(context.signalingClient.ready()).to.equal(false);
                done();
            });
        });
        it('does not close the connection if already closed', done => {
            expect(context.signalingClient.ready()).to.equal(true);
            context.signalingClient.closeConnection();
            task.run().then(() => {
                expect(context.signalingClient.ready()).to.equal(false);
                done();
            });
        });
        // NOTE(review): the test title says "sets ... to null" but the
        // assertions expect the inputs to be non-null — presumably the task
        // no longer nulls the inputs and the title is stale; confirm intent.
        it('sets audio and video input to null', done => {
            task.run().then(() => {
                expect(context.activeAudioInput).to.not.be.null;
                expect(context.activeVideoInput).to.not.be.null;
                done();
            });
        });
        it('does not release audio and video device', done => {
            let releaseFunctionCalled = false;
            class MockMediaStreamBroker extends NoOpMediaStreamBroker {
                releaseMediaStream(_mediaStream: MediaStream): void {
                    releaseFunctionCalled = true;
                }
            }
            context.mediaStreamBroker = new MockMediaStreamBroker();
            task.run().then(() => {
                expect(releaseFunctionCalled).to.not.be.true;
                done();
            });
        });
        it('clears local video if exists', done => {
            context.activeAudioInput = null;
            const videoInput = new MediaStream();
            context.activeVideoInput = videoInput;
            // Tile controller that exposes a bindable local tile so the task's
            // local-video cleanup path can be observed.
            class TestVideoTileController extends DefaultVideoTileController {
                private testLocalTile: VideoTile | null = null;
                startLocalVideoTile(): number {
                    this.testLocalTile = this.addVideoTile();
                    this.testLocalTile.bindVideoStream(
                        'fake-id',
                        true,
                        context.activeVideoInput,
                        640,
                        480,
                        0
                    );
                    return this.testLocalTile.id();
                }
                getLocalVideoTile(): VideoTile {
                    return this.testLocalTile;
                }
            }
            context.videoTileController = new TestVideoTileController(
                new DefaultVideoTileFactory(),
                context.audioVideoController,
                context.logger
            );
            context.videoTileController.startLocalVideoTile();
            task.run().then(() => {
                const localTile = context.videoTileController.getLocalVideoTile();
                expect(localTile.state().boundVideoStream).to.equal(null);
                done();
            });
        });
        it('clear transceiver from video uplink bandwidth if needed', done => {
            context.videoUplinkBandwidthPolicy = new NScaleVideoUplinkBandwidthPolicy('');
            context.videoUplinkBandwidthPolicy.setTransceiverController(context.transceiverController);
            const spy = sinon.spy(context.videoUplinkBandwidthPolicy, 'setTransceiverController');
            task.run().then(() => {
                expect(spy.calledOnceWith(undefined)).to.be.true;
                done();
            });
        });
        it('clear tile controller from video downlink bandwidth if needed', done => {
            context.videoDownlinkBandwidthPolicy = new VideoAdaptiveProbePolicy(context.logger);
            context.videoDownlinkBandwidthPolicy.bindToTileController(
                new DefaultVideoTileController(
                    new DefaultVideoTileFactory(),
                    context.audioVideoController,
                    context.logger
                )
            );
            const spy = sinon.spy(context.videoDownlinkBandwidthPolicy, 'bindToTileController');
            task.run().then(() => {
                expect(spy.calledOnceWith(undefined)).to.be.true;
                done();
            });
        });
        it('clears local audio only', async () => {
            context.activeAudioInput = new MediaStream();
            context.activeVideoInput = null;
            await task.run();
            expect(context.activeAudioInput).to.not.be.null;
            expect(context.activeVideoInput).to.be.null;
        });
        it('stops the stats collector the connection monitor', async () => {
            const statsCollectorSpy = sinon.spy(context.statsCollector, 'stop');
            const connectionMonitorSpy = sinon.spy(context.connectionMonitor, 'stop');
            await task.run();
            expect(statsCollectorSpy.called).to.be.true;
            expect(connectionMonitorSpy.called).to.be.true;
        });
        it('removes all video tiles', async () => {
            const removeAllVideoTilesSpy = sinon.spy(context.videoTileController, 'removeAllVideoTiles');
            await task.run();
            expect(removeAllVideoTilesSpy.called).to.be.true;
        });
        it('closes the peer', async () => {
            const peerSpy = sinon.spy(context.peer, 'close');
            await task.run();
            expect(peerSpy.called).to.be.true;
        });
        it('can be run when the peer is not available', done => {
            context.peer = null;
            task.run().then(done);
        });
        it('continues to clean up remaining audio-video state regardless of the close connection failure', done => {
            // Signaling client that always reports ready but throws on close,
            // forcing the task down its error path.
            class TestSignalingClient extends DefaultSignalingClient {
                ready(): boolean {
                    return true;
                }
                closeConnection(): void {
                    throw new Error();
                }
            }
            context.signalingClient = new TestSignalingClient(webSocketAdapter, context.logger);
            expect(context.peer).to.not.be.null;
            task
                .run()
                .then(() => {})
                .catch(() => {
                    // Even though closeConnection threw, the peer must still
                    // have been cleaned up.
                    expect(context.peer).to.be.null;
                    done();
                });
        });
    });
describe('cancel', () => {
  it('should cancel the task and throw an error but should clean up remaining audio-video state', done => {
    expect(context.peer).to.not.be.null;
    task
      .run()
      .then(() => {
        expect(context.peer).to.be.null;
      })
      .catch(() => {
        done();
      });
    // Cancel immediately after starting the run; the task is expected to reject.
    task.cancel();
  });
  it('should not close the WebSocket connection if the message is not the WebSocket closed event', done => {
    expect(context.signalingClient.ready()).to.equal(true);
    // Signaling client that stays "ready" and silently ignores close requests.
    class TestSignalingClient extends DefaultSignalingClient {
      ready(): boolean {
        return true;
      }
      closeConnection(): void {}
    }
    context.signalingClient = new TestSignalingClient(webSocketAdapter, context.logger);
    task
      .run()
      .then(() => {
        done('This line should not be reached.');
      })
      .catch(() => {
        expect(context.peer).to.be.null;
        done();
      });
    context.signalingClient.leave();
    // Cancel only after the async leave has had a chance to be processed.
    new TimeoutScheduler(behavior.asyncWaitMs).start(() => {
      task.cancel();
    });
  });
  it('will cancel idempotently', async () => {
    // Cancelling twice before running must not throw or double-reject.
    task.cancel();
    task.cancel();
    try {
      await task.run();
    } catch (_err) {}
  });
});
}); | the_stack |
import * as React from 'react';
import * as ReactDOM from 'react-dom';
import assert from '../../common/assert';
import { Types } from '../../common/Interfaces';
import * as _ from './../utils/lodashMini';
// Reads an element's layout offset (relative to its offsetParent) as a plain object.
function getPosition(el: HTMLElement): { left: number; top: number } {
    const left = el.offsetLeft;
    const top = el.offsetTop;
    return { left, top };
}
type ChildKey = string | number;

// Collects the React `key` of every truthy child, asserting that each child
// carries a key (required for child animations to track identity).
function extractChildrenKeys(children: React.ReactNode | undefined): ChildKey[] {
    const keys: ChildKey[] = [];
    if (!children) {
        return keys;
    }
    React.Children.forEach(children, child => {
        if (!child) {
            return;
        }
        const element = child as React.ReactElement<any>;
        assert(
            element.key !== undefined && element.key !== null,
            'Children passed to a `View` with child animations enabled must have a `key`',
        );
        if (element.key !== null) {
            keys.push(element.key);
        }
    });
    return keys;
}
// Returns true if the children were edited (e.g. an item was added, moved, or removed).
// We use this information to determine whether or not we'll need to play any list edit
// animations.
function childrenEdited(prevChildrenKeys: ChildKey[], nextChildrenKeys: ChildKey[]): boolean {
    // Keys are flat string/number arrays, so an element-wise comparison is
    // equivalent to the previous lodash `_.isEqual` deep compare and avoids the
    // deep-equality machinery entirely.
    if (prevChildrenKeys.length !== nextChildrenKeys.length) {
        return true;
    }
    return nextChildrenKeys.some((key, i) => key !== prevChildrenKeys[i]);
}
// Map from a child's React `key` to the child element itself.
interface ChildrenMap {
    [key: string]: React.ReactElement<any>;
}
// Builds a map from each child's React `key` to the child element itself.
// Mirrors extractChildrenKeys: every truthy child must carry a non-null key.
function createChildrenMap(children: React.ReactNode | undefined): ChildrenMap {
    const map: ChildrenMap = {};
    if (children) {
        React.Children.forEach(children, function(child) {
            if (child) {
                const childReactElement = child as React.ReactElement<any>;
                // Check the key's value, matching extractChildrenKeys. The previous
                // `'key' in element` test could never fail: React elements always
                // define the `key` property, even when it is null.
                assert(
                    childReactElement.key !== undefined && childReactElement.key !== null,
                    'Children passed to a `View` with child animations enabled must have a `key`',
                );
                // Renamed from `index`, which shadowed the forEach index parameter.
                const key = childReactElement.key;
                if (key !== null) {
                    map[key] = childReactElement;
                }
            }
        });
    }
    return map;
}
// Snapshots the current layout position of every mounted child, keyed by child key.
function computePositions(refs: { [key: string]: MountedChildrenRef }) {
    const positions: {[key: string]: { left: number; top: number }} = {};
    for (const key of Object.keys(refs)) {
        positions[key] = getPosition(refs[key].domElement);
    }
    return positions;
}
// A child that was newly added to the list.
export interface AddEdit {
    element: React.Component<any, any> | Element;
}
// A child that remained but changed position; deltas are the pixel offset change
// between the previous and the current render.
export interface MoveEdit {
    element: React.Component<any, any> | Element;
    leftDelta: number;
    topDelta: number;
}
// A child that was removed; deltas are its (still rendered) position change
// between the previous and the current render.
export interface RemoveEdit {
    element: React.Component<any, any> | Element;
    leftDelta: number;
    topDelta: number;
}
// The full set of list edits handed to componentWillAnimate.
export interface Edits {
    added: AddEdit[];
    moved: MoveEdit[];
    removed: RemoveEdit[];
}
// The states the React component can be in.
// Transitions: rest -> willAnimate (edit detected in componentWillUpdate)
// -> animating (animation started in componentDidUpdate) -> rest (done callback).
enum ComponentPhaseEnum {
    // The rest state. The component is not in the middle of anything.
    rest,
    // The component is about to play an animation. This occurs when the component
    // detected a list edit in componentWillUpdate but hasn't yet gotten the opportunity
    // to start the animation in componentDidUpdate.
    willAnimate,
    // The component is in the middle of playing an animation. The component should not
    // rerender while in this state.
    animating
}
// Pieces of information we calculate in componentWillUpdate and consume in componentDidUpdate.
// More specifically, we calculate animation information in componentWillUpdate and start the
// animation in componentDidUpdate.
interface WillAnimatePhaseInfo {
    // Children present only in the next render.
    added: React.ReactElement<any>[];
    // Children present only in the previous render.
    removed: React.ReactElement<any>[];
    // Children present in both renders (candidates for "moved").
    other: React.ReactElement<any>[];
    // Layout positions of all mounted children before the update, keyed by child key.
    prevPositions: {[key: string]: { left: number; top: number }};
    prevChildrenMap: ChildrenMap;
}
// A mounted child's React component plus its cached DOM node.
interface MountedChildrenRef {
    reactElement: React.Component<any, any> | Element;
    domElement: HTMLElement;
}
export interface MonitorListEditsProps extends React.HTMLAttributes<any> {
    // Called when list edits are detected. Gives you an opportunity to animate them.
    // Call `done` when the animations are finished. Until `done` is called, the component
    // will refuse to rerender.
    componentWillAnimate: (edits: Edits, done: () => void) => void;
    testId?: string;
}
/**
 * Renders its children inside a <div> and watches for keyed-list edits
 * (children added, removed, or reordered). When an edit is detected the
 * component snapshots child positions, re-renders (keeping removed children
 * mounted), computes position deltas, and hands the resulting Edits to
 * `props.componentWillAnimate`. Rerendering is suspended until the caller
 * invokes the provided `done` callback.
 */
export class MonitorListEdits extends React.Component<MonitorListEditsProps, Types.Stateless> {
    private _itemRefs: { [key: string]: MountedChildrenRef } = {}; // Updated after render but before componentDidUpdate
    // Per-child cache of wrapper ref callbacks so each child keeps a stable ref
    // function across renders. (Note: 'exisiting' is a pre-existing identifier
    // typo for 'existing', preserved to avoid a behavioral diff here.)
    private _refReplacementCache: {
        [key: string]: {
            replacement: (ref: React.Component<any, any>) => any;
            exisiting: string | ((ref: React.Component<any, any>) => any);
        };
    } = {};
    private _isMounted = false;
    // These are assigned in component will mount - will get value before used
    private _childrenKeys!: ChildKey[]; // Updated in componentWillUpdate
    private _childrenMap!: ChildrenMap; // Updated in componentWillUpdate
    // Extracted to don't leak it, shouldn't be a problem as js is singlethreaded
    private _childrenToRender!: JSX.Element[];
    private _phase: ComponentPhaseEnum = ComponentPhaseEnum.rest;
    private _willAnimatePhaseInfo: WillAnimatePhaseInfo | undefined;
    UNSAFE_componentWillMount() {
        // Seed key/child bookkeeping so the first update has a baseline to diff against.
        this._childrenKeys = extractChildrenKeys(this.props.children);
        this._childrenMap = createChildrenMap(this.props.children);
    }
    componentDidMount() {
        this._isMounted = true;
    }
    componentWillUnmount() {
        this._isMounted = false;
    }
    shouldComponentUpdate() {
        // Refuse to rerender while an animation is in flight; we resume via
        // forceUpdate() once the caller signals completion.
        return this._phase !== ComponentPhaseEnum.animating;
    }
    UNSAFE_componentWillUpdate(nextProps: MonitorListEditsProps) {
        assert(
            this._phase !== ComponentPhaseEnum.animating,
            'componentWillUpdate should never run while the component is animating due to the implementation of shouldComponentUpdate',
        );
        const prevChildrenKeys = this._childrenKeys;
        const nextChildrenKeys = extractChildrenKeys(nextProps.children);
        this._childrenKeys = nextChildrenKeys;
        if (childrenEdited(prevChildrenKeys, nextChildrenKeys)) {
            const prevChildrenMap = this._childrenMap;
            const nextChildrenMap = createChildrenMap(nextProps.children);
            this._childrenMap = nextChildrenMap;
            // Partition children by key into removed / added / retained ("other").
            const removed: React.ReactElement<any>[] = [];
            const added: React.ReactElement<any>[] = [];
            const other: React.ReactElement<any>[] = [];
            Object.keys(prevChildrenMap).forEach(function(key) {
                if (!(key in nextChildrenMap)) {
                    removed.push(prevChildrenMap[key]);
                }
            });
            Object.keys(nextChildrenMap).forEach(function(key) {
                if (!(key in prevChildrenMap)) {
                    added.push(nextChildrenMap[key]);
                } else {
                    other.push(nextChildrenMap[key]);
                }
            });
            // Snapshot current positions now; componentDidUpdate diffs against them.
            this._phase = ComponentPhaseEnum.willAnimate;
            this._willAnimatePhaseInfo = {
                added: added,
                removed: removed,
                other: other,
                prevPositions: computePositions(this._itemRefs),
                prevChildrenMap: prevChildrenMap,
            };
        }
    }
    render() {
        this._childrenToRender = [];
        // We need to cast this to "any" because of a recent bug introduced
        // into React @types where children is redfined as ReactNode rather
        // than ReactNode[].
        _.each(this.props.children as any, child => {
            if (child) {
                const childElement = child;
                let refData = this._refReplacementCache[childElement.key];
                // Reuse the cached replacement ref function instead of recreating it every render, unless the child's ref changes.
                if (!refData || refData.exisiting !== childElement.ref) {
                    refData = {
                        replacement: refValue => { this._saveRef(childElement, refValue); },
                        exisiting: childElement.ref,
                    };
                    this._refReplacementCache[childElement.key] = refData;
                }
                this._childrenToRender.push(React.cloneElement(childElement, { ref: refData.replacement }));
            }
        });
        // Keep removed children mounted during the willAnimate phase so the
        // caller can animate them out before they disappear.
        if (this._phase === ComponentPhaseEnum.willAnimate) {
            _.each(this._willAnimatePhaseInfo!.removed, childElement => {
                if (childElement) {
                    this._childrenToRender.push(React.cloneElement(childElement, {
                        ref: (refValue: React.Component<any, any>) => {
                            this._saveRef(childElement, refValue);
                        },
                    }));
                }
            });
        }
        // Do a shallow clone and remove the props that don't
        // apply to div elements.
        const props = _.clone(this.props) as MonitorListEditsProps;
        delete props.componentWillAnimate;
        delete props.testId;
        return (
            <div { ...props } data-test-id={ this.props.testId }>
                { this._childrenToRender }
            </div>
        );
    }
    componentDidUpdate(prevProps: MonitorListEditsProps) {
        assert(
            this._phase !== ComponentPhaseEnum.animating,
            'componentDidUpdate should never run while the component is animating due to the implementation of shouldComponentUpdate',
        );
        if (this._phase === ComponentPhaseEnum.willAnimate) {
            const phaseInfo = this._willAnimatePhaseInfo!;
            const prevPositions = phaseInfo.prevPositions;
            const nextPositions = computePositions(this._itemRefs);
            const added: AddEdit[] = phaseInfo.added.map(child => ({
                element: this._itemRefs[(child as any).key].reactElement,
            }));
            // Removed children are still mounted (see render), so both a previous
            // and a current position exist for them.
            const removed: RemoveEdit[] = phaseInfo.removed.map(child => {
                const key = child.key as any;
                const prevPos = prevPositions[key];
                const nextPos = nextPositions[key];
                return {
                    leftDelta: nextPos.left - prevPos.left,
                    topDelta: nextPos.top - prevPos.top,
                    element: this._itemRefs[key].reactElement,
                };
            });
            const moved: MoveEdit[] = [];
            // Retained children only count as "moved" if their position changed.
            phaseInfo.other.map(child => {
                const key = child.key as any;
                const prevPos = prevPositions[key];
                const nextPos = nextPositions[key];
                if (prevPos.left !== nextPos.left || prevPos.top !== nextPos.top) {
                    moved.push({
                        leftDelta: nextPos.left - prevPos.left,
                        topDelta: nextPos.top - prevPos.top,
                        element: this._itemRefs[key].reactElement,
                    });
                }
            });
            this._phase = ComponentPhaseEnum.animating;
            this._willAnimatePhaseInfo = undefined;
            this.props.componentWillAnimate({
                added: added,
                moved: moved,
                removed: removed,
            }, () => {
                // Animation finished: go back to rest and rerender without the
                // removed children, then drop their cached ref wrappers.
                this._phase = ComponentPhaseEnum.rest;
                if (this._isMounted) {
                    this.forceUpdate();
                }
                phaseInfo.removed.forEach(child => {
                    const key = child.key as any;
                    delete this._refReplacementCache[key];
                });
            });
        }
    }
    // Wrapper ref callback: tracks mounted children in _itemRefs and forwards
    // the ref to the child's original callback ref, if one was provided.
    private _saveRef(reactElement: any, refValue: React.Component<any, any> | null) {
        if (refValue === null) {
            delete this._itemRefs[reactElement.key];
        } else {
            // Cache both the react component reference and the corresponding HTML DOM node (for perf reasons).
            this._itemRefs[reactElement.key] = {
                reactElement: refValue,
                domElement: ReactDOM.findDOMNode(refValue) as HTMLElement,
            };
        }
        assert(
            typeof reactElement.ref === 'function' || reactElement.ref === undefined || reactElement.ref === null,
            'Invalid ref: ' + reactElement.ref + '. Only callback refs are supported when using child animations on a `View`',
        );
        // If the creator of the reactElement also provided a ref, call it.
        if (typeof reactElement.ref === 'function') {
            reactElement.ref(refValue);
        }
    }
}
import { DiffSide, ViewedState } from '../common/comment';
import { ForkDetails } from './githubRepository';
// Timeline event emitted when a pull request is merged.
export interface MergedEvent {
  __typename: string;
  id: string;
  databaseId: number;
  actor: {
    login: string;
    avatarUrl: string;
    url: string;
  };
  createdAt: string;
  mergeRef: {
    name: string;
  };
  commit: {
    oid: string;
    commitUrl: string;
  };
  url: string;
}
// Timeline event emitted when a pull request's head branch is deleted.
export interface HeadRefDeletedEvent {
  __typename: string;
  id: string;
  actor: {
    login: string;
    avatarUrl: string;
    url: string;
  };
  createdAt: string;
  headRefName: string;
}
// Minimal issue-comment shape used where the full IssueComment is not needed.
export interface AbbreviatedIssueComment {
  author: {
    login: string;
    avatarUrl: string;
    url: string;
    email?: string
  };
  body: string;
  databaseId: number;
}
// Full issue comment as returned by the GraphQL timeline query.
export interface IssueComment extends AbbreviatedIssueComment {
  __typename: string;
  authorAssociation: string;
  id: string;
  url: string;
  bodyHTML: string;
  updatedAt: string;
  createdAt: string;
  viewerCanUpdate: boolean;
  viewerCanReact: boolean;
  viewerCanDelete: boolean;
}
// Aggregated reactions of one kind (e.g. thumbs-up) on a subject.
export interface ReactionGroup {
  content: string;
  viewerHasReacted: boolean;
  users: {
    totalCount: number;
  };
}
// A single review comment anchored to a pull request diff.
export interface ReviewComment {
  __typename: string;
  id: string;
  databaseId: number;
  url: string;
  author?: {
    login: string;
    avatarUrl: string;
    url: string;
  };
  path: string;
  originalPosition: number;
  body: string;
  bodyHTML: string;
  diffHunk: string;
  position: number;
  state: string;
  pullRequestReview: {
    databaseId: number;
  };
  commit: {
    oid: string;
  };
  originalCommit: {
    oid: string;
  };
  createdAt: string;
  replyTo: {
    databaseId: number;
  };
  reactionGroups: ReactionGroup[];
  viewerCanUpdate: boolean;
  viewerCanDelete: boolean;
}
// Timeline event for a commit on the pull request branch.
export interface Commit {
  __typename: string;
  id: string;
  commit: {
    author: {
      user: {
        login: string;
        avatarUrl: string;
        url: string;
      };
    };
    committer: {
      avatarUrl: string;
      name: string;
    };
    oid: string;
    message: string;
    authoredDate: Date;
  };
  url: string;
}
// Timeline event emitted when a user is assigned to the issue/pull request.
export interface AssignedEvent {
  __typename: string;
  databaseId: number;
  actor: {
    login: string;
    avatarUrl: string;
    url: string;
  };
  user: {
    login: string;
    avatarUrl: string;
    url: string;
  };
}
// A pull request review and its overall verdict.
export interface Review {
  __typename: string;
  id: string;
  databaseId: number;
  authorAssociation: string;
  url: string;
  author: {
    login: string;
    avatarUrl: string;
    url: string;
  };
  state: 'COMMENTED' | 'APPROVED' | 'CHANGES_REQUESTED' | 'PENDING';
  body: string;
  bodyHTML?: string;
  submittedAt: string;
  updatedAt: string;
  createdAt: string;
}
// A thread of review comments anchored to a diff location.
export interface ReviewThread {
  id: string;
  isResolved: boolean;
  viewerCanResolve: boolean;
  viewerCanUnresolve: boolean;
  path: string;
  diffSide: DiffSide;
  line: number;
  originalLine: number;
  isOutdated: boolean;
  comments: {
    nodes: ReviewComment[];
  };
}
// Response shape for the pull request timeline query.
export interface TimelineEventsResponse {
  repository: {
    pullRequest: {
      timelineItems: {
        nodes: (MergedEvent | Review | IssueComment | Commit | AssignedEvent | HeadRefDeletedEvent)[];
      };
    };
  };
  rateLimit: RateLimit;
}
// Response shape when looking up the viewer's pending review(s).
export interface PendingReviewIdResponse {
  node: {
    reviews: {
      nodes: Review[];
    };
  };
  rateLimit: RateLimit;
}
// Response shape for a lightweight pull request state query.
export interface PullRequestState {
  repository: {
    pullRequest: {
      title: string;
      number: number;
      state: 'OPEN' | 'CLOSED' | 'MERGED';
    };
  };
}
// Response shape for fetching all review threads of a pull request.
export interface PullRequestCommentsResponse {
  repository: {
    pullRequest: {
      reviewThreads: {
        nodes: ReviewThread[];
      };
    };
  };
}
// Paged list of users who can be @-mentioned in the repository.
export interface MentionableUsersResponse {
  repository: {
    mentionableUsers: {
      nodes: {
        login: string;
        avatarUrl: string;
        name: string;
        url: string;
        email: string;
      }[];
      pageInfo: {
        hasNextPage: boolean;
        endCursor: string;
      };
    };
  };
  rateLimit: RateLimit;
}
// Paged list of users assignable to issues/pull requests in the repository.
export interface AssignableUsersResponse {
  repository: {
    assignableUsers: {
      nodes: {
        login: string;
        avatarUrl: string;
        name: string;
        url: string;
        email: string;
      }[];
      pageInfo: {
        hasNextPage: boolean;
        endCursor: string;
      };
    };
  };
  rateLimit: RateLimit;
}
// Mutation response: a new review thread was added.
export interface AddReviewThreadResponse {
  addPullRequestReviewThread: {
    thread: ReviewThread;
  }
}
// Mutation response: a new review comment was added.
export interface AddCommentResponse {
  addPullRequestReviewComment: {
    comment: ReviewComment;
  };
}
// Mutation response: a new issue comment was added.
export interface AddIssueCommentResponse {
  addComment: {
    commentEdge: {
      node: IssueComment;
    };
  };
}
// Mutation response: a review comment was edited.
export interface EditCommentResponse {
  updatePullRequestReviewComment: {
    pullRequestReviewComment: ReviewComment;
  };
}
// Mutation response: an issue comment was edited.
export interface EditIssueCommentResponse {
  updateIssueComment: {
    issueComment: IssueComment;
  };
}
// Mutation response: a draft pull request was marked ready for review.
export interface MarkPullRequestReadyForReviewResponse {
  markPullRequestReadyForReview: {
    pullRequest: {
      isDraft: boolean;
    };
  };
}
// A submitted review together with the comments it contains.
export interface SubmittedReview extends Review {
  comments: {
    nodes: ReviewComment[];
  };
}
// Mutation response: a pending review was submitted.
export interface SubmitReviewResponse {
  submitPullRequestReview: {
    pullRequestReview: SubmittedReview;
  };
}
// Mutation response: a pending review (and its comments) was deleted.
export interface DeleteReviewResponse {
  deletePullRequestReview: {
    pullRequestReview: {
      databaseId: number;
      comments: {
        nodes: ReviewComment[];
      };
    };
  };
}
// Mutation response: a reaction was added to a subject.
export interface AddReactionResponse {
  addReaction: {
    reaction: {
      content: string;
    };
    subject: {
      reactionGroups: ReactionGroup[];
    };
  };
}
// Mutation response: a reaction was removed from a subject.
export interface DeleteReactionResponse {
  removeReaction: {
    reaction: {
      content: string;
    };
    subject: {
      reactionGroups: ReactionGroup[];
    };
  };
}
// Mutation response: the pull request title/body were updated.
export interface UpdatePullRequestResponse {
  updatePullRequest: {
    pullRequest: {
      body: string;
      bodyHTML: string;
      title: string;
    };
  };
}
// The repository a git ref belongs to.
export interface RefRepository {
  owner: {
    login: string;
  };
  url: string;
}
// A git ref (branch) and the commit it points at.
export interface Ref {
  name: string;
  repository: RefRepository;
  target: {
    oid: string;
  };
}
// A reviewer suggested by GitHub for the pull request.
export interface SuggestedReviewerResponse {
  isAuthor: boolean;
  isCommenter: boolean;
  reviewer: {
    login: string;
    avatarUrl: string;
    name: string;
    url: string;
  };
}
// Core pull request (or issue) payload shared by several queries below.
export interface PullRequest {
  id: string;
  databaseId: number;
  number: number;
  url: string;
  state: 'OPEN' | 'CLOSED' | 'MERGED';
  body: string;
  bodyHTML: string;
  title: string;
  assignees?: {
    nodes: {
      login: string;
      url: string;
      email: string;
      avatarUrl: string;
    }[];
  };
  author: {
    login: string;
    url: string;
    avatarUrl: string;
  };
  comments?: {
    nodes: AbbreviatedIssueComment[];
  };
  createdAt: string;
  updatedAt: string;
  headRef?: Ref;
  headRefName: string;
  headRefOid: string;
  headRepository?: RefRepository;
  baseRef?: Ref;
  baseRefName: string;
  baseRefOid: string;
  baseRepository: RefRepository;
  labels: {
    nodes: {
      name: string;
      color: string;
    }[];
  };
  merged: boolean;
  mergeable: 'MERGEABLE' | 'CONFLICTING' | 'UNKNOWN';
  isDraft?: boolean;
  suggestedReviewers: SuggestedReviewerResponse[];
  milestone?: {
    title: string;
    dueOn?: string;
    id: string;
    createdAt: string;
  };
  repository?: {
    name: string;
    owner: {
      login: string;
    };
    url: string;
  };
}
// Response shape for fetching a single pull request.
export interface PullRequestResponse {
  repository: {
    pullRequest: PullRequest;
  };
  rateLimit: RateLimit;
}
// Response shape for the issue/PR search query, with paging info.
export interface IssuesSearchResponse {
  search: {
    issueCount: number;
    pageInfo: {
      hasNextPage: boolean;
      endCursor: string;
    };
    edges: {
      node: PullRequest;
    }[];
  };
  rateLimit: RateLimit;
}
// Milestones with the issues attached to each, paged over milestones.
export interface MilestoneIssuesResponse {
  repository: {
    milestones: {
      nodes: {
        dueOn: string;
        createdAt: string;
        title: string;
        id: string;
        issues: {
          edges: {
            node: PullRequest;
          }[];
        };
      }[];
      pageInfo: {
        hasNextPage: boolean;
        endCursor: string;
      };
    };
  };
}
// Paged list of issues in a repository.
export interface IssuesResponse {
  repository: {
    issues: {
      edges: {
        node: PullRequest;
      }[];
      pageInfo: {
        hasNextPage: boolean;
        endCursor: string;
      };
    };
  };
}
// Query returning only the highest issue number in the repository.
export interface MaxIssueResponse {
  repository: {
    issues: {
      edges: {
        node: {
          number: number;
        };
      }[];
    };
  };
}
// The viewer's permission level on the repository.
export interface ViewerPermissionResponse {
  repository: {
    viewerPermission: string;
  };
}
// Fork metadata for the repository.
export interface ForkDetailsResponse {
  repository: ForkDetails;
}
// Marker for queries that also return rate limit information.
export interface QueryWithRateLimit {
  rateLimit: RateLimit;
}
// GraphQL API rate limit accounting returned with most queries.
export interface RateLimit {
  limit: number;
  cost: number;
  remaining: number;
  resetAt: string;
}
// Per-repository commit contribution timestamps for a user.
export interface ContributionsCollection {
  commitContributionsByRepository: {
    contributions: {
      nodes: {
        occurredAt: string;
      }[];
    };
    repository: {
      nameWithOwner: string;
    };
  }[];
}
// Profile information for a GitHub user.
export interface UserResponse {
  user: {
    login: string;
    avatarUrl?: string;
    bio?: string;
    company?: string;
    location?: string;
    name: string;
    contributionsCollection: ContributionsCollection;
    url: string;
  };
}
// Mutation response: a new (pending) review was started.
export interface StartReviewResponse {
  addPullRequestReview: {
    pullRequestReview: {
      id: string;
    };
  };
}
// A classic commit-status context attached to a commit.
export interface StatusContext {
  id: string;
  state?: 'ERROR' | 'EXPECTED' | 'FAILURE' | 'PENDING' | 'SUCCESS';
  description?: string;
  context: string;
  targetUrl?: string;
  avatarUrl?: string;
}
// A Checks API run attached to a commit.
export interface CheckRun {
  id: string;
  conclusion?:
    | 'ACTION_REQUIRED'
    | 'CANCELLED'
    | 'FAILURE'
    | 'NEUTRAL'
    | 'SKIPPED'
    | 'STALE'
    | 'SUCCESS'
    | 'TIMED_OUT';
  name: string;
  title?: string;
  detailsUrl?: string;
  checkSuite: {
    app?: {
      logoUrl: string;
      url: string;
    };
  };
}
/**
 * Type guard distinguishing a CheckRun from a StatusContext in a status rollup.
 * Discriminates on the GraphQL `__typename` meta field, which is present on the
 * query result even though it is not declared on these interfaces.
 */
export function isCheckRun(x: CheckRun | StatusContext): x is CheckRun {
  // Narrow through a structural type instead of `any` so the property access
  // stays type-checked; runtime behavior is unchanged.
  return (x as { __typename?: string }).__typename === 'CheckRun';
}
// Status rollup (statuses + check runs) for the pull request's commits.
export interface GetChecksResponse {
  repository: {
    pullRequest: {
      commits: {
        nodes: {
          commit: {
            statusCheckRollup?: {
              state: string;
              contexts: {
                nodes: (StatusContext | CheckRun)[];
              };
            };
          };
        }[];
      };
    };
  };
}
// Mutation response: a review thread was resolved.
export interface ResolveReviewThreadResponse {
  resolveReviewThread: {
    thread: ReviewThread;
  }
}
// Mutation response: a review thread was unresolved.
export interface UnresolveReviewThreadResponse {
  unresolveReviewThread: {
    thread: ReviewThread;
  }
}
// Paged list of changed files in a pull request with the viewer's viewed state.
export interface PullRequestFilesResponse {
  repository: {
    pullRequest: {
      files: {
        nodes: {
          path: string;
          viewerViewedState: ViewedState
        }[]
        pageInfo: {
          hasNextPage: boolean;
          endCursor: string;
        };
      }
    }
  }
}
import * as msRest from "@azure/ms-rest-js";
import * as msRestAzure from "@azure/ms-rest-azure-js";
import * as Models from "../models";
import * as Mappers from "../models/frontDoorsMappers";
import * as Parameters from "../models/parameters";
import { FrontDoorManagementClientContext } from "../frontDoorManagementClientContext";
/** Class representing a FrontDoors. */
// NOTE(review): this appears to follow the generated Azure SDK operation-group
// pattern (Promise/callback overload pairs delegating to sendOperationRequest) —
// confirm before hand-editing, as regeneration may overwrite changes.
export class FrontDoors {
  private readonly client: FrontDoorManagementClientContext;
  /**
   * Create a FrontDoors.
   * @param {FrontDoorManagementClientContext} client Reference to the service client.
   */
  constructor(client: FrontDoorManagementClientContext) {
    this.client = client;
  }
  /**
   * Lists all of the Front Doors within an Azure subscription.
   * @param [options] The optional parameters
   * @returns Promise<Models.FrontDoorsListResponse>
   */
  list(options?: msRest.RequestOptionsBase): Promise<Models.FrontDoorsListResponse>;
  /**
   * @param callback The callback
   */
  list(callback: msRest.ServiceCallback<Models.FrontDoorListResult>): void;
  /**
   * @param options The optional parameters
   * @param callback The callback
   */
  list(options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.FrontDoorListResult>): void;
  list(options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.FrontDoorListResult>, callback?: msRest.ServiceCallback<Models.FrontDoorListResult>): Promise<Models.FrontDoorsListResponse> {
    return this.client.sendOperationRequest(
      {
        options
      },
      listOperationSpec,
      callback) as Promise<Models.FrontDoorsListResponse>;
  }
  /**
   * Lists all of the Front Doors within a resource group under a subscription.
   * @param resourceGroupName Name of the Resource group within the Azure subscription.
   * @param [options] The optional parameters
   * @returns Promise<Models.FrontDoorsListByResourceGroupResponse>
   */
  listByResourceGroup(resourceGroupName: string, options?: msRest.RequestOptionsBase): Promise<Models.FrontDoorsListByResourceGroupResponse>;
  /**
   * @param resourceGroupName Name of the Resource group within the Azure subscription.
   * @param callback The callback
   */
  listByResourceGroup(resourceGroupName: string, callback: msRest.ServiceCallback<Models.FrontDoorListResult>): void;
  /**
   * @param resourceGroupName Name of the Resource group within the Azure subscription.
   * @param options The optional parameters
   * @param callback The callback
   */
  listByResourceGroup(resourceGroupName: string, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.FrontDoorListResult>): void;
  listByResourceGroup(resourceGroupName: string, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.FrontDoorListResult>, callback?: msRest.ServiceCallback<Models.FrontDoorListResult>): Promise<Models.FrontDoorsListByResourceGroupResponse> {
    return this.client.sendOperationRequest(
      {
        resourceGroupName,
        options
      },
      listByResourceGroupOperationSpec,
      callback) as Promise<Models.FrontDoorsListByResourceGroupResponse>;
  }
  /**
   * Gets a Front Door with the specified Front Door name under the specified subscription and
   * resource group.
   * @param resourceGroupName Name of the Resource group within the Azure subscription.
   * @param frontDoorName Name of the Front Door which is globally unique.
   * @param [options] The optional parameters
   * @returns Promise<Models.FrontDoorsGetResponse>
   */
  get(resourceGroupName: string, frontDoorName: string, options?: msRest.RequestOptionsBase): Promise<Models.FrontDoorsGetResponse>;
  /**
   * @param resourceGroupName Name of the Resource group within the Azure subscription.
   * @param frontDoorName Name of the Front Door which is globally unique.
   * @param callback The callback
   */
  get(resourceGroupName: string, frontDoorName: string, callback: msRest.ServiceCallback<Models.FrontDoor>): void;
  /**
   * @param resourceGroupName Name of the Resource group within the Azure subscription.
   * @param frontDoorName Name of the Front Door which is globally unique.
   * @param options The optional parameters
   * @param callback The callback
   */
  get(resourceGroupName: string, frontDoorName: string, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.FrontDoor>): void;
  get(resourceGroupName: string, frontDoorName: string, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.FrontDoor>, callback?: msRest.ServiceCallback<Models.FrontDoor>): Promise<Models.FrontDoorsGetResponse> {
    return this.client.sendOperationRequest(
      {
        resourceGroupName,
        frontDoorName,
        options
      },
      getOperationSpec,
      callback) as Promise<Models.FrontDoorsGetResponse>;
  }
  /**
   * Creates a new Front Door with a Front Door name under the specified subscription and resource
   * group.
   * @param resourceGroupName Name of the Resource group within the Azure subscription.
   * @param frontDoorName Name of the Front Door which is globally unique.
   * @param frontDoorParameters Front Door properties needed to create a new Front Door.
   * @param [options] The optional parameters
   * @returns Promise<Models.FrontDoorsCreateOrUpdateResponse>
   */
  createOrUpdate(resourceGroupName: string, frontDoorName: string, frontDoorParameters: Models.FrontDoor, options?: msRest.RequestOptionsBase): Promise<Models.FrontDoorsCreateOrUpdateResponse> {
    // Starts the long-running operation and resolves only once polling completes.
    return this.beginCreateOrUpdate(resourceGroupName,frontDoorName,frontDoorParameters,options)
      .then(lroPoller => lroPoller.pollUntilFinished()) as Promise<Models.FrontDoorsCreateOrUpdateResponse>;
  }
  /**
   * Deletes an existing Front Door with the specified parameters.
   * @param resourceGroupName Name of the Resource group within the Azure subscription.
   * @param frontDoorName Name of the Front Door which is globally unique.
   * @param [options] The optional parameters
   * @returns Promise<msRest.RestResponse>
   */
  deleteMethod(resourceGroupName: string, frontDoorName: string, options?: msRest.RequestOptionsBase): Promise<msRest.RestResponse> {
    // Starts the long-running delete and resolves only once polling completes.
    return this.beginDeleteMethod(resourceGroupName,frontDoorName,options)
      .then(lroPoller => lroPoller.pollUntilFinished());
  }
  /**
   * Validates the custom domain mapping to ensure it maps to the correct Front Door endpoint in DNS.
   * @param resourceGroupName Name of the Resource group within the Azure subscription.
   * @param frontDoorName Name of the Front Door which is globally unique.
   * @param customDomainProperties Custom domain to be validated.
   * @param [options] The optional parameters
   * @returns Promise<Models.FrontDoorsValidateCustomDomainResponse>
   */
  validateCustomDomain(resourceGroupName: string, frontDoorName: string, customDomainProperties: Models.ValidateCustomDomainInput, options?: msRest.RequestOptionsBase): Promise<Models.FrontDoorsValidateCustomDomainResponse>;
  /**
   * @param resourceGroupName Name of the Resource group within the Azure subscription.
   * @param frontDoorName Name of the Front Door which is globally unique.
   * @param customDomainProperties Custom domain to be validated.
   * @param callback The callback
   */
  validateCustomDomain(resourceGroupName: string, frontDoorName: string, customDomainProperties: Models.ValidateCustomDomainInput, callback: msRest.ServiceCallback<Models.ValidateCustomDomainOutput>): void;
  /**
   * @param resourceGroupName Name of the Resource group within the Azure subscription.
   * @param frontDoorName Name of the Front Door which is globally unique.
   * @param customDomainProperties Custom domain to be validated.
   * @param options The optional parameters
   * @param callback The callback
   */
  validateCustomDomain(resourceGroupName: string, frontDoorName: string, customDomainProperties: Models.ValidateCustomDomainInput, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.ValidateCustomDomainOutput>): void;
  validateCustomDomain(resourceGroupName: string, frontDoorName: string, customDomainProperties: Models.ValidateCustomDomainInput, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.ValidateCustomDomainOutput>, callback?: msRest.ServiceCallback<Models.ValidateCustomDomainOutput>): Promise<Models.FrontDoorsValidateCustomDomainResponse> {
    return this.client.sendOperationRequest(
      {
        resourceGroupName,
        frontDoorName,
        customDomainProperties,
        options
      },
      validateCustomDomainOperationSpec,
      callback) as Promise<Models.FrontDoorsValidateCustomDomainResponse>;
  }
  /**
   * Creates a new Front Door with a Front Door name under the specified subscription and resource
   * group.
   * @param resourceGroupName Name of the Resource group within the Azure subscription.
   * @param frontDoorName Name of the Front Door which is globally unique.
   * @param frontDoorParameters Front Door properties needed to create a new Front Door.
   * @param [options] The optional parameters
   * @returns Promise<msRestAzure.LROPoller>
   */
  beginCreateOrUpdate(resourceGroupName: string, frontDoorName: string, frontDoorParameters: Models.FrontDoor, options?: msRest.RequestOptionsBase): Promise<msRestAzure.LROPoller> {
    return this.client.sendLRORequest(
      {
        resourceGroupName,
        frontDoorName,
        frontDoorParameters,
        options
      },
      beginCreateOrUpdateOperationSpec,
      options);
  }
  /**
   * Deletes an existing Front Door with the specified parameters.
   * @param resourceGroupName Name of the Resource group within the Azure subscription.
   * @param frontDoorName Name of the Front Door which is globally unique.
   * @param [options] The optional parameters
   * @returns Promise<msRestAzure.LROPoller>
   */
  beginDeleteMethod(resourceGroupName: string, frontDoorName: string, options?: msRest.RequestOptionsBase): Promise<msRestAzure.LROPoller> {
    return this.client.sendLRORequest(
      {
        resourceGroupName,
        frontDoorName,
        options
      },
      beginDeleteMethodOperationSpec,
      options);
  }
  /**
   * Lists all of the Front Doors within an Azure subscription.
   * @param nextPageLink The NextLink from the previous successful call to List operation.
   * @param [options] The optional parameters
   * @returns Promise<Models.FrontDoorsListNextResponse>
   */
  listNext(nextPageLink: string, options?: msRest.RequestOptionsBase): Promise<Models.FrontDoorsListNextResponse>;
  /**
   * @param nextPageLink The NextLink from the previous successful call to List operation.
   * @param callback The callback
   */
  listNext(nextPageLink: string, callback: msRest.ServiceCallback<Models.FrontDoorListResult>): void;
  /**
   * @param nextPageLink The NextLink from the previous successful call to List operation.
   * @param options The optional parameters
   * @param callback The callback
   */
  listNext(nextPageLink: string, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.FrontDoorListResult>): void;
  listNext(nextPageLink: string, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.FrontDoorListResult>, callback?: msRest.ServiceCallback<Models.FrontDoorListResult>): Promise<Models.FrontDoorsListNextResponse> {
    return this.client.sendOperationRequest(
      {
        nextPageLink,
        options
      },
      listNextOperationSpec,
      callback) as Promise<Models.FrontDoorsListNextResponse>;
  }
  /**
   * Lists all of the Front Doors within a resource group under a subscription.
   * @param nextPageLink The NextLink from the previous successful call to List operation.
   * @param [options] The optional parameters
   * @returns Promise<Models.FrontDoorsListByResourceGroupNextResponse>
   */
  listByResourceGroupNext(nextPageLink: string, options?: msRest.RequestOptionsBase): Promise<Models.FrontDoorsListByResourceGroupNextResponse>;
  /**
   * @param nextPageLink The NextLink from the previous successful call to List operation.
   * @param callback The callback
   */
  listByResourceGroupNext(nextPageLink: string, callback: msRest.ServiceCallback<Models.FrontDoorListResult>): void;
  /**
   * @param nextPageLink The NextLink from the previous successful call to List operation.
   * @param options The optional parameters
   * @param callback The callback
   */
  listByResourceGroupNext(nextPageLink: string, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.FrontDoorListResult>): void;
  listByResourceGroupNext(nextPageLink: string, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.FrontDoorListResult>, callback?: msRest.ServiceCallback<Models.FrontDoorListResult>): Promise<Models.FrontDoorsListByResourceGroupNextResponse> {
    return this.client.sendOperationRequest(
      {
        nextPageLink,
        options
      },
      listByResourceGroupNextOperationSpec,
      callback) as Promise<Models.FrontDoorsListByResourceGroupNextResponse>;
  }
}
// Operation Specifications
// Shared serializer bound to this package's generated mappers.
const serializer = new msRest.Serializer(Mappers);
// GET: first page of all Front Doors in the subscription
// (paging continues through listNextOperationSpec).
const listOperationSpec: msRest.OperationSpec = {
  httpMethod: "GET",
  path: "subscriptions/{subscriptionId}/providers/Microsoft.Network/frontDoors",
  urlParameters: [
    Parameters.subscriptionId
  ],
  queryParameters: [
    Parameters.apiVersion1
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {
      bodyMapper: Mappers.FrontDoorListResult
    },
    default: {
      bodyMapper: Mappers.ErrorResponse
    }
  },
  serializer
};
// GET: first page of Front Doors scoped to a single resource group.
const listByResourceGroupOperationSpec: msRest.OperationSpec = {
  httpMethod: "GET",
  path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/frontDoors",
  urlParameters: [
    Parameters.subscriptionId,
    Parameters.resourceGroupName
  ],
  queryParameters: [
    Parameters.apiVersion1
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {
      bodyMapper: Mappers.FrontDoorListResult
    },
    default: {
      bodyMapper: Mappers.ErrorResponse
    }
  },
  serializer
};
// GET: fetch a single Front Door by name.
const getOperationSpec: msRest.OperationSpec = {
  httpMethod: "GET",
  path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/frontDoors/{frontDoorName}",
  urlParameters: [
    Parameters.subscriptionId,
    Parameters.resourceGroupName,
    Parameters.frontDoorName
  ],
  queryParameters: [
    Parameters.apiVersion1
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {
      bodyMapper: Mappers.FrontDoor
    },
    default: {
      bodyMapper: Mappers.ErrorResponse
    }
  },
  serializer
};
// POST: validates that a custom domain is mapped to the Front Door's frontend.
// The request body is serialized from the `customDomainProperties` argument.
const validateCustomDomainOperationSpec: msRest.OperationSpec = {
  httpMethod: "POST",
  path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/frontDoors/{frontDoorName}/validateCustomDomain",
  urlParameters: [
    Parameters.subscriptionId,
    Parameters.resourceGroupName,
    Parameters.frontDoorName
  ],
  queryParameters: [
    Parameters.apiVersion1
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  requestBody: {
    parameterPath: "customDomainProperties",
    mapper: {
      ...Mappers.ValidateCustomDomainInput,
      required: true
    }
  },
  responses: {
    200: {
      bodyMapper: Mappers.ValidateCustomDomainOutput
    },
    default: {
      bodyMapper: Mappers.ErrorResponse
    }
  },
  serializer
};
// PUT: long-running create-or-update; 200/201/202 all carry the FrontDoor body
// (202 indicates the operation is still being provisioned).
const beginCreateOrUpdateOperationSpec: msRest.OperationSpec = {
  httpMethod: "PUT",
  path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/frontDoors/{frontDoorName}",
  urlParameters: [
    Parameters.subscriptionId,
    Parameters.resourceGroupName,
    Parameters.frontDoorName
  ],
  queryParameters: [
    Parameters.apiVersion1
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  requestBody: {
    parameterPath: "frontDoorParameters",
    mapper: {
      ...Mappers.FrontDoor,
      required: true
    }
  },
  responses: {
    200: {
      bodyMapper: Mappers.FrontDoor
    },
    201: {
      bodyMapper: Mappers.FrontDoor
    },
    202: {
      bodyMapper: Mappers.FrontDoor
    },
    default: {
      bodyMapper: Mappers.ErrorResponse
    }
  },
  serializer
};
// DELETE: long-running delete; 202 = accepted, 204 = already gone. No body on success.
const beginDeleteMethodOperationSpec: msRest.OperationSpec = {
  httpMethod: "DELETE",
  path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/frontDoors/{frontDoorName}",
  urlParameters: [
    Parameters.subscriptionId,
    Parameters.resourceGroupName,
    Parameters.frontDoorName
  ],
  queryParameters: [
    Parameters.apiVersion1
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    202: {},
    204: {},
    default: {
      bodyMapper: Mappers.ErrorResponse
    }
  },
  serializer
};
// GET: follow-up page for list(); the absolute nextLink URL replaces the path,
// so no subscription/api-version parameters are needed here.
const listNextOperationSpec: msRest.OperationSpec = {
  httpMethod: "GET",
  baseUrl: "https://management.azure.com",
  path: "{nextLink}",
  urlParameters: [
    Parameters.nextPageLink
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {
      bodyMapper: Mappers.FrontDoorListResult
    },
    default: {
      bodyMapper: Mappers.ErrorResponse
    }
  },
  serializer
};
// GET: follow-up page for listByResourceGroup(), driven by the opaque nextLink URL.
const listByResourceGroupNextOperationSpec: msRest.OperationSpec = {
  httpMethod: "GET",
  baseUrl: "https://management.azure.com",
  path: "{nextLink}",
  urlParameters: [
    Parameters.nextPageLink
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {
      bodyMapper: Mappers.FrontDoorListResult
    },
    default: {
      bodyMapper: Mappers.ErrorResponse
    }
  },
  serializer
};
import {Branch, BranchEl, BranchHandler, LeafEl, NotebookList, NotebookListContextMenu} from "./notebooklist";
import {setProperty, StateHandler} from "../../state";
import {ServerMessageDispatcher} from "../../messaging/dispatcher";
import {fireEvent, queryByText, waitFor} from "@testing-library/dom";
import * as messages from "../../data/messages";
import {SocketSession} from "../../messaging/comms";
import {ServerMessageReceiver} from "../../messaging/receiver";
import {SocketStateHandler} from "../../state/socket_state";
import {ServerStateHandler} from "../../state/server_state";
// Replace the real websocket layer with Jest's auto-mock so no network I/O
// happens in these tests.
jest.mock("../../messaging/comms");
// Shared wiring used by every test below: a mocked socket feeding a dispatcher.
const mockSocket = SocketSession.fromRelativeURL("notebookpath");
const socketHandler = SocketStateHandler.create(mockSocket);
const dispatcher = new ServerMessageDispatcher(socketHandler);
// Constructed for its side effect: registers message handlers that update
// ServerStateHandler when messages arrive on the global socket.
const receiver = new ServerMessageReceiver();
// A leaf renders an anchor pointing at its notebook and, when clicked, opens a
// websocket connection to that notebook and makes it the current one.
test('A LeafComponent should dispatch a LoadNotebook when clicked', done => {
    const leaf = {
        fullPath: "foo/bar/baz",
        value: "baz"
    };
    const leafState = StateHandler.from(leaf);
    const comp = new LeafEl(dispatcher, leafState);
    const leafEl = () => comp.el.querySelector("a.name")!;
    expect(leafEl()).toHaveAttribute('href', `notebooks/${leaf.fullPath}`);

    // The rendered link must track state updates.
    const newPath = "foo/bar/baz2";
    leafState.update(() => ({
        fullPath: newPath,
        value: "baz2"
    }));
    expect(leafEl()).toHaveAttribute('href', `notebooks/${newPath}`);

    const spy = jest.spyOn(SocketSession, 'fromRelativeURL')
    fireEvent(leafEl(), new MouseEvent('click'));
    waitFor(() => {
        expect(spy).toHaveBeenCalledWith('ws/' + encodeURIComponent(newPath))
    }).then(() => {
        expect(ServerStateHandler.state.currentNotebook).toEqual(newPath)
    }).then(done)
        // Fix: without this, a failing assertion rejected the chain silently and
        // the test hung until the Jest timeout instead of reporting the error.
        .catch(done)
});
describe("BranchComponent", () => {
    // Shared fixture: a branch with one leaf child, built at describe time so
    // the three tests below (which depend on each other's ordering) see it.
    const branchState = StateHandler.from<Branch>({
        fullPath: "foo",
        value: "foo",
        children: {}
    });
    const branch = new BranchEl(dispatcher, branchState);
    expect(branch.childrenEl).toBeEmptyDOMElement();
    const leaf = {
        fullPath: "bar",
        value: "bar"
    };
    branchState.updateField("children", () => setProperty(leaf.fullPath, leaf))

    test('is updated when its state changes', done => {
        expect(branch.childrenEl).not.toBeEmptyDOMElement();
        expect(branch.childrenEl).toHaveTextContent(leaf.value);

        // Replacing the child under the same key should re-render the list.
        const newLeaf = {
            fullPath: "baz",
            value: "baz"
        };
        branchState.updateField("children", () => setProperty(leaf.fullPath, newLeaf))
        expect(branch.childrenEl).toHaveTextContent(newLeaf.value);

        // Clicking a collapsed branch expands it.
        expect(branch.el).not.toHaveClass("expanded");
        fireEvent(branch.el, new MouseEvent('click'));
        waitFor(() => {
            expect(branch.el).toHaveClass("expanded")
        }).then(done)
            // Fix: surface waitFor failures instead of timing out silently.
            .catch(done)
    });

    test('can trigger a notebook rename', done => {
        const contextMenu = NotebookListContextMenu.get(dispatcher)
        expect(contextMenu.el).not.toBeInTheDocument()
        const leafEl = branch.childrenEl.children[0];
        fireEvent(leafEl, new MouseEvent("contextmenu"))
        waitFor(() => {
            expect(contextMenu.el).toBeInTheDocument()
        }).then(() => {
            const dispatchRename = jest.spyOn(dispatcher, 'renameNotebook').mockImplementation((() => {}))
            const rename = contextMenu.el.querySelector('.rename')!;
            fireEvent(rename, new MouseEvent('click'))
            return waitFor(() => {
                expect(dispatchRename).toHaveBeenCalledWith(leaf.fullPath)
            }).then(() => {
                dispatchRename.mockRestore()
            })
        }).then(done)
            // Fix: surface waitFor failures instead of timing out silently.
            .catch(done)
    })

    test('can trigger a notebook deletion', done => {
        const contextMenu = NotebookListContextMenu.get(dispatcher)
        expect(contextMenu.el).not.toBeInTheDocument()
        const leafEl = branch.childrenEl.children[0];
        fireEvent(leafEl, new MouseEvent("contextmenu"))
        waitFor(() => {
            expect(contextMenu.el).toBeInTheDocument()
        }).then(() => {
            const mockDelete = jest.spyOn(dispatcher, 'deleteNotebook').mockImplementation((() => {}))
            const del = contextMenu.el.querySelector('.delete')!;
            fireEvent(del, new MouseEvent('click'))
            return waitFor(() => {
                expect(mockDelete).toHaveBeenCalledWith(leaf.fullPath)
            }).then(() => {
                mockDelete.mockRestore()
            })
        }).then(done)
            // Fix: surface waitFor failures instead of timing out silently.
            .catch(done)
    })
})
// Verifies that BranchHandler turns flat slash-separated paths into a nested
// children tree, and that BranchEl renders one DOM child per top-level entry.
test("A BranchHandler should build a tree out of paths", () => {
    const root = {
        fullPath: "",
        value: "",
        children: {}
    };
    const branchHandler = new BranchHandler(root);
    const tree = new BranchEl(dispatcher, branchHandler);

    // first add some notebooks at root, easy peasy.
    const simpleNBs = ["foo.ipynb", "bar.ipynb", "baz.ipynb"];
    simpleNBs.forEach(nb => branchHandler.addPath(nb));
    expect(Object.values(branchHandler.state.children)).toEqual([
        {fullPath: "foo.ipynb", value: "foo.ipynb"},
        {fullPath: "bar.ipynb", value: "bar.ipynb"},
        {fullPath: "baz.ipynb", value: "baz.ipynb"},
    ]);
    expect(tree.el.children).toHaveLength(3);

    // next we will add a few directories
    const dirNBs = ["dir/one.ipynb", "dir/two.ipynb", "dir2/three.ipynb", "dir/four.ipynb"];
    dirNBs.forEach(nb => branchHandler.addPath(nb));
    // Directory nodes keep the full path as key but only the last segment as value.
    expect(Object.values(branchHandler.state.children)).toEqual([
        {fullPath: "foo.ipynb", value: "foo.ipynb"},
        {fullPath: "bar.ipynb", value: "bar.ipynb"},
        {fullPath: "baz.ipynb", value: "baz.ipynb"},
        {fullPath: "dir", value: "dir", children: {
            "dir/one.ipynb": {fullPath: "dir/one.ipynb", value: "one.ipynb"},
            "dir/two.ipynb": {fullPath: "dir/two.ipynb", value: "two.ipynb"},
            "dir/four.ipynb": {fullPath: "dir/four.ipynb", value: "four.ipynb"},
        }},
        {fullPath: "dir2", value: "dir2", children: {
            "dir2/three.ipynb": {fullPath: "dir2/three.ipynb", value: "three.ipynb"},
        }}
    ]);
    expect(tree.el.children).toHaveLength(5);
    const branches = tree.el.querySelectorAll(".branch");
    expect(branches).toHaveLength(2);

    // TODO it would be nice if there was a better selector utility for this. Probably using the wrong API or something
    const dir = [...branches].find((b: HTMLElement) => queryByText(b, "dir"))!.querySelector("ul")!;
    const dir2 = [...branches].find((b: HTMLElement) => queryByText(b, "dir2"))!.querySelector("ul")!;
    expect(dir.children).toHaveLength(3);
    expect(dir2.children).toHaveLength(1);

    // next let's go nuts with some nested notebooks!
    branchHandler.addPath("dir/another.ipynb");
    branchHandler.addPath("dir/newdir/more.ipynb");
    branchHandler.addPath("dir/newdir/newer/even_more.ipynb");
    branchHandler.addPath("dir/1/2/3/4/surprisinglydeep.ipynb");
    branchHandler.addPath("dir/1/2/oh_my.ipynb");
    branchHandler.addPath("path/to/my/notebook.ipynb");
    // Every intermediate directory must be materialized as its own branch node.
    expect(branchHandler.state.children).toEqual({
        "foo.ipynb": {fullPath: "foo.ipynb", value: "foo.ipynb"},
        "bar.ipynb": {fullPath: "bar.ipynb", value: "bar.ipynb"},
        "baz.ipynb": {fullPath: "baz.ipynb", value: "baz.ipynb"},
        "dir": {fullPath: "dir", value: "dir", children: {
            "dir/one.ipynb": {fullPath: "dir/one.ipynb", value: "one.ipynb"},
            "dir/two.ipynb": {fullPath: "dir/two.ipynb", value: "two.ipynb"},
            "dir/four.ipynb": {fullPath: "dir/four.ipynb", value: "four.ipynb"},
            "dir/another.ipynb": {fullPath: "dir/another.ipynb", value: "another.ipynb"},
            "dir/newdir": {fullPath: "dir/newdir", value: "newdir", children: {
                "dir/newdir/more.ipynb": {fullPath: "dir/newdir/more.ipynb", value: "more.ipynb"},
                "dir/newdir/newer": {fullPath: "dir/newdir/newer", value: "newer", children: {
                    "dir/newdir/newer/even_more.ipynb": {fullPath: "dir/newdir/newer/even_more.ipynb", value: "even_more.ipynb"},
                }},
            }},
            "dir/1": {fullPath: "dir/1", value: "1", children: {
                "dir/1/2": {fullPath: "dir/1/2", value: "2", children: {
                    "dir/1/2/3": {fullPath: "dir/1/2/3", value: "3", children: {
                        "dir/1/2/3/4": {fullPath: "dir/1/2/3/4", value: "4", children: {
                            "dir/1/2/3/4/surprisinglydeep.ipynb": {fullPath: "dir/1/2/3/4/surprisinglydeep.ipynb", value: "surprisinglydeep.ipynb"},
                        }},
                    }},
                    "dir/1/2/oh_my.ipynb": {fullPath: "dir/1/2/oh_my.ipynb", value: "oh_my.ipynb"},
                }},
            }},
        }},
        "dir2": {fullPath: "dir2", value: "dir2", children: {
            "dir2/three.ipynb": {fullPath: "dir2/three.ipynb", value: "three.ipynb"},
        }},
        "path": {fullPath: "path", value: "path", children: {
            "path/to": {fullPath: "path/to", value: "to", children: {
                "path/to/my": {fullPath: "path/to/my", value: "my", children: {
                    "path/to/my/notebook.ipynb": {fullPath: "path/to/my/notebook.ipynb", value: "notebook.ipynb"},
                }},
            }},
        }},
    });

    expect(tree.el.outerHTML).toMatchSnapshot()
});
test("stress test", () => {
const root = {
fullPath: "",
value: "",
children: {}
};
const branchHandler = new BranchHandler(root);
const comp = new BranchEl(dispatcher, branchHandler);
expect(branchHandler.state).toMatchSnapshot();
const max = 300;
[...Array(max).keys()].map(x => {
let path = `root/${x}`;
if (x % 10) {
path = `dir/${path}`
}
if (x % 20) {
path = `dir2/${path}`
}
return path
}).forEach(p => {
branchHandler.addPath(p)
});
expect(branchHandler.state).toMatchSnapshot();
});
// End-to-end: a ListNotebooks message from the (mocked) server populates the
// tree view, and a DeleteNotebook message removes the corresponding entry.
test("NotebookList e2e test", done => {
    const nbList = new NotebookList(dispatcher);
    expect(mockSocket.send).toHaveBeenCalledWith(new messages.ListNotebooks([])); // gets called when the notebook list is initialized.
    expect(nbList.el.querySelector('.tree-view > ul')).toBeEmptyDOMElement();

    // this will trigger the receiver to update global state
    const paths = [...Array(500).keys()].map(x => {
        let path = `root/${x}`;
        if (x % 10) {
            path = `dir/${path}`
        }
        if (x % 20) {
            path = `dir2/${path}`
        }
        return path
    });
    SocketSession.global.send(new messages.ListNotebooks(paths));
    waitFor(() => {
        expect(nbList.el.querySelector('.tree-view > ul')).not.toBeEmptyDOMElement();
    }).then(() => {
        expect(nbList.el.outerHTML).toMatchSnapshot()
    })
    .then(() => {
        const path = `notebooks/${paths[0]}`;
        expect(nbList.el.querySelector(`[href='${path}']`)).not.toBeEmptyDOMElement()
        SocketSession.global.send(new messages.DeleteNotebook(paths[0]))
        // Fix: return the inner waitFor so its failure rejects the outer chain
        // instead of becoming a floating, unhandled rejection.
        return waitFor(() => {
            expect(nbList.el.querySelector(`[href='${path}']`)).toBeNull()
        }).then(done)
    })
    // Fix: report chain failures through done() rather than hanging to timeout.
    .catch(done)
})
// TODO test rename, delete | the_stack |
import { useState, useEffect, useRef } from 'react'
import CSSModules from 'react-css-modules'
import styles from '../CreateWallet.scss'
import reducers from 'redux/core/reducers'
import { FormattedMessage, injectIntl } from 'react-intl'
import { isMobile } from 'react-device-detect'
import actions from 'redux/actions'
import { constants } from 'helpers'
import feedback from 'shared/helpers/feedback'
import Explanation from '../Explanation'
import icons from '../images'
import Cupture, {
subHeaderText1,
subHeaderText2,
cupture2,
} from './texts'
import Button from 'components/controls/Button/Button'
/**
 * Wallet-creation step 2: the user picks a protection scheme for the new
 * wallet (no protection, PIN, multisig, or fingerprint). The selection is
 * stored through the createWallet reducer; actual creation is triggered by
 * the parent via `onClick`. Availability of each scheme depends on the
 * selected currency and on what is already activated for it.
 */
const SecondStep = (props) => {
  const {
    intl: { locale },
    onClick,
    currencies,
    error,
    setError,
    forcedCurrencyData,
    btcData,
  } = props

  // Protection schemes supported per currency key (filled in below per coin).
  const _protection = {
    pin: {
      btc: true,
    },
    g2fa: {},
    multisign: {},
    fingerprint: {},
  }
  // Schemes already activated for the currency; activated cards render
  // disabled with an "Activated!" badge.
  const _activated = {
    nothing: {},
    pin: {},
    g2fa: {},
    multisign: {},
    fingerprint: {},
  }

  const { hiddenCoinsList } = constants.localStorage
  //@ts-ignore: strictNullChecks
  const hiddenCoins = JSON.parse(localStorage.getItem(hiddenCoinsList))

  if (currencies.BTC) {
    //@ts-ignore
    _protection.pin.btc = true
    //@ts-ignore
    _protection.g2fa.btc = false
    //@ts-ignore
    _protection.multisign.btc = true
    //@ts-ignore
    _protection.fingerprint.btc = true
    // "No security" counts as activated when a plain BTC wallet already holds
    // funds or is visible in the wallet list.
    //@ts-ignore
    _activated.nothing.btc = btcData.balance > 0 || (hiddenCoins.length ? !hiddenCoins.includes('BTC') && !hiddenCoins.includes(`BTC:${btcData.address}`) : false)
    //@ts-ignore
    _activated.pin.btc = actions.btcmultisig.checkPINActivated()
    //@ts-ignore
    _activated.g2fa.btc = actions.btcmultisig.checkG2FAActivated()
    //@ts-ignore
    _activated.multisign.btc = actions.btcmultisig.checkUserActivated()
    //@ts-ignore
    _activated.fingerprint.btc = false
  }

  // Which card is highlighted and which scheme is currently selected.
  const [border, setBorder] = useState({
    color: {
      withoutSecure: false,
      pin: false,
      google2FA: false,
      multisignature: false,
    },
    selected: '',
  })

  const [isFingerprintAvailable, setFingerprintAvaillable] = useState(false)
  const thisComponentInitHelper = useRef(true)
  const [isFingerprintFeatureAsked, setFingerprintFeatureAsked] = useState(false)
  const [isTrivialFeatureAsked, setTrivialFeatureAsked] = useState(false)
  const [isPinFeatureAsked, setPinFeatureAsked] = useState(false)
  const [is2FAFeatureAsked, set2FAFeatureAsked] = useState(false)
  const [isMultisigFeatureAsked, setMultisigFeatureAsked] = useState(false)

  // Feature-detect a platform fingerprint authenticator (WebAuthn).
  // Fix: run once on mount — previously this effect had no dependency array
  // and re-queried the authenticator on every render.
  useEffect(() => {
    try {
      //@ts-ignore
      if (typeof PublicKeyCredential !== 'undefined') {
        // eslint-disable-next-line no-undef
        //@ts-ignore
        if (thisComponentInitHelper.current && PublicKeyCredential) {
          // eslint-disable-next-line no-undef
          //@ts-ignore
          PublicKeyCredential
            .isUserVerifyingPlatformAuthenticatorAvailable()
            .then(result => {
              if (result) {
                setFingerprintAvaillable(true)
              }
            })
            .catch(e => console.error(e))
        }
      }
    } catch (error) {
      console.error(error)
    }
  }, [])

  // Report completion to analytics and hand control back to the parent.
  const handleFinish = () => {
    if (currencies.BTC) {
      feedback.createWallet.finished('BTC')
    } else {
      feedback.createWallet.finished()
    }

    onClick()
    // onCreateTrigger()
  }

  // Select a protection card: highlight it, persist the choice in the reducer
  // and clear any previous validation error.
  const handleClick = (index, el) => {
    const { name, enabled, activated } = el

    if (el.name === 'fingerprint') {
      // eslint-disable-next-line no-alert
      alert('We don\'t support this type of device for now :(')
      return null
    }

    if (!enabled) {
      return
    }
    // if (activated) return

    // Fix: work on a copy instead of mutating the object held in React state;
    // in-place mutation can make the next render miss the change.
    const colors = { ...border.color }
    Object.keys(colors).forEach(colorName => {
      colors[colorName] = colorName === name
    })

    setBorder({ color: colors, selected: name })
    reducers.createWallet.newWalletData({ type: 'secure', data: name })
    setError(null)
  }

  const currencyName = Object.keys(currencies).filter((el) => currencies[el])[0] || 'Cant define currency'
  const currencyKey = currencyName.toLowerCase()

  // One card per protection scheme, with localized caption text.
  const coins = [
    {
      text: {
        en: 'No security',
        ru: 'Без защиты',
        nl: 'Geen beveliging',
        es: 'Sin seguridad',
        pl: 'Brak ochrony',
      }[locale],
      name: 'withoutSecure',
      capture: {
        en: 'Suitable for small amounts',
        ru: 'Подходит для небольших сумм',
        nl: 'Geschikt voor kleine bedragen',
        es: 'Apto para pequeñas cantidades',
        pl: 'Nadaje się do małych ilości',
      }[locale],
      enabled: !_activated.nothing[currencyKey],
      activated: _activated.nothing[currencyKey],
      onClickHandler: () => {
        // Each scheme reports to analytics only the first time it is clicked.
        if (isTrivialFeatureAsked) {
          return null
        }
        setTrivialFeatureAsked(true)
        feedback.createWallet.securitySelected(`${currencyName}-normal`)
      },
    },
    {
      text: 'PIN',
      name: 'pin',
      capture: {
        en: 'Verify your transactions via PIN code',
        ru: 'Транзакции подтверждаются PIN-кодом',
        nl: 'Verifieer uw transacties via PIN code',
        es: 'Verifique sus transacciones mediante el código PIN',
        pl: 'Zweryfikuj swoje transakcje za pomocą kodu PIN',
      }[locale],
      enabled: _protection.pin[currencyKey],
      activated: _activated.pin[currencyKey],
      onClickHandler: () => {
        if (isPinFeatureAsked) {
          return null
        }
        setPinFeatureAsked(true)
        feedback.createWallet.securitySelected(`${currencyName}-pin`)
      },
    },
    /*
    {
      text: 'Google 2FA',
      name: 'google2FA',
      capture: locale === 'en' ?
        'Verify your transactions through the Google Authenticator app' :
        'Транзакции подтверждаются через приложение Google Authenticator',
      enabled: _protection.g2fa.btc,
      activated: _activated.g2fa.btc,
      onClickHandler: () => {
        if (is2FAFeatureAsked) {
          return null
        }
        set2FAFeatureAsked(true)
        feedback.createWallet.securitySelected(`${currencyName}-2fa`)
      },
    },
    */
    {
      text: 'Multisignature',
      name: 'multisignature',
      capture: {
        en: 'Verify your transactions by using another device or by another person',
        ru: 'Транзакции подтверждаются с другого устройства и/или другим человеком',
        nl: 'Verifieer uw transacties met een ander apparaat of persoon',
        es: 'Verifique sus transacciones usando otro dispositivo o por otra persona',
        pl: 'Zweryfikuj swoje transakcje za pomocą innego urządzenia lub innej osoby',
      }[locale],
      enabled: _protection.multisign[currencyKey],
      activated: _activated.multisign[currencyKey],
      onClickHandler: () => {
        if (isMultisigFeatureAsked) {
          return null
        }
        setMultisigFeatureAsked(true)
        feedback.createWallet.securitySelected(`${currencyName}-multisig`)
      },
    },
  ]

  // The fingerprint card is only offered when WebAuthn detection succeeded.
  if (isFingerprintAvailable) {
    coins.push({
      text: 'Fingerprint',
      name: 'fingerprint',
      capture: {
        en: 'Transactions are confirmed with your fingerprint authenticator',
        ru: 'Транзакции подтверждаются с помощью считывателя отпечатков пальцев',
        nl: 'Transacties bevestigd met uw vingerprint authenticator',
        es: 'Las transacciones se confirman con su autenticador de huellas digitales',
        pl: 'Transakcje są potwierdzane za pomocą Twojego czytnika linii papilarnych',
      }[locale],
      enabled: _protection.fingerprint[currencyKey],
      activated: _activated.fingerprint[currencyKey],
      onClickHandler: () => {
        if (isFingerprintFeatureAsked) {
          return null
        }
        setFingerprintFeatureAsked(true)
        feedback.createWallet.securitySelected(`${currencyName}-fingerprint`)
      },
    })
  }

  return (
    <div>
      {!isMobile && !forcedCurrencyData &&
        <div>
          <Explanation subHeaderText={subHeaderText1()} step={1} notMain>
            <Cupture />
          </Explanation>
        </div>
      }
      <div>
        <div>
          <Explanation subHeaderText={subHeaderText2()} step={2} isShow={forcedCurrencyData}>
            {cupture2()}
          </Explanation>
          <div styleName="currencyChooserWrapper">
            {coins.map((el, index) => {
              const { name, capture, text, enabled, activated } = el

              // Compose the card's CSS-module class list from its state.
              const cardStyle = ['card', 'secureSize', 'thirdCard']

              if (border.color[name] && enabled) cardStyle.push('purpleBorder')
              if (!enabled) cardStyle.push('cardDisabled')
              if (activated) cardStyle.push('cardActivated')

              const cardStyle_ = cardStyle.join(' ')

              return (
                <div
                  key={index}
                  styleName={`${cardStyle_}`}
                  id={name}
                  onClick={() => {
                    if (typeof el.onClickHandler !== 'undefined') { el.onClickHandler() }
                    return handleClick(index, el)
                  }}
                >
                  <div styleName="ind">
                    {(!enabled || activated) &&
                      <em>
                        {!activated && <FormattedMessage id="createWalletSoon" defaultMessage="Soon!" />}
                        {activated && <FormattedMessage id="createWalletActivated" defaultMessage="Activated!" />}
                      </em>
                    }
                  </div>
                  <div styleName="flex">
                    <div styleName="logo securityIcon">
                      <img
                        src={icons[name]}
                        alt={`${name} icon`}
                        role="img"
                      />
                    </div>
                    <ul styleName="currencyInfoList">
                      <li>
                        <b>{text}</b>
                      </li>
                      <li>{capture}</li>
                    </ul>
                  </div>
                </div>
              )
            })}
          </div>
        </div>
        <Button
          id="createWalletBtn"
          styleName="stepButton"
          disabled={error || border.selected === '' || border.selected === 'fingerprint'}
          onClick={handleFinish}
        >
          <FormattedMessage id="createWalletButton3" defaultMessage="Create Wallet" />
        </Button>
      </div>
    </div>
  )
}

export default injectIntl(CSSModules(SecondStep, styles, { allowMultiple: true }))
import { leftPadString } from '../utils/data';
import BitCore from 'bitcore-lib';
import Mnemonic from 'bitcore-mnemonic';
import Nacl from 'tweetnacl';
import NaclUtil from 'tweetnacl-util';
import ScryptAsync from 'scrypt-async';
import * as filecoin_signer from '@zondax/filecoin-signing-tools';
export class Keystore {
public salt!: string;
public hdPathString!: string;
public encSeed!: { encStr: any; nonce: any; } | undefined;
public version = 1;
public hdIndex = 0;
public encPrivKeys!: any;
public addresses!: string[];
private defaultAddressIndex: number = 0;
public serialize() {
return JSON.stringify({
encSeed: this.encSeed,
addresses: this.addresses,
encPrivKeys: this.encPrivKeys,
hdPathString: this.hdPathString,
salt: this.salt,
hdIndex: this.hdIndex,
version: this.version,
});
};
public deserialize(keystore: string) {
const dataKS = JSON.parse(keystore);
const { version, salt, encSeed, encPrivKeys, hdIndex, hdPathString, addresses } = dataKS;
this.salt = salt;
this.hdPathString = hdPathString;
this.encSeed = encSeed;
this.version = version;
this.hdIndex = hdIndex;
this.encPrivKeys = encPrivKeys;
this.addresses = addresses;
};
public init(mnemonic: string, pwDerivedKey: Uint8Array, hdPathString: string, salt: string) {
this.salt = salt;
const pathParts = hdPathString.split('/');
if (pathParts.length === 6) {
const hdPathIndex = pathParts.splice(pathParts.length - 1, 1);
this.hdIndex = parseInt(hdPathIndex[0].replace("'", ""));
}
this.hdPathString = pathParts.join('/');
this.encSeed = undefined;
this.encPrivKeys = {};
this.addresses = [];
if ((typeof pwDerivedKey !== 'undefined') && (typeof mnemonic !== 'undefined')) {
const words = mnemonic.split(' ');
if (!this.isSeedValid(mnemonic) || words.length !== 12) {
throw new Error('KeyStore: Invalid mnemonic');
}
// Pad the seed to length 120 before encrypting
const paddedSeed = leftPadString(mnemonic, ' ', 120);
this.encSeed = this._encryptString(paddedSeed, pwDerivedKey);
this.generateNewAddress(pwDerivedKey, 1);
}
}
public async createVault(opts: any) {
const { hdPathString, seedPhrase, password } = opts;
let salt = opts.salt;
// Default hdPathString
if (!hdPathString) {
const err = new Error("Keystore: Must include hdPathString in createVault inputs. Suggested value m/44'/461'/0/0/1");
return err;
}
if (!seedPhrase) {
const err = new Error('Keystore: Must include seedPhrase in createVault inputs.');
return err;
}
if (!salt) {
salt = this.generateSalt(32);
}
const pwDerivedKey: Uint8Array = await this.deriveKeyFromPasswordAndSalt(password, salt);
this.init(seedPhrase, pwDerivedKey, hdPathString, salt);
return null;
};
private generateSalt(byteCount: number) {
return BitCore.crypto.Random.getRandomBuffer(byteCount || 32).toString('base64');
};
private isSeedValid(seed: string) {
return Mnemonic.isValid(seed, Mnemonic.Words.ENGLISH);
};
private _encryptString(string: string, pwDerivedKey: Uint8Array) {
const nonce = Nacl.randomBytes(Nacl.secretbox.nonceLength);
const encStr = Nacl.secretbox(NaclUtil.decodeUTF8(string), nonce, pwDerivedKey);
return {
encStr: NaclUtil.encodeBase64(encStr),
nonce: NaclUtil.encodeBase64(nonce),
};
};
private _decryptString(encryptedStr: any, pwDerivedKey: Uint8Array) {
const decStr = NaclUtil.decodeBase64(encryptedStr.encStr);
const nonce = NaclUtil.decodeBase64(encryptedStr.nonce);
const decryptedStr = Nacl.secretbox.open(decStr, nonce, pwDerivedKey);
if (decryptedStr === null) {
return false;
}
return NaclUtil.encodeUTF8(decryptedStr);
};
private encodeHex(msgUInt8Arr: any) {
const msgBase64 = NaclUtil.encodeBase64(msgUInt8Arr);
return (Buffer.from(msgBase64, 'base64')).toString('hex');
}
private decodeHex(msgHex: any) {
const msgBase64 = (Buffer.from(msgHex, 'hex')).toString('base64');
return NaclUtil.decodeBase64(msgBase64);
}
private _encryptKey(privateKey: string, pwDerivedKey: Uint8Array) {
const nonce = Nacl.randomBytes(Nacl.secretbox.nonceLength);
const privateKeyArray = this.decodeHex(privateKey);
const encKey = Nacl.secretbox(privateKeyArray, nonce, pwDerivedKey);
return {
key: NaclUtil.encodeBase64(encKey),
nonce: NaclUtil.encodeBase64(nonce),
};
};
private _decryptKey(encryptedKey: any, pwDerivedKey: Uint8Array) {
const decKey = NaclUtil.decodeBase64(encryptedKey.key);
const nonce = NaclUtil.decodeBase64(encryptedKey.nonce);
const decryptedKey = Nacl.secretbox.open(decKey, nonce, pwDerivedKey);
if (decryptedKey === null) {
throw new Error('Decryption failed!');
}
return this.encodeHex(decryptedKey);
};
private async deriveKeyFromPasswordAndSalt(password: string, salt: string): Promise<Uint8Array> {
return new Promise((resolve, reject) => {
const logN = 14;
const r = 8;
const dkLen = 16;
const cb = function (derKey: any) {
let err = null;
let ui8arr = undefined;
try {
ui8arr = Uint8Array.from(derKey);
} catch (e) {
reject(e);
}
resolve(ui8arr);
};
ScryptAsync(password, salt, {
logN,
r,
p: 1,
dkLen,
encoding: 'hex'
}, cb);
});
};
private generateNewAddress(pwDerivedKey: Uint8Array, n: number) {
//Assert.derivedKey(this, pwDerivedKey);
if (!this.encSeed) {
throw new Error('KeyStore.generateNewAddress: No seed set');
}
n = n || 1;
const keys = this._generatePrivKeys(pwDerivedKey, n);
for (let i = 0; i < n; i++) {
const keyObj = keys[i];
const address: string = keyObj.privKey.address;
this.encPrivKeys[address] = keyObj.encPrivKey;
this.addresses.push(address);
}
this.hdIndex += n;
};
async newAddress(n: number, password: string) {
const pwDerivedKey: Uint8Array = await this.deriveKeyFromPasswordAndSalt(password, this.salt);
this.generateNewAddress(pwDerivedKey, n);
};
async deleteAddress(address: string, password: string) {
const addressIndex = this.addresses.indexOf(address);
if (addressIndex >= 0) {
const pwDerivedKey: Uint8Array = await this.deriveKeyFromPasswordAndSalt(password, this.salt);
const encPrivateKey = this.encPrivKeys[address];
if (this._decryptKey(encPrivateKey, pwDerivedKey)) {
this.addresses[addressIndex] = '';
this.encPrivKeys[address] = '';
if (this.defaultAddressIndex === addressIndex) {
const addresses = await this.getAddresses();
if (addresses.length > 0) {
await this.setDefaultAddress(addresses[0]);
}
}
};
}
}
private _generatePrivKeys(pwDerivedKey: Uint8Array, n: number) {
//Assert.derivedKey(this, pwDerivedKey);
const seed = this._decryptString(this.encSeed, pwDerivedKey);
if (!seed || seed.length === 0) {
throw new Error('Provided password derived key is wrong');
}
const keys = [];
for (let i = 0; i < n; i++) {
const key = filecoin_signer.keyDerive(seed, `${this.hdPathString}/${i + this.hdIndex}`, '');
const encPrivateKey = this._encryptKey(key.private_hexstring, pwDerivedKey);
keys[i] = {
privKey: key,
encPrivKey: encPrivateKey
};
}
return keys;
};
async getPrivateKey(address: string, password: string) {
const pwDerivedKey: Uint8Array = await this.deriveKeyFromPasswordAndSalt(password, this.salt);
if (this.encPrivKeys[address] === undefined) {
throw new Error('KeyStore.exportPrivateKey: Address not found in KeyStore');
}
const encPrivateKey = this.encPrivKeys[address];
return this._decryptKey(encPrivateKey, pwDerivedKey);
};
async getDefaultAddress(): Promise<string> {
return this.addresses[this.defaultAddressIndex];
}
async setDefaultAddress(address: string): Promise<void> {
const addressIndex = this.addresses.indexOf(address);
if (addressIndex >= 0) {
this.defaultAddressIndex = addressIndex;
}
}
public async getAddresses(): Promise<string[]> {
return this.addresses.filter((a, i) => { return a != '' });
}
async hasAddress(address: string): Promise<boolean> {
return this.addresses.indexOf(address) >= 0;
}
generateRandomSeed(extraEntropy?: any) {
let seed = '';
if (extraEntropy === undefined) {
seed = new Mnemonic(Mnemonic.Words.ENGLISH);
} else if (typeof extraEntropy === 'string') {
const entBuf = Buffer.from(extraEntropy);
const randBuf = BitCore.crypto.Random.getRandomBuffer(256 / 8);
const hashedEnt = this._concatAndSha256(randBuf, entBuf).slice(0, 128 / 8);
seed = new Mnemonic(hashedEnt, Mnemonic.Words.ENGLISH);
} else {
throw new Error('generateRandomSeed: extraEntropy is set but not a string.');
}
return seed.toString();
};
_concatAndSha256 = function (entropyBuf0: any, entropyBuf1: any) {
const totalEnt = Buffer.concat([entropyBuf0, entropyBuf1]);
if (totalEnt.length !== entropyBuf0.length + entropyBuf1.length) {
throw new Error('generateRandomSeed: Logic error! Concatenation of entropy sources failed.');
}
return BitCore.crypto.Hash.sha256(totalEnt);
};
} | the_stack |
import { MetadataBearer as $MetadataBearer, SmithyException as __SmithyException } from "@aws-sdk/types";
/**
 * Configuration of access logging for a Classic Load Balancer
 * (the <code>AccessLog</code> load-balancer attribute).
 */
export interface AccessLog {
  /**
   * Whether access logging is turned on for the load balancer.
   */
  Enabled: boolean | undefined;
  /**
   * Name of the Amazon S3 bucket that receives the access logs.
   */
  S3BucketName?: string;
  /**
   * Publishing interval in minutes; either 5 or 60. Defaults to 60.
   */
  EmitInterval?: number;
  /**
   * Key prefix inside the bucket (for example <code>my-bucket-prefix/prod</code>).
   * When omitted, logs are written at the bucket root.
   */
  S3BucketPrefix?: string;
}

export namespace AccessLog {
  /**
   * @internal
   */
  export const filterSensitiveLog = (obj: AccessLog): any => {
    // Nothing in this shape is sensitive; emit a shallow copy unchanged.
    return { ...obj };
  };
}
/**
 * Modeled service error raised when the specified load balancer does not exist.
 */
export interface AccessPointNotFoundException extends __SmithyException, $MetadataBearer {
  name: "AccessPointNotFoundException";
  $fault: "client";
  Message?: string;
}

export namespace AccessPointNotFoundException {
  /**
   * @internal
   */
  export const filterSensitiveLog = (obj: AccessPointNotFoundException): any => {
    // No sensitive members; pass a shallow copy through.
    return { ...obj };
  };
}
/**
 * Request shape for the EnableAvailabilityZonesForLoadBalancer operation.
 */
export interface AddAvailabilityZonesInput {
  /**
   * Name of the load balancer to modify.
   */
  LoadBalancerName: string | undefined;
  /**
   * Availability Zones to enable; they must belong to the load balancer's region.
   */
  AvailabilityZones: string[] | undefined;
}

export namespace AddAvailabilityZonesInput {
  /**
   * @internal
   */
  export const filterSensitiveLog = (obj: AddAvailabilityZonesInput): any => {
    // No sensitive members; pass a shallow copy through.
    return { ...obj };
  };
}
/**
* <p>Contains the output of EnableAvailabilityZonesForLoadBalancer.</p>
*/
export interface AddAvailabilityZonesOutput {
/**
* <p>The updated list of Availability Zones for the load balancer.</p>
*/
AvailabilityZones?: string[];
}
export namespace AddAvailabilityZonesOutput {
/**
* @internal
*/
export const filterSensitiveLog = (obj: AddAvailabilityZonesOutput): any => ({
...obj,
});
}
/**
* <p>Information about additional load balancer attributes.</p>
*/
export interface AdditionalAttribute {
/**
* <p>The name of the attribute.</p>
* <p>The following attribute is supported.</p>
* <ul>
* <li>
* <p>
* <code>elb.http.desyncmitigationmode</code> - Determines how the load balancer handles requests that
* might pose a security risk to your application. The possible values are <code>monitor</code>,
* <code>defensive</code>, and <code>strictest</code>. The default is <code>defensive</code>.</p>
* </li>
* </ul>
*/
Key?: string;
/**
* <p>This value of the attribute.</p>
*/
Value?: string;
}
export namespace AdditionalAttribute {
/**
* @internal
*/
export const filterSensitiveLog = (obj: AdditionalAttribute): any => ({
...obj,
});
}
/**
* <p>Information about a tag.</p>
*/
export interface Tag {
/**
* <p>The key of the tag.</p>
*/
Key: string | undefined;
/**
* <p>The value of the tag.</p>
*/
Value?: string;
}
export namespace Tag {
/**
* @internal
*/
export const filterSensitiveLog = (obj: Tag): any => ({
...obj,
});
}
/**
* <p>Contains the parameters for AddTags.</p>
*/
export interface AddTagsInput {
/**
* <p>The name of the load balancer. You can specify one load balancer only.</p>
*/
LoadBalancerNames: string[] | undefined;
/**
* <p>The tags.</p>
*/
Tags: Tag[] | undefined;
}
export namespace AddTagsInput {
/**
* @internal
*/
export const filterSensitiveLog = (obj: AddTagsInput): any => ({
...obj,
});
}
/**
* <p>Contains the output of AddTags.</p>
*/
export interface AddTagsOutput {}
export namespace AddTagsOutput {
/**
* @internal
*/
export const filterSensitiveLog = (obj: AddTagsOutput): any => ({
...obj,
});
}
/**
* <p>A tag key was specified more than once.</p>
*/
export interface DuplicateTagKeysException extends __SmithyException, $MetadataBearer {
name: "DuplicateTagKeysException";
$fault: "client";
Message?: string;
}
export namespace DuplicateTagKeysException {
/**
* @internal
*/
export const filterSensitiveLog = (obj: DuplicateTagKeysException): any => ({
...obj,
});
}
/**
* <p>The quota for the number of tags that can be assigned to a load balancer has been reached.</p>
*/
export interface TooManyTagsException extends __SmithyException, $MetadataBearer {
name: "TooManyTagsException";
$fault: "client";
Message?: string;
}
export namespace TooManyTagsException {
/**
* @internal
*/
export const filterSensitiveLog = (obj: TooManyTagsException): any => ({
...obj,
});
}
/**
* <p>Information about a policy for application-controlled session stickiness.</p>
*/
export interface AppCookieStickinessPolicy {
/**
* <p>The mnemonic name for the policy being created. The name must be unique within a set of policies for this load balancer.</p>
*/
PolicyName?: string;
/**
* <p>The name of the application cookie used for stickiness.</p>
*/
CookieName?: string;
}
export namespace AppCookieStickinessPolicy {
/**
* @internal
*/
export const filterSensitiveLog = (obj: AppCookieStickinessPolicy): any => ({
...obj,
});
}
/**
* <p>Contains the parameters for ApplySecurityGroupsToLoadBalancer.</p>
*/
export interface ApplySecurityGroupsToLoadBalancerInput {
/**
* <p>The name of the load balancer.</p>
*/
LoadBalancerName: string | undefined;
/**
* <p>The IDs of the security groups to associate with the load balancer. Note that you cannot specify the name of the security group.</p>
*/
SecurityGroups: string[] | undefined;
}
export namespace ApplySecurityGroupsToLoadBalancerInput {
/**
* @internal
*/
export const filterSensitiveLog = (obj: ApplySecurityGroupsToLoadBalancerInput): any => ({
...obj,
});
}
/**
* <p>Contains the output of ApplySecurityGroupsToLoadBalancer.</p>
*/
export interface ApplySecurityGroupsToLoadBalancerOutput {
/**
* <p>The IDs of the security groups associated with the load balancer.</p>
*/
SecurityGroups?: string[];
}
export namespace ApplySecurityGroupsToLoadBalancerOutput {
/**
* @internal
*/
export const filterSensitiveLog = (obj: ApplySecurityGroupsToLoadBalancerOutput): any => ({
...obj,
});
}
/**
* <p>The requested configuration change is not valid.</p>
*/
export interface InvalidConfigurationRequestException extends __SmithyException, $MetadataBearer {
name: "InvalidConfigurationRequestException";
$fault: "client";
Message?: string;
}
export namespace InvalidConfigurationRequestException {
/**
* @internal
*/
export const filterSensitiveLog = (obj: InvalidConfigurationRequestException): any => ({
...obj,
});
}
/**
* <p>One or more of the specified security groups do not exist.</p>
*/
export interface InvalidSecurityGroupException extends __SmithyException, $MetadataBearer {
name: "InvalidSecurityGroupException";
$fault: "client";
Message?: string;
}
export namespace InvalidSecurityGroupException {
/**
* @internal
*/
export const filterSensitiveLog = (obj: InvalidSecurityGroupException): any => ({
...obj,
});
}
/**
* <p>Contains the parameters for AttachLoaBalancerToSubnets.</p>
*/
export interface AttachLoadBalancerToSubnetsInput {
/**
* <p>The name of the load balancer.</p>
*/
LoadBalancerName: string | undefined;
/**
* <p>The IDs of the subnets to add. You can add only one subnet per Availability Zone.</p>
*/
Subnets: string[] | undefined;
}
export namespace AttachLoadBalancerToSubnetsInput {
/**
* @internal
*/
export const filterSensitiveLog = (obj: AttachLoadBalancerToSubnetsInput): any => ({
...obj,
});
}
/**
* <p>Contains the output of AttachLoadBalancerToSubnets.</p>
*/
export interface AttachLoadBalancerToSubnetsOutput {
/**
* <p>The IDs of the subnets attached to the load balancer.</p>
*/
Subnets?: string[];
}
export namespace AttachLoadBalancerToSubnetsOutput {
/**
* @internal
*/
export const filterSensitiveLog = (obj: AttachLoadBalancerToSubnetsOutput): any => ({
...obj,
});
}
/**
* <p>The specified VPC has no associated Internet gateway.</p>
*/
export interface InvalidSubnetException extends __SmithyException, $MetadataBearer {
name: "InvalidSubnetException";
$fault: "client";
Message?: string;
}
export namespace InvalidSubnetException {
/**
* @internal
*/
export const filterSensitiveLog = (obj: InvalidSubnetException): any => ({
...obj,
});
}
/**
* <p>One or more of the specified subnets do not exist.</p>
*/
export interface SubnetNotFoundException extends __SmithyException, $MetadataBearer {
name: "SubnetNotFoundException";
$fault: "client";
Message?: string;
}
export namespace SubnetNotFoundException {
/**
* @internal
*/
export const filterSensitiveLog = (obj: SubnetNotFoundException): any => ({
...obj,
});
}
/**
* <p>Information about the configuration of an EC2 instance.</p>
*/
export interface BackendServerDescription {
/**
* <p>The port on which the EC2 instance is listening.</p>
*/
InstancePort?: number;
/**
* <p>The names of the policies enabled for the EC2 instance.</p>
*/
PolicyNames?: string[];
}
export namespace BackendServerDescription {
/**
* @internal
*/
export const filterSensitiveLog = (obj: BackendServerDescription): any => ({
...obj,
});
}
/**
* <p>The specified ARN does not refer to a valid SSL certificate in AWS Identity and Access Management (IAM)
* or AWS Certificate Manager (ACM). Note that if you recently uploaded the certificate to IAM, this error might
* indicate that the certificate is not fully available yet.</p>
*/
export interface CertificateNotFoundException extends __SmithyException, $MetadataBearer {
name: "CertificateNotFoundException";
$fault: "client";
Message?: string;
}
export namespace CertificateNotFoundException {
/**
* @internal
*/
export const filterSensitiveLog = (obj: CertificateNotFoundException): any => ({
...obj,
});
}
/**
* <p>Information about a health check.</p>
*/
export interface HealthCheck {
/**
* <p>The instance being checked. The protocol is either TCP, HTTP, HTTPS, or SSL. The range of valid ports is one (1) through 65535.</p>
* <p>TCP is the default, specified as a TCP: port pair, for example "TCP:5000". In this case, a health check simply attempts to open a TCP connection to the instance on the specified port. Failure to connect within the configured timeout is considered unhealthy.</p>
* <p>SSL is also specified as SSL: port pair, for example, SSL:5000.</p>
* <p>For HTTP/HTTPS, you must include a ping path in the string. HTTP is specified as a HTTP:port;/;PathToPing; grouping, for example "HTTP:80/weather/us/wa/seattle". In this case, a HTTP GET request is issued to the instance on the given port and path. Any answer other than "200 OK" within the timeout period is considered unhealthy.</p>
* <p>The total length of the HTTP ping target must be 1024 16-bit Unicode characters or less.</p>
*/
Target: string | undefined;
/**
* <p>The approximate interval, in seconds, between health checks of an individual instance.</p>
*/
Interval: number | undefined;
/**
* <p>The amount of time, in seconds, during which no response means a failed health check.</p>
* <p>This value must be less than the <code>Interval</code> value.</p>
*/
Timeout: number | undefined;
/**
* <p>The number of consecutive health check failures required before moving the instance to the <code>Unhealthy</code> state.</p>
*/
UnhealthyThreshold: number | undefined;
/**
* <p>The number of consecutive health checks successes required before moving the instance to the <code>Healthy</code> state.</p>
*/
HealthyThreshold: number | undefined;
}
export namespace HealthCheck {
/**
* @internal
*/
export const filterSensitiveLog = (obj: HealthCheck): any => ({
...obj,
});
}
/**
* <p>Contains the parameters for ConfigureHealthCheck.</p>
*/
export interface ConfigureHealthCheckInput {
/**
* <p>The name of the load balancer.</p>
*/
LoadBalancerName: string | undefined;
/**
* <p>The configuration information.</p>
*/
HealthCheck: HealthCheck | undefined;
}
export namespace ConfigureHealthCheckInput {
/**
* @internal
*/
export const filterSensitiveLog = (obj: ConfigureHealthCheckInput): any => ({
...obj,
});
}
/**
* <p>Contains the output of ConfigureHealthCheck.</p>
*/
export interface ConfigureHealthCheckOutput {
/**
* <p>The updated health check.</p>
*/
HealthCheck?: HealthCheck;
}
export namespace ConfigureHealthCheckOutput {
/**
* @internal
*/
export const filterSensitiveLog = (obj: ConfigureHealthCheckOutput): any => ({
...obj,
});
}
/**
* <p>Information about the <code>ConnectionDraining</code> attribute.</p>
*/
export interface ConnectionDraining {
/**
* <p>Specifies whether connection draining is enabled for the load balancer.</p>
*/
Enabled: boolean | undefined;
/**
* <p>The maximum time, in seconds, to keep the existing connections open before deregistering the instances.</p>
*/
Timeout?: number;
}
export namespace ConnectionDraining {
/**
* @internal
*/
export const filterSensitiveLog = (obj: ConnectionDraining): any => ({
...obj,
});
}
/**
* <p>Information about the <code>ConnectionSettings</code> attribute.</p>
*/
export interface ConnectionSettings {
/**
* <p>The time, in seconds, that the connection is allowed to be idle (no data has been sent over the connection) before it is closed by the load balancer.</p>
*/
IdleTimeout: number | undefined;
}
export namespace ConnectionSettings {
/**
* @internal
*/
export const filterSensitiveLog = (obj: ConnectionSettings): any => ({
...obj,
});
}
/**
* <p>Information about a listener.</p>
* <p>For information about the protocols and the ports supported by Elastic Load Balancing, see <a href="https://docs.aws.amazon.com/elasticloadbalancing/latest/classic/elb-listener-config.html">Listeners for Your Classic Load Balancer</a>
* in the <i>Classic Load Balancers Guide</i>.</p>
*/
export interface Listener {
/**
* <p>The load balancer transport protocol to use for routing: HTTP, HTTPS, TCP, or SSL.</p>
*/
Protocol: string | undefined;
/**
* <p>The port on which the load balancer is listening. On EC2-VPC, you can specify any port from the range 1-65535. On EC2-Classic, you can specify any port from the following list: 25, 80, 443, 465, 587, 1024-65535.</p>
*/
LoadBalancerPort: number | undefined;
/**
* <p>The protocol to use for routing traffic to instances: HTTP, HTTPS, TCP, or SSL.</p>
* <p>If the front-end protocol is TCP or SSL, the back-end protocol must be TCP or SSL.
* If the front-end protocol is HTTP or HTTPS, the back-end protocol must be HTTP or HTTPS.</p>
* <p>If there is another listener with the same <code>InstancePort</code> whose <code>InstanceProtocol</code> is secure,
* (HTTPS or SSL), the listener's <code>InstanceProtocol</code> must also be secure.</p>
* <p>If there is another listener with the same <code>InstancePort</code> whose <code>InstanceProtocol</code> is HTTP or TCP,
* the listener's <code>InstanceProtocol</code> must be HTTP or TCP.</p>
*/
InstanceProtocol?: string;
/**
* <p>The port on which the instance is listening.</p>
*/
InstancePort: number | undefined;
/**
* <p>The Amazon Resource Name (ARN) of the server certificate.</p>
*/
SSLCertificateId?: string;
}
export namespace Listener {
/**
* @internal
*/
export const filterSensitiveLog = (obj: Listener): any => ({
...obj,
});
}
/**
* <p>Contains the parameters for CreateLoadBalancer.</p>
*/
export interface CreateAccessPointInput {
/**
* <p>The name of the load balancer.</p>
* <p>This name must be unique within your set of load balancers for the region, must have a maximum of 32 characters, must contain only alphanumeric characters or hyphens, and cannot begin or end with a hyphen.</p>
*/
LoadBalancerName: string | undefined;
/**
* <p>The listeners.</p>
* <p>For more information, see <a href="https://docs.aws.amazon.com/elasticloadbalancing/latest/classic/elb-listener-config.html">Listeners for Your Classic Load Balancer</a>
* in the <i>Classic Load Balancers Guide</i>.</p>
*/
Listeners: Listener[] | undefined;
/**
* <p>One or more Availability Zones from the same region as the load balancer.</p>
* <p>You must specify at least one Availability Zone.</p>
* <p>You can add more Availability Zones after you create the load balancer using
* <a>EnableAvailabilityZonesForLoadBalancer</a>.</p>
*/
AvailabilityZones?: string[];
/**
* <p>The IDs of the subnets in your VPC to attach to the load balancer.
* Specify one subnet per Availability Zone specified in <code>AvailabilityZones</code>.</p>
*/
Subnets?: string[];
/**
* <p>The IDs of the security groups to assign to the load balancer.</p>
*/
SecurityGroups?: string[];
/**
* <p>The type of a load balancer. Valid only for load balancers in a VPC.</p>
* <p>By default, Elastic Load Balancing creates an Internet-facing load balancer with a DNS name that resolves to public IP addresses.
* For more information about Internet-facing and Internal load balancers, see <a href="https://docs.aws.amazon.com/elasticloadbalancing/latest/userguide/how-elastic-load-balancing-works.html#load-balancer-scheme">Load Balancer Scheme</a>
* in the <i>Elastic Load Balancing User Guide</i>.</p>
* <p>Specify <code>internal</code> to create a load balancer with a DNS name that resolves to private IP addresses.</p>
*/
Scheme?: string;
/**
* <p>A list of tags to assign to the load balancer.</p>
* <p>For more information about tagging your load balancer, see <a href="https://docs.aws.amazon.com/elasticloadbalancing/latest/classic/add-remove-tags.html">Tag Your Classic Load Balancer</a>
* in the <i>Classic Load Balancers Guide</i>.</p>
*/
Tags?: Tag[];
}
export namespace CreateAccessPointInput {
/**
* @internal
*/
export const filterSensitiveLog = (obj: CreateAccessPointInput): any => ({
...obj,
});
}
/**
* <p>Contains the output for CreateLoadBalancer.</p>
*/
export interface CreateAccessPointOutput {
/**
* <p>The DNS name of the load balancer.</p>
*/
DNSName?: string;
}
export namespace CreateAccessPointOutput {
/**
* @internal
*/
export const filterSensitiveLog = (obj: CreateAccessPointOutput): any => ({
...obj,
});
}
/**
* <p>Contains the parameters for CreateAppCookieStickinessPolicy.</p>
*/
export interface CreateAppCookieStickinessPolicyInput {
/**
* <p>The name of the load balancer.</p>
*/
LoadBalancerName: string | undefined;
/**
* <p>The name of the policy being created. Policy names must consist of alphanumeric characters and dashes (-). This name must be unique within the set of policies for this load balancer.</p>
*/
PolicyName: string | undefined;
/**
* <p>The name of the application cookie used for stickiness.</p>
*/
CookieName: string | undefined;
}
export namespace CreateAppCookieStickinessPolicyInput {
/**
* @internal
*/
export const filterSensitiveLog = (obj: CreateAppCookieStickinessPolicyInput): any => ({
...obj,
});
}
/**
* <p>Contains the output for CreateAppCookieStickinessPolicy.</p>
*/
export interface CreateAppCookieStickinessPolicyOutput {}
export namespace CreateAppCookieStickinessPolicyOutput {
/**
* @internal
*/
export const filterSensitiveLog = (obj: CreateAppCookieStickinessPolicyOutput): any => ({
...obj,
});
}
/**
* <p>A policy with the specified name already exists for this load balancer.</p>
*/
export interface DuplicatePolicyNameException extends __SmithyException, $MetadataBearer {
name: "DuplicatePolicyNameException";
$fault: "client";
Message?: string;
}
export namespace DuplicatePolicyNameException {
/**
* @internal
*/
export const filterSensitiveLog = (obj: DuplicatePolicyNameException): any => ({
...obj,
});
}
/**
* <p>The quota for the number of policies for this load balancer has been reached.</p>
*/
export interface TooManyPoliciesException extends __SmithyException, $MetadataBearer {
name: "TooManyPoliciesException";
$fault: "client";
Message?: string;
}
export namespace TooManyPoliciesException {
/**
* @internal
*/
export const filterSensitiveLog = (obj: TooManyPoliciesException): any => ({
...obj,
});
}
/**
* <p>Contains the parameters for CreateLBCookieStickinessPolicy.</p>
*/
export interface CreateLBCookieStickinessPolicyInput {
/**
* <p>The name of the load balancer.</p>
*/
LoadBalancerName: string | undefined;
/**
* <p>The name of the policy being created. Policy names must consist of alphanumeric characters and dashes (-). This name must be unique within the set of policies for this load balancer.</p>
*/
PolicyName: string | undefined;
/**
* <p>The time period, in seconds, after which the cookie should be considered stale. If you do not specify this parameter, the default value is 0, which indicates that the sticky session should last for the duration of the browser session.</p>
*/
CookieExpirationPeriod?: number;
}
export namespace CreateLBCookieStickinessPolicyInput {
/**
* @internal
*/
export const filterSensitiveLog = (obj: CreateLBCookieStickinessPolicyInput): any => ({
...obj,
});
}
/**
* <p>Contains the output for CreateLBCookieStickinessPolicy.</p>
*/
export interface CreateLBCookieStickinessPolicyOutput {}
export namespace CreateLBCookieStickinessPolicyOutput {
/**
* @internal
*/
export const filterSensitiveLog = (obj: CreateLBCookieStickinessPolicyOutput): any => ({
...obj,
});
}
/**
* <p>The specified load balancer name already exists for this account.</p>
*/
export interface DuplicateAccessPointNameException extends __SmithyException, $MetadataBearer {
name: "DuplicateAccessPointNameException";
$fault: "client";
Message?: string;
}
export namespace DuplicateAccessPointNameException {
/**
* @internal
*/
export const filterSensitiveLog = (obj: DuplicateAccessPointNameException): any => ({
...obj,
});
}
/**
* <p>The specified value for the schema is not valid. You can only specify a scheme for load balancers in a VPC.</p>
*/
export interface InvalidSchemeException extends __SmithyException, $MetadataBearer {
name: "InvalidSchemeException";
$fault: "client";
Message?: string;
}
export namespace InvalidSchemeException {
/**
* @internal
*/
export const filterSensitiveLog = (obj: InvalidSchemeException): any => ({
...obj,
});
}
/**
* <p>This operation is not allowed.</p>
*/
export interface OperationNotPermittedException extends __SmithyException, $MetadataBearer {
name: "OperationNotPermittedException";
$fault: "client";
Message?: string;
}
export namespace OperationNotPermittedException {
/**
* @internal
*/
export const filterSensitiveLog = (obj: OperationNotPermittedException): any => ({
...obj,
});
}
/**
* <p>The quota for the number of load balancers has been reached.</p>
*/
export interface TooManyAccessPointsException extends __SmithyException, $MetadataBearer {
name: "TooManyAccessPointsException";
$fault: "client";
Message?: string;
}
export namespace TooManyAccessPointsException {
/**
* @internal
*/
export const filterSensitiveLog = (obj: TooManyAccessPointsException): any => ({
...obj,
});
}
/**
* <p>The specified protocol or signature version is not supported.</p>
*/
export interface UnsupportedProtocolException extends __SmithyException, $MetadataBearer {
name: "UnsupportedProtocolException";
$fault: "client";
Message?: string;
}
export namespace UnsupportedProtocolException {
/**
* @internal
*/
export const filterSensitiveLog = (obj: UnsupportedProtocolException): any => ({
...obj,
});
}
/**
* <p>Contains the parameters for CreateLoadBalancerListeners.</p>
*/
export interface CreateLoadBalancerListenerInput {
/**
* <p>The name of the load balancer.</p>
*/
LoadBalancerName: string | undefined;
/**
* <p>The listeners.</p>
*/
Listeners: Listener[] | undefined;
}
export namespace CreateLoadBalancerListenerInput {
/**
* @internal
*/
export const filterSensitiveLog = (obj: CreateLoadBalancerListenerInput): any => ({
...obj,
});
}
/**
* <p>Contains the parameters for CreateLoadBalancerListener.</p>
*/
export interface CreateLoadBalancerListenerOutput {}
export namespace CreateLoadBalancerListenerOutput {
/**
* @internal
*/
export const filterSensitiveLog = (obj: CreateLoadBalancerListenerOutput): any => ({
...obj,
});
}
/**
* <p>A listener already exists for the specified load balancer name and port, but with a different instance port, protocol, or SSL certificate.</p>
*/
export interface DuplicateListenerException extends __SmithyException, $MetadataBearer {
name: "DuplicateListenerException";
$fault: "client";
Message?: string;
}
export namespace DuplicateListenerException {
/**
* @internal
*/
export const filterSensitiveLog = (obj: DuplicateListenerException): any => ({
...obj,
});
}
/**
* <p>Information about a policy attribute.</p>
*/
export interface PolicyAttribute {
/**
* <p>The name of the attribute.</p>
*/
AttributeName?: string;
/**
* <p>The value of the attribute.</p>
*/
AttributeValue?: string;
}
export namespace PolicyAttribute {
/**
* @internal
*/
export const filterSensitiveLog = (obj: PolicyAttribute): any => ({
...obj,
});
}
/**
* <p>Contains the parameters for CreateLoadBalancerPolicy.</p>
*/
export interface CreateLoadBalancerPolicyInput {
/**
* <p>The name of the load balancer.</p>
*/
LoadBalancerName: string | undefined;
/**
* <p>The name of the load balancer policy to be created. This name must be unique within the set of policies for this load balancer.</p>
*/
PolicyName: string | undefined;
/**
* <p>The name of the base policy type.
* To get the list of policy types, use <a>DescribeLoadBalancerPolicyTypes</a>.</p>
*/
PolicyTypeName: string | undefined;
/**
* <p>The policy attributes.</p>
*/
PolicyAttributes?: PolicyAttribute[];
}
export namespace CreateLoadBalancerPolicyInput {
/**
* @internal
*/
export const filterSensitiveLog = (obj: CreateLoadBalancerPolicyInput): any => ({
...obj,
});
}
/**
* <p>Contains the output of CreateLoadBalancerPolicy.</p>
*/
export interface CreateLoadBalancerPolicyOutput {}
export namespace CreateLoadBalancerPolicyOutput {
/**
* @internal
*/
export const filterSensitiveLog = (obj: CreateLoadBalancerPolicyOutput): any => ({
...obj,
});
}
/**
* <p>One or more of the specified policy types do not exist.</p>
*/
export interface PolicyTypeNotFoundException extends __SmithyException, $MetadataBearer {
name: "PolicyTypeNotFoundException";
$fault: "client";
Message?: string;
}
export namespace PolicyTypeNotFoundException {
/**
* @internal
*/
export const filterSensitiveLog = (obj: PolicyTypeNotFoundException): any => ({
...obj,
});
}
/**
* <p>Information about the <code>CrossZoneLoadBalancing</code> attribute.</p>
*/
export interface CrossZoneLoadBalancing {
/**
* <p>Specifies whether cross-zone load balancing is enabled for the load balancer.</p>
*/
Enabled: boolean | undefined;
}
export namespace CrossZoneLoadBalancing {
/**
* @internal
*/
export const filterSensitiveLog = (obj: CrossZoneLoadBalancing): any => ({
...obj,
});
}
/**
* <p>Contains the parameters for DeleteLoadBalancer.</p>
*/
export interface DeleteAccessPointInput {
/**
* <p>The name of the load balancer.</p>
*/
LoadBalancerName: string | undefined;
}
export namespace DeleteAccessPointInput {
/**
* @internal
*/
export const filterSensitiveLog = (obj: DeleteAccessPointInput): any => ({
...obj,
});
}
/**
* <p>Contains the output of DeleteLoadBalancer.</p>
*/
export interface DeleteAccessPointOutput {}
export namespace DeleteAccessPointOutput {
/**
* @internal
*/
export const filterSensitiveLog = (obj: DeleteAccessPointOutput): any => ({
...obj,
});
}
/**
* <p>Contains the parameters for DeleteLoadBalancerListeners.</p>
*/
export interface DeleteLoadBalancerListenerInput {
/**
* <p>The name of the load balancer.</p>
*/
LoadBalancerName: string | undefined;
/**
* <p>The client port numbers of the listeners.</p>
*/
LoadBalancerPorts: number[] | undefined;
}
export namespace DeleteLoadBalancerListenerInput {
/**
* @internal
*/
export const filterSensitiveLog = (obj: DeleteLoadBalancerListenerInput): any => ({
...obj,
});
}
/**
* <p>Contains the output of DeleteLoadBalancerListeners.</p>
*/
export interface DeleteLoadBalancerListenerOutput {}
export namespace DeleteLoadBalancerListenerOutput {
/**
* @internal
*/
export const filterSensitiveLog = (obj: DeleteLoadBalancerListenerOutput): any => ({
...obj,
});
}
/**
* <p>Contains the parameters for DeleteLoadBalancerPolicy.</p>
*/
export interface DeleteLoadBalancerPolicyInput {
/**
* <p>The name of the load balancer.</p>
*/
LoadBalancerName: string | undefined;
/**
* <p>The name of the policy.</p>
*/
PolicyName: string | undefined;
}
export namespace DeleteLoadBalancerPolicyInput {
/**
* @internal
*/
export const filterSensitiveLog = (obj: DeleteLoadBalancerPolicyInput): any => ({
...obj,
});
}
/**
* <p>Contains the output of DeleteLoadBalancerPolicy.</p>
*/
export interface DeleteLoadBalancerPolicyOutput {}
export namespace DeleteLoadBalancerPolicyOutput {
/**
* @internal
*/
export const filterSensitiveLog = (obj: DeleteLoadBalancerPolicyOutput): any => ({
...obj,
});
}
/**
* <p>A request made by Elastic Load Balancing to another service exceeds the maximum request rate permitted for your account.</p>
*/
export interface DependencyThrottleException extends __SmithyException, $MetadataBearer {
name: "DependencyThrottleException";
$fault: "client";
Message?: string;
}
export namespace DependencyThrottleException {
/**
* @internal
*/
export const filterSensitiveLog = (obj: DependencyThrottleException): any => ({
...obj,
});
}
/**
* <p>The ID of an EC2 instance.</p>
*/
export interface Instance {
/**
* <p>The instance ID.</p>
*/
InstanceId?: string;
}
export namespace Instance {
/**
* @internal
*/
export const filterSensitiveLog = (obj: Instance): any => ({
...obj,
});
}
/**
* <p>Contains the parameters for DeregisterInstancesFromLoadBalancer.</p>
*/
export interface DeregisterEndPointsInput {
/**
* <p>The name of the load balancer.</p>
*/
LoadBalancerName: string | undefined;
/**
* <p>The IDs of the instances.</p>
*/
Instances: Instance[] | undefined;
}
export namespace DeregisterEndPointsInput {
/**
* @internal
*/
export const filterSensitiveLog = (obj: DeregisterEndPointsInput): any => ({
...obj,
});
}
/**
* <p>Contains the output of DeregisterInstancesFromLoadBalancer.</p>
*/
export interface DeregisterEndPointsOutput {
/**
* <p>The remaining instances registered with the load balancer.</p>
*/
Instances?: Instance[];
}
export namespace DeregisterEndPointsOutput {
/**
* @internal
*/
export const filterSensitiveLog = (obj: DeregisterEndPointsOutput): any => ({
...obj,
});
}
/**
* <p>The specified endpoint is not valid.</p>
*/
export interface InvalidEndPointException extends __SmithyException, $MetadataBearer {
name: "InvalidEndPointException";
$fault: "client";
Message?: string;
}
export namespace InvalidEndPointException {
/**
* @internal
*/
export const filterSensitiveLog = (obj: InvalidEndPointException): any => ({
...obj,
});
}
/**
* <p>Contains the parameters for DescribeLoadBalancers.</p>
*/
export interface DescribeAccessPointsInput {
/**
* <p>The names of the load balancers.</p>
*/
LoadBalancerNames?: string[];
/**
* <p>The marker for the next set of results. (You received this marker from a previous call.)</p>
*/
Marker?: string;
/**
* <p>The maximum number of results to return with this call (a number from 1 to 400). The default is 400.</p>
*/
PageSize?: number;
}
export namespace DescribeAccessPointsInput {
/**
* @internal
*/
export const filterSensitiveLog = (obj: DescribeAccessPointsInput): any => ({
...obj,
});
}
/**
* <p>The policies enabled for a listener.</p>
*/
export interface ListenerDescription {
/**
* <p>The listener.</p>
*/
Listener?: Listener;
/**
* <p>The policies. If there are no policies enabled, the list is empty.</p>
*/
PolicyNames?: string[];
}
export namespace ListenerDescription {
/**
* @internal
*/
export const filterSensitiveLog = (obj: ListenerDescription): any => ({
...obj,
});
}
/**
* <p>Information about a policy for duration-based session stickiness.</p>
*/
export interface LBCookieStickinessPolicy {
/**
* <p>The name of the policy. This name must be unique within the set of policies for this load balancer.</p>
*/
PolicyName?: string;
/**
* <p>The time period, in seconds, after which the cookie should be considered stale. If this parameter is not specified, the stickiness session lasts for the duration of the browser session.</p>
*/
CookieExpirationPeriod?: number;
}
export namespace LBCookieStickinessPolicy {
/**
* @internal
*/
export const filterSensitiveLog = (obj: LBCookieStickinessPolicy): any => ({
...obj,
});
}
/**
* <p>The policies for a load balancer.</p>
*/
export interface Policies {
/**
* <p>The stickiness policies created using <a>CreateAppCookieStickinessPolicy</a>.</p>
*/
AppCookieStickinessPolicies?: AppCookieStickinessPolicy[];
/**
* <p>The stickiness policies created using <a>CreateLBCookieStickinessPolicy</a>.</p>
*/
LBCookieStickinessPolicies?: LBCookieStickinessPolicy[];
/**
* <p>The policies other than the stickiness policies.</p>
*/
OtherPolicies?: string[];
}
export namespace Policies {
/**
* @internal
*/
export const filterSensitiveLog = (obj: Policies): any => ({
...obj,
});
}
/**
* <p>Information about a source security group.</p>
*/
export interface SourceSecurityGroup {
/**
* <p>The owner of the security group.</p>
*/
OwnerAlias?: string;
/**
* <p>The name of the security group.</p>
*/
GroupName?: string;
}
export namespace SourceSecurityGroup {
/**
* @internal
*/
export const filterSensitiveLog = (obj: SourceSecurityGroup): any => ({
...obj,
});
}
/**
 * <p>Information about a load balancer.</p>
 */
export interface LoadBalancerDescription {
  /**
   * <p>The name of the load balancer.</p>
   */
  LoadBalancerName?: string;
  /**
   * <p>The DNS name of the load balancer.</p>
   */
  DNSName?: string;
  /**
   * <p>The DNS name of the load balancer.</p>
   * <p>For more information, see <a href="https://docs.aws.amazon.com/elasticloadbalancing/latest/classic/using-domain-names-with-elb.html">Configure a Custom Domain Name</a>
   * in the <i>Classic Load Balancers Guide</i>.</p>
   */
  CanonicalHostedZoneName?: string;
  /**
   * <p>The ID of the Amazon Route 53 hosted zone for the load balancer.</p>
   */
  CanonicalHostedZoneNameID?: string;
  /**
   * <p>The listeners for the load balancer.</p>
   */
  ListenerDescriptions?: ListenerDescription[];
  /**
   * <p>The policies defined for the load balancer.</p>
   */
  Policies?: Policies;
  /**
   * <p>Information about your EC2 instances.</p>
   */
  BackendServerDescriptions?: BackendServerDescription[];
  /**
   * <p>The Availability Zones for the load balancer.</p>
   */
  AvailabilityZones?: string[];
  /**
   * <p>The IDs of the subnets for the load balancer.</p>
   */
  Subnets?: string[];
  /**
   * <p>The ID of the VPC for the load balancer.</p>
   */
  VPCId?: string;
  /**
   * <p>The IDs of the instances for the load balancer.</p>
   */
  Instances?: Instance[];
  /**
   * <p>Information about the health checks conducted on the load balancer.</p>
   */
  HealthCheck?: HealthCheck;
  /**
   * <p>The security group for the load balancer, which you can use as part of your inbound rules for your registered instances.
   * To only allow traffic from load balancers, add a security group rule that specifies this source security group as the inbound source.</p>
   */
  SourceSecurityGroup?: SourceSecurityGroup;
  /**
   * <p>The security groups for the load balancer. Valid only for load balancers in a VPC.</p>
   */
  SecurityGroups?: string[];
  /**
   * <p>The date and time the load balancer was created.</p>
   */
  CreatedTime?: Date;
  /**
   * <p>The type of load balancer. Valid only for load balancers in a VPC.</p>
   * <p>If <code>Scheme</code> is <code>internet-facing</code>, the load balancer
   * has a public DNS name that resolves to a public IP address.</p>
   * <p>If <code>Scheme</code> is <code>internal</code>, the load balancer has a public
   * DNS name that resolves to a private IP address.</p>
   */
  Scheme?: string;
}
export namespace LoadBalancerDescription {
  /**
   * @internal
   * Log-safe view of the object. This shape carries no sensitive members,
   * so the result is a plain shallow copy.
   */
  export const filterSensitiveLog = (obj: LoadBalancerDescription): any => Object.assign({}, obj);
}
/**
 * <p>Contains the output of DescribeLoadBalancers.</p>
 */
export interface DescribeAccessPointsOutput {
  /**
   * <p>Information about the load balancers.</p>
   */
  LoadBalancerDescriptions?: LoadBalancerDescription[];
  /**
   * <p>The marker to use when requesting the next set of results. If there are no additional results, the string is empty.</p>
   */
  NextMarker?: string;
}
export namespace DescribeAccessPointsOutput {
  /**
   * @internal
   * Log-safe view of the object. This shape carries no sensitive members,
   * so the result is a plain shallow copy.
   */
  export const filterSensitiveLog = (obj: DescribeAccessPointsOutput): any => Object.assign({}, obj);
}
export interface DescribeAccountLimitsInput {
  /**
   * <p>The marker for the next set of results. (You received this marker from a previous call.)</p>
   */
  Marker?: string;
  /**
   * <p>The maximum number of results to return with this call.</p>
   */
  PageSize?: number;
}
export namespace DescribeAccountLimitsInput {
  /**
   * @internal
   * Log-safe view of the object. This shape carries no sensitive members,
   * so the result is a plain shallow copy.
   */
  export const filterSensitiveLog = (obj: DescribeAccountLimitsInput): any => Object.assign({}, obj);
}
/**
 * <p>Information about an Elastic Load Balancing resource limit for your AWS account.</p>
 */
export interface Limit {
  /**
   * <p>The name of the limit. The possible values are:</p>
   * <ul>
   * <li>
   * <p>classic-listeners</p>
   * </li>
   * <li>
   * <p>classic-load-balancers</p>
   * </li>
   * <li>
   * <p>classic-registered-instances</p>
   * </li>
   * </ul>
   */
  Name?: string;
  /**
   * <p>The maximum value of the limit.</p>
   */
  Max?: string;
}
export namespace Limit {
  /**
   * @internal
   * Log-safe view of the object. This shape carries no sensitive members,
   * so the result is a plain shallow copy.
   */
  export const filterSensitiveLog = (obj: Limit): any => Object.assign({}, obj);
}
export interface DescribeAccountLimitsOutput {
  /**
   * <p>Information about the limits.</p>
   */
  Limits?: Limit[];
  /**
   * <p>The marker to use when requesting the next set of results. If there are no additional results, the string is empty.</p>
   */
  NextMarker?: string;
}
export namespace DescribeAccountLimitsOutput {
  /**
   * @internal
   * Log-safe view of the object. This shape carries no sensitive members,
   * so the result is a plain shallow copy.
   */
  export const filterSensitiveLog = (obj: DescribeAccountLimitsOutput): any => Object.assign({}, obj);
}
/**
 * <p>Contains the parameters for DescribeInstanceHealth.</p>
 */
export interface DescribeEndPointStateInput {
  /**
   * <p>The name of the load balancer.</p>
   */
  LoadBalancerName: string | undefined;
  /**
   * <p>The IDs of the instances.</p>
   */
  Instances?: Instance[];
}
export namespace DescribeEndPointStateInput {
  /**
   * @internal
   * Log-safe view of the object. This shape carries no sensitive members,
   * so the result is a plain shallow copy.
   */
  export const filterSensitiveLog = (obj: DescribeEndPointStateInput): any => Object.assign({}, obj);
}
/**
 * <p>Information about the state of an EC2 instance.</p>
 */
export interface InstanceState {
  /**
   * <p>The ID of the instance.</p>
   */
  InstanceId?: string;
  /**
   * <p>The current state of the instance.</p>
   * <p>Valid values: <code>InService</code> | <code>OutOfService</code> | <code>Unknown</code>
   * </p>
   */
  State?: string;
  /**
   * <p>Information about the cause of <code>OutOfService</code> instances.
   * Specifically, whether the cause is Elastic Load Balancing or the instance.</p>
   * <p>Valid values: <code>ELB</code> | <code>Instance</code> | <code>N/A</code>
   * </p>
   */
  ReasonCode?: string;
  /**
   * <p>A description of the instance state. This string can contain one or more of the following messages.</p>
   * <ul>
   * <li>
   * <p>
   * <code>N/A</code>
   * </p>
   * </li>
   * <li>
   * <p>
   * <code>A transient error occurred. Please try again later.</code>
   * </p>
   * </li>
   * <li>
   * <p>
   * <code>Instance has failed at least the UnhealthyThreshold number of health checks consecutively.</code>
   * </p>
   * </li>
   * <li>
   * <p>
   * <code>Instance has not passed the configured HealthyThreshold number of health checks consecutively.</code>
   * </p>
   * </li>
   * <li>
   * <p>
   * <code>Instance registration is still in progress.</code>
   * </p>
   * </li>
   * <li>
   * <p>
   * <code>Instance is in the EC2 Availability Zone for which LoadBalancer is not configured to route traffic to.</code>
   * </p>
   * </li>
   * <li>
   * <p>
   * <code>Instance is not currently registered with the LoadBalancer.</code>
   * </p>
   * </li>
   * <li>
   * <p>
   * <code>Instance deregistration currently in progress.</code>
   * </p>
   * </li>
   * <li>
   * <p>
   * <code>Disable Availability Zone is currently in progress.</code>
   * </p>
   * </li>
   * <li>
   * <p>
   * <code>Instance is in pending state.</code>
   * </p>
   * </li>
   * <li>
   * <p>
   * <code>Instance is in stopped state.</code>
   * </p>
   * </li>
   * <li>
   * <p>
   * <code>Instance is in terminated state.</code>
   * </p>
   * </li>
   * </ul>
   */
  Description?: string;
}
export namespace InstanceState {
  /**
   * @internal
   * Log-safe view of the object. This shape carries no sensitive members,
   * so the result is a plain shallow copy.
   */
  export const filterSensitiveLog = (obj: InstanceState): any => Object.assign({}, obj);
}
/**
 * <p>Contains the output for DescribeInstanceHealth.</p>
 */
export interface DescribeEndPointStateOutput {
  /**
   * <p>Information about the health of the instances.</p>
   */
  InstanceStates?: InstanceState[];
}
export namespace DescribeEndPointStateOutput {
  /**
   * @internal
   * Log-safe view of the object. This shape carries no sensitive members,
   * so the result is a plain shallow copy.
   */
  export const filterSensitiveLog = (obj: DescribeEndPointStateOutput): any => Object.assign({}, obj);
}
/**
 * <p>Contains the parameters for DescribeLoadBalancerAttributes.</p>
 */
export interface DescribeLoadBalancerAttributesInput {
  /**
   * <p>The name of the load balancer.</p>
   */
  LoadBalancerName: string | undefined;
}
export namespace DescribeLoadBalancerAttributesInput {
  /**
   * @internal
   * Log-safe view of the object. This shape carries no sensitive members,
   * so the result is a plain shallow copy.
   */
  export const filterSensitiveLog = (obj: DescribeLoadBalancerAttributesInput): any => Object.assign({}, obj);
}
/**
 * <p>The attributes for a load balancer.</p>
 */
export interface LoadBalancerAttributes {
  /**
   * <p>If enabled, the load balancer routes the request traffic evenly across all instances regardless of the Availability Zones.</p>
   * <p>For more information, see <a href="https://docs.aws.amazon.com/elasticloadbalancing/latest/classic/enable-disable-crosszone-lb.html">Configure Cross-Zone Load Balancing</a>
   * in the <i>Classic Load Balancers Guide</i>.</p>
   */
  CrossZoneLoadBalancing?: CrossZoneLoadBalancing;
  /**
   * <p>If enabled, the load balancer captures detailed information of all requests and delivers the information to the Amazon S3 bucket that you specify.</p>
   * <p>For more information, see <a href="https://docs.aws.amazon.com/elasticloadbalancing/latest/classic/enable-access-logs.html">Enable Access Logs</a>
   * in the <i>Classic Load Balancers Guide</i>.</p>
   */
  AccessLog?: AccessLog;
  /**
   * <p>If enabled, the load balancer allows existing requests to complete before the load balancer shifts traffic away from a deregistered or unhealthy instance.</p>
   * <p>For more information, see <a href="https://docs.aws.amazon.com/elasticloadbalancing/latest/classic/config-conn-drain.html">Configure Connection Draining</a>
   * in the <i>Classic Load Balancers Guide</i>.</p>
   */
  ConnectionDraining?: ConnectionDraining;
  /**
   * <p>If enabled, the load balancer allows the connections to remain idle (no data is sent over the connection) for the specified duration.</p>
   * <p>By default, Elastic Load Balancing maintains a 60-second idle connection timeout for both front-end and back-end connections of your load balancer.
   * For more information, see <a href="https://docs.aws.amazon.com/elasticloadbalancing/latest/classic/config-idle-timeout.html">Configure Idle Connection Timeout</a>
   * in the <i>Classic Load Balancers Guide</i>.</p>
   */
  ConnectionSettings?: ConnectionSettings;
  /**
   * <p>Any additional attributes.</p>
   */
  AdditionalAttributes?: AdditionalAttribute[];
}
export namespace LoadBalancerAttributes {
  /**
   * @internal
   * Log-safe view of the object. This shape carries no sensitive members,
   * so the result is a plain shallow copy.
   */
  export const filterSensitiveLog = (obj: LoadBalancerAttributes): any => Object.assign({}, obj);
}
/**
 * <p>Contains the output of DescribeLoadBalancerAttributes.</p>
 */
export interface DescribeLoadBalancerAttributesOutput {
  /**
   * <p>Information about the load balancer attributes.</p>
   */
  LoadBalancerAttributes?: LoadBalancerAttributes;
}
export namespace DescribeLoadBalancerAttributesOutput {
  /**
   * @internal
   * Log-safe view of the object. This shape carries no sensitive members,
   * so the result is a plain shallow copy.
   */
  export const filterSensitiveLog = (obj: DescribeLoadBalancerAttributesOutput): any => Object.assign({}, obj);
}
/**
 * <p>The specified load balancer attribute does not exist.</p>
 */
export interface LoadBalancerAttributeNotFoundException extends __SmithyException, $MetadataBearer {
  name: "LoadBalancerAttributeNotFoundException";
  $fault: "client";
  Message?: string;
}
export namespace LoadBalancerAttributeNotFoundException {
  /**
   * @internal
   * Log-safe view of the object. This shape carries no sensitive members,
   * so the result is a plain shallow copy.
   */
  export const filterSensitiveLog = (obj: LoadBalancerAttributeNotFoundException): any => Object.assign({}, obj);
}
/**
 * <p>Contains the parameters for DescribeLoadBalancerPolicies.</p>
 */
export interface DescribeLoadBalancerPoliciesInput {
  /**
   * <p>The name of the load balancer.</p>
   */
  LoadBalancerName?: string;
  /**
   * <p>The names of the policies.</p>
   */
  PolicyNames?: string[];
}
export namespace DescribeLoadBalancerPoliciesInput {
  /**
   * @internal
   * Log-safe view of the object. This shape carries no sensitive members,
   * so the result is a plain shallow copy.
   */
  export const filterSensitiveLog = (obj: DescribeLoadBalancerPoliciesInput): any => Object.assign({}, obj);
}
/**
 * <p>Information about a policy attribute.</p>
 */
export interface PolicyAttributeDescription {
  /**
   * <p>The name of the attribute.</p>
   */
  AttributeName?: string;
  /**
   * <p>The value of the attribute.</p>
   */
  AttributeValue?: string;
}
export namespace PolicyAttributeDescription {
  /**
   * @internal
   * Log-safe view of the object. This shape carries no sensitive members,
   * so the result is a plain shallow copy.
   */
  export const filterSensitiveLog = (obj: PolicyAttributeDescription): any => Object.assign({}, obj);
}
/**
 * <p>Information about a policy.</p>
 */
export interface PolicyDescription {
  /**
   * <p>The name of the policy.</p>
   */
  PolicyName?: string;
  /**
   * <p>The name of the policy type.</p>
   */
  PolicyTypeName?: string;
  /**
   * <p>The policy attributes.</p>
   */
  PolicyAttributeDescriptions?: PolicyAttributeDescription[];
}
export namespace PolicyDescription {
  /**
   * @internal
   * Log-safe view of the object. This shape carries no sensitive members,
   * so the result is a plain shallow copy.
   */
  export const filterSensitiveLog = (obj: PolicyDescription): any => Object.assign({}, obj);
}
/**
 * <p>Contains the output of DescribeLoadBalancerPolicies.</p>
 */
export interface DescribeLoadBalancerPoliciesOutput {
  /**
   * <p>Information about the policies.</p>
   */
  PolicyDescriptions?: PolicyDescription[];
}
export namespace DescribeLoadBalancerPoliciesOutput {
  /**
   * @internal
   * Log-safe view of the object. This shape carries no sensitive members,
   * so the result is a plain shallow copy.
   */
  export const filterSensitiveLog = (obj: DescribeLoadBalancerPoliciesOutput): any => Object.assign({}, obj);
}
/**
 * <p>One or more of the specified policies do not exist.</p>
 */
export interface PolicyNotFoundException extends __SmithyException, $MetadataBearer {
  name: "PolicyNotFoundException";
  $fault: "client";
  Message?: string;
}
export namespace PolicyNotFoundException {
  /**
   * @internal
   * Log-safe view of the object. This shape carries no sensitive members,
   * so the result is a plain shallow copy.
   */
  export const filterSensitiveLog = (obj: PolicyNotFoundException): any => Object.assign({}, obj);
}
/**
 * <p>Contains the parameters for DescribeLoadBalancerPolicyTypes.</p>
 */
export interface DescribeLoadBalancerPolicyTypesInput {
  /**
   * <p>The names of the policy types. If no names are specified, describes all policy types defined by Elastic Load Balancing.</p>
   */
  PolicyTypeNames?: string[];
}
export namespace DescribeLoadBalancerPolicyTypesInput {
  /**
   * @internal
   * Log-safe view of the object. This shape carries no sensitive members,
   * so the result is a plain shallow copy.
   */
  export const filterSensitiveLog = (obj: DescribeLoadBalancerPolicyTypesInput): any => Object.assign({}, obj);
}
/**
 * <p>Information about a policy attribute type.</p>
 */
export interface PolicyAttributeTypeDescription {
  /**
   * <p>The name of the attribute.</p>
   */
  AttributeName?: string;
  /**
   * <p>The type of the attribute. For example, <code>Boolean</code> or <code>Integer</code>.</p>
   */
  AttributeType?: string;
  /**
   * <p>A description of the attribute.</p>
   */
  Description?: string;
  /**
   * <p>The default value of the attribute, if applicable.</p>
   */
  DefaultValue?: string;
  /**
   * <p>The cardinality of the attribute.</p>
   * <p>Valid values:</p>
   * <ul>
   * <li>
   * <p>ONE(1) : Single value required</p>
   * </li>
   * <li>
   * <p>ZERO_OR_ONE(0..1) : Up to one value is allowed</p>
   * </li>
   * <li>
   * <p>ZERO_OR_MORE(0..*) : Optional. Multiple values are allowed</p>
   * </li>
   * <li>
   * <p>ONE_OR_MORE(1..*) : Required. Multiple values are allowed</p>
   * </li>
   * </ul>
   */
  Cardinality?: string;
}
export namespace PolicyAttributeTypeDescription {
  /**
   * @internal
   * Log-safe view of the object. This shape carries no sensitive members,
   * so the result is a plain shallow copy.
   */
  export const filterSensitiveLog = (obj: PolicyAttributeTypeDescription): any => Object.assign({}, obj);
}
/**
 * <p>Information about a policy type.</p>
 */
export interface PolicyTypeDescription {
  /**
   * <p>The name of the policy type.</p>
   */
  PolicyTypeName?: string;
  /**
   * <p>A description of the policy type.</p>
   */
  Description?: string;
  /**
   * <p>The description of the policy attributes associated with the policies defined by Elastic Load Balancing.</p>
   */
  PolicyAttributeTypeDescriptions?: PolicyAttributeTypeDescription[];
}
export namespace PolicyTypeDescription {
  /**
   * @internal
   * Log-safe view of the object. This shape carries no sensitive members,
   * so the result is a plain shallow copy.
   */
  export const filterSensitiveLog = (obj: PolicyTypeDescription): any => Object.assign({}, obj);
}
/**
 * <p>Contains the output of DescribeLoadBalancerPolicyTypes.</p>
 */
export interface DescribeLoadBalancerPolicyTypesOutput {
  /**
   * <p>Information about the policy types.</p>
   */
  PolicyTypeDescriptions?: PolicyTypeDescription[];
}
export namespace DescribeLoadBalancerPolicyTypesOutput {
  /**
   * @internal
   * Log-safe view of the object. This shape carries no sensitive members,
   * so the result is a plain shallow copy.
   */
  export const filterSensitiveLog = (obj: DescribeLoadBalancerPolicyTypesOutput): any => Object.assign({}, obj);
}
/**
 * <p>Contains the parameters for DescribeTags.</p>
 */
export interface DescribeTagsInput {
  /**
   * <p>The names of the load balancers.</p>
   */
  LoadBalancerNames: string[] | undefined;
}
export namespace DescribeTagsInput {
  /**
   * @internal
   * Log-safe view of the object. This shape carries no sensitive members,
   * so the result is a plain shallow copy.
   */
  export const filterSensitiveLog = (obj: DescribeTagsInput): any => Object.assign({}, obj);
}
/**
 * <p>The tags associated with a load balancer.</p>
 */
export interface TagDescription {
  /**
   * <p>The name of the load balancer.</p>
   */
  LoadBalancerName?: string;
  /**
   * <p>The tags.</p>
   */
  Tags?: Tag[];
}
export namespace TagDescription {
  /**
   * @internal
   * Log-safe view of the object. This shape carries no sensitive members,
   * so the result is a plain shallow copy.
   */
  export const filterSensitiveLog = (obj: TagDescription): any => Object.assign({}, obj);
}
/**
 * <p>Contains the output for DescribeTags.</p>
 */
export interface DescribeTagsOutput {
  /**
   * <p>Information about the tags.</p>
   */
  TagDescriptions?: TagDescription[];
}
export namespace DescribeTagsOutput {
  /**
   * @internal
   * Log-safe view of the object. This shape carries no sensitive members,
   * so the result is a plain shallow copy.
   */
  export const filterSensitiveLog = (obj: DescribeTagsOutput): any => Object.assign({}, obj);
}
/**
 * <p>Contains the parameters for DetachLoadBalancerFromSubnets.</p>
 */
export interface DetachLoadBalancerFromSubnetsInput {
  /**
   * <p>The name of the load balancer.</p>
   */
  LoadBalancerName: string | undefined;
  /**
   * <p>The IDs of the subnets.</p>
   */
  Subnets: string[] | undefined;
}
export namespace DetachLoadBalancerFromSubnetsInput {
  /**
   * @internal
   * Log-safe view of the object. This shape carries no sensitive members,
   * so the result is a plain shallow copy.
   */
  export const filterSensitiveLog = (obj: DetachLoadBalancerFromSubnetsInput): any => Object.assign({}, obj);
}
/**
 * <p>Contains the output of DetachLoadBalancerFromSubnets.</p>
 */
export interface DetachLoadBalancerFromSubnetsOutput {
  /**
   * <p>The IDs of the remaining subnets for the load balancer.</p>
   */
  Subnets?: string[];
}
export namespace DetachLoadBalancerFromSubnetsOutput {
  /**
   * @internal
   * Log-safe view of the object. This shape carries no sensitive members,
   * so the result is a plain shallow copy.
   */
  export const filterSensitiveLog = (obj: DetachLoadBalancerFromSubnetsOutput): any => Object.assign({}, obj);
}
/**
 * <p>Contains the parameters for DisableAvailabilityZonesForLoadBalancer.</p>
 */
export interface RemoveAvailabilityZonesInput {
  /**
   * <p>The name of the load balancer.</p>
   */
  LoadBalancerName: string | undefined;
  /**
   * <p>The Availability Zones.</p>
   */
  AvailabilityZones: string[] | undefined;
}
export namespace RemoveAvailabilityZonesInput {
  /**
   * @internal
   * Log-safe view of the object. This shape carries no sensitive members,
   * so the result is a plain shallow copy.
   */
  export const filterSensitiveLog = (obj: RemoveAvailabilityZonesInput): any => Object.assign({}, obj);
}
/**
 * <p>Contains the output for DisableAvailabilityZonesForLoadBalancer.</p>
 */
export interface RemoveAvailabilityZonesOutput {
  /**
   * <p>The remaining Availability Zones for the load balancer.</p>
   */
  AvailabilityZones?: string[];
}
export namespace RemoveAvailabilityZonesOutput {
  /**
   * @internal
   * Log-safe view of the object. This shape carries no sensitive members,
   * so the result is a plain shallow copy.
   */
  export const filterSensitiveLog = (obj: RemoveAvailabilityZonesOutput): any => Object.assign({}, obj);
}
/**
 * <p>Contains the parameters for ModifyLoadBalancerAttributes.</p>
 */
export interface ModifyLoadBalancerAttributesInput {
  /**
   * <p>The name of the load balancer.</p>
   */
  LoadBalancerName: string | undefined;
  /**
   * <p>The attributes for the load balancer.</p>
   */
  LoadBalancerAttributes: LoadBalancerAttributes | undefined;
}
export namespace ModifyLoadBalancerAttributesInput {
  /**
   * @internal
   * Log-safe view of the object. This shape carries no sensitive members,
   * so the result is a plain shallow copy.
   */
  export const filterSensitiveLog = (obj: ModifyLoadBalancerAttributesInput): any => Object.assign({}, obj);
}
/**
 * <p>Contains the output of ModifyLoadBalancerAttributes.</p>
 */
export interface ModifyLoadBalancerAttributesOutput {
  /**
   * <p>The name of the load balancer.</p>
   */
  LoadBalancerName?: string;
  /**
   * <p>Information about the load balancer attributes.</p>
   */
  LoadBalancerAttributes?: LoadBalancerAttributes;
}
export namespace ModifyLoadBalancerAttributesOutput {
  /**
   * @internal
   * Log-safe view of the object. This shape carries no sensitive members,
   * so the result is a plain shallow copy.
   */
  export const filterSensitiveLog = (obj: ModifyLoadBalancerAttributesOutput): any => Object.assign({}, obj);
}
/**
 * <p>Contains the parameters for RegisterInstancesWithLoadBalancer.</p>
 */
export interface RegisterEndPointsInput {
  /**
   * <p>The name of the load balancer.</p>
   */
  LoadBalancerName: string | undefined;
  /**
   * <p>The IDs of the instances.</p>
   */
  Instances: Instance[] | undefined;
}
export namespace RegisterEndPointsInput {
  /**
   * @internal
   * Log-safe view of the object. This shape carries no sensitive members,
   * so the result is a plain shallow copy.
   */
  export const filterSensitiveLog = (obj: RegisterEndPointsInput): any => Object.assign({}, obj);
}
/**
 * <p>Contains the output of RegisterInstancesWithLoadBalancer.</p>
 */
export interface RegisterEndPointsOutput {
  /**
   * <p>The updated list of instances for the load balancer.</p>
   */
  Instances?: Instance[];
}
export namespace RegisterEndPointsOutput {
  /**
   * @internal
   * Log-safe view of the object. This shape carries no sensitive members,
   * so the result is a plain shallow copy.
   */
  export const filterSensitiveLog = (obj: RegisterEndPointsOutput): any => Object.assign({}, obj);
}
/**
 * <p>The key of a tag.</p>
 */
export interface TagKeyOnly {
  /**
   * <p>The name of the key.</p>
   */
  Key?: string;
}
export namespace TagKeyOnly {
  /**
   * @internal
   * Log-safe view of the object. This shape carries no sensitive members,
   * so the result is a plain shallow copy.
   */
  export const filterSensitiveLog = (obj: TagKeyOnly): any => Object.assign({}, obj);
}
/**
 * <p>Contains the parameters for RemoveTags.</p>
 */
export interface RemoveTagsInput {
  /**
   * <p>The name of the load balancer. You can specify a maximum of one load balancer name.</p>
   */
  LoadBalancerNames: string[] | undefined;
  /**
   * <p>The list of tag keys to remove.</p>
   */
  Tags: TagKeyOnly[] | undefined;
}
export namespace RemoveTagsInput {
  /**
   * @internal
   * Log-safe view of the object. This shape carries no sensitive members,
   * so the result is a plain shallow copy.
   */
  export const filterSensitiveLog = (obj: RemoveTagsInput): any => Object.assign({}, obj);
}
/**
 * <p>Contains the output of RemoveTags.</p>
 */
export interface RemoveTagsOutput {}
export namespace RemoveTagsOutput {
  /**
   * @internal
   * Log-safe view of the object. This shape carries no sensitive members,
   * so the result is a plain shallow copy.
   */
  export const filterSensitiveLog = (obj: RemoveTagsOutput): any => Object.assign({}, obj);
}
/**
 * <p>The load balancer does not have a listener configured at the specified port.</p>
 */
export interface ListenerNotFoundException extends __SmithyException, $MetadataBearer {
  name: "ListenerNotFoundException";
  $fault: "client";
  Message?: string;
}
export namespace ListenerNotFoundException {
  /**
   * @internal
   * Log-safe view of the object. This shape carries no sensitive members,
   * so the result is a plain shallow copy.
   */
  export const filterSensitiveLog = (obj: ListenerNotFoundException): any => Object.assign({}, obj);
}
/**
 * <p>Contains the parameters for SetLoadBalancerListenerSSLCertificate.</p>
 */
export interface SetLoadBalancerListenerSSLCertificateInput {
  /**
   * <p>The name of the load balancer.</p>
   */
  LoadBalancerName: string | undefined;
  /**
   * <p>The port that uses the specified SSL certificate.</p>
   */
  LoadBalancerPort: number | undefined;
  /**
   * <p>The Amazon Resource Name (ARN) of the SSL certificate.</p>
   */
  SSLCertificateId: string | undefined;
}
export namespace SetLoadBalancerListenerSSLCertificateInput {
  /**
   * @internal
   * Log-safe view of the object. This shape carries no sensitive members,
   * so the result is a plain shallow copy.
   */
  export const filterSensitiveLog = (obj: SetLoadBalancerListenerSSLCertificateInput): any => Object.assign({}, obj);
}
/**
 * <p>Contains the output of SetLoadBalancerListenerSSLCertificate.</p>
 */
export interface SetLoadBalancerListenerSSLCertificateOutput {}
export namespace SetLoadBalancerListenerSSLCertificateOutput {
  /**
   * @internal
   * Log-safe view of the object. This shape carries no sensitive members,
   * so the result is a plain shallow copy.
   */
  export const filterSensitiveLog = (obj: SetLoadBalancerListenerSSLCertificateOutput): any => Object.assign({}, obj);
}
/**
 * <p>Contains the parameters for SetLoadBalancerPoliciesForBackendServer.</p>
 */
export interface SetLoadBalancerPoliciesForBackendServerInput {
  /**
   * <p>The name of the load balancer.</p>
   */
  LoadBalancerName: string | undefined;
  /**
   * <p>The port number associated with the EC2 instance.</p>
   */
  InstancePort: number | undefined;
  /**
   * <p>The names of the policies. If the list is empty, then all current polices are removed from the EC2 instance.</p>
   */
  PolicyNames: string[] | undefined;
}
export namespace SetLoadBalancerPoliciesForBackendServerInput {
  /**
   * @internal
   * Log-safe view of the object. This shape carries no sensitive members,
   * so the result is a plain shallow copy.
   */
  export const filterSensitiveLog = (obj: SetLoadBalancerPoliciesForBackendServerInput): any => Object.assign({}, obj);
}
/**
 * <p>Contains the output of SetLoadBalancerPoliciesForBackendServer.</p>
 */
export interface SetLoadBalancerPoliciesForBackendServerOutput {}
export namespace SetLoadBalancerPoliciesForBackendServerOutput {
  /**
   * @internal
   * Log-safe view of the object. This shape carries no sensitive members,
   * so the result is a plain shallow copy.
   */
  export const filterSensitiveLog = (obj: SetLoadBalancerPoliciesForBackendServerOutput): any => Object.assign({}, obj);
}
/**
* <p>Contains the parameters for SetLoadBalancePoliciesOfListener.</p>
*/
export interface SetLoadBalancerPoliciesOfListenerInput {
/**
* <p>The name of the load balancer.</p>
*/
LoadBalancerName: string | undefined;
/**
* <p>The external port of the load balancer.</p>
*/
LoadBalancerPort: number | undefined;
/**
* <p>The names of the policies. This list must include all policies to be enabled. If you omit a policy that is currently enabled, it is disabled. If the list is empty, all current policies are disabled.</p>
*/
PolicyNames: string[] | undefined;
}
export namespace SetLoadBalancerPoliciesOfListenerInput {
/**
* @internal
*/
export const filterSensitiveLog = (obj: SetLoadBalancerPoliciesOfListenerInput): any => ({
...obj,
});
}
/**
* <p>Contains the output of SetLoadBalancePoliciesOfListener.</p>
*/
export interface SetLoadBalancerPoliciesOfListenerOutput {}
export namespace SetLoadBalancerPoliciesOfListenerOutput {
/**
* @internal
*/
export const filterSensitiveLog = (obj: SetLoadBalancerPoliciesOfListenerOutput): any => ({
...obj,
});
} | the_stack |
import { expect } from "chai";
import { IModelConnection, SnapshotConnection } from "@itwin/core-frontend";
import {
ChildNodeSpecificationTypes, InstanceLabelOverrideValueSpecificationType, Ruleset, RuleTypes, VariableValueTypes,
} from "@itwin/presentation-common";
import { Presentation } from "@itwin/presentation-frontend";
import { initialize, terminate } from "../../IntegrationTests";
import { printRuleset } from "../Utils";
describe("Learning Snippets", () => {
// Connection to the snapshot iModel used by every test; re-opened per test for isolation.
let imodel: IModelConnection;
beforeEach(async () => {
// Start the presentation test harness, then open the shared test dataset.
await initialize();
imodel = await SnapshotConnection.openFile("assets/datasets/Properties_60InstancesWithUrl2.ibim");
});
afterEach(async () => {
// Close the connection and tear the harness down so state can't leak between tests.
await imodel.close();
await terminate();
});
describe("Customization Rules", () => {
describe("PropertySortingRule", () => {
it("uses `priority` attribute", async () => {
// __PUBLISH_EXTRACT_START__ Presentation.PropertySortingRule.Priority.Ruleset
// The ruleset has root node rule that returns `bis.SpatialViewDefinition` instances with labels
// consisting of `Roll` and `Pitch` property values. Also there are two customization rules to sort
// instances by `Roll` and `Pitch` properties. The rules have different priorities and higher priority
// rule is handled first.
const ruleset: Ruleset = {
id: "example",
rules: [{
ruleType: RuleTypes.RootNodes,
specifications: [{
specType: ChildNodeSpecificationTypes.InstanceNodesOfSpecificClasses,
classes: { schemaName: "BisCore", classNames: ["SpatialViewDefinition"] },
groupByClass: false,
groupByLabel: false,
}],
}, {
ruleType: RuleTypes.InstanceLabelOverride,
class: { schemaName: "BisCore", className: "SpatialViewDefinition" },
values: [{
specType: InstanceLabelOverrideValueSpecificationType.Composite,
separator: " x ",
parts: [
{ spec: { specType: InstanceLabelOverrideValueSpecificationType.Property, propertyName: "Roll" } },
{ spec: { specType: InstanceLabelOverrideValueSpecificationType.Property, propertyName: "Pitch" } },
],
}],
}, {
// Lower-priority sorting rule — expected to lose to the `Pitch` rule below.
ruleType: RuleTypes.PropertySorting,
priority: 1,
class: { schemaName: "BisCore", className: "SpatialViewDefinition" },
propertyName: "Roll",
}, {
// Higher-priority sorting rule — this one should take effect.
ruleType: RuleTypes.PropertySorting,
priority: 2,
class: { schemaName: "BisCore", className: "SpatialViewDefinition" },
propertyName: "Pitch",
}],
};
// __PUBLISH_EXTRACT_END__
printRuleset(ruleset);
// verify that nodes are sorted by `Pitch` property
// Labels are "<Roll> x <Pitch>"; expected order is ascending by the second
// component (-160.99, -35.26, 0.00, 90.00), proving the priority-2 rule won.
const nodes = await Presentation.presentation.getNodes({
imodel,
rulesetOrId: ruleset,
});
expect(nodes).to.be.lengthOf(4);
expect(nodes[0]).to.containSubset({ label: { displayValue: "-107.42 x -160.99" } });
expect(nodes[1]).to.containSubset({ label: { displayValue: "-45.00 x -35.26" } });
expect(nodes[2]).to.containSubset({ label: { displayValue: "-90.00 x 0.00" } });
expect(nodes[3]).to.containSubset({ label: { displayValue: "0.00 x 90.00" } });
});
it("uses `condition` attribute", async () => {
// __PUBLISH_EXTRACT_START__ Presentation.PropertySortingRule.Condition.Ruleset
// The ruleset has root node rule that returns `bis.SpatialViewDefinition` instances with labels
// consisting of `Roll` and `Pitch` property values. Also there is a customization rule to sort
// instances by `Pitch` property, but only when the `SORT_INSTANCES` ruleset variable is `true`.
const ruleset: Ruleset = {
id: "example",
rules: [{
ruleType: RuleTypes.RootNodes,
specifications: [{
specType: ChildNodeSpecificationTypes.InstanceNodesOfSpecificClasses,
classes: { schemaName: "BisCore", classNames: ["SpatialViewDefinition"] },
groupByClass: false,
groupByLabel: false,
}],
}, {
ruleType: RuleTypes.InstanceLabelOverride,
class: { schemaName: "BisCore", className: "SpatialViewDefinition" },
values: [{
specType: InstanceLabelOverrideValueSpecificationType.Composite,
separator: " x ",
parts: [
{ spec: { specType: InstanceLabelOverrideValueSpecificationType.Property, propertyName: "Roll" } },
{ spec: { specType: InstanceLabelOverrideValueSpecificationType.Property, propertyName: "Pitch" } },
],
}],
}, {
ruleType: RuleTypes.PropertySorting,
// The rule applies only when the condition evaluates to `true`. Previously this
// was the literal "TRUE", which made the SORT_INSTANCES variable below dead and
// left the `condition` attribute unexercised by this test.
condition: "GetVariableBoolValue(\"SORT_INSTANCES\")",
propertyName: "Pitch",
}],
};
// __PUBLISH_EXTRACT_END__
printRuleset(ruleset);
// verify that nodes are sorted by `Pitch` property when the variable enables the rule
const nodes = await Presentation.presentation.getNodes({
imodel,
rulesetOrId: ruleset,
rulesetVariables: [{ id: "SORT_INSTANCES", type: VariableValueTypes.Bool, value: true }],
});
expect(nodes).to.be.lengthOf(4);
expect(nodes[0]).to.containSubset({ label: { displayValue: "-107.42 x -160.99" } });
expect(nodes[1]).to.containSubset({ label: { displayValue: "-45.00 x -35.26" } });
expect(nodes[2]).to.containSubset({ label: { displayValue: "-90.00 x 0.00" } });
expect(nodes[3]).to.containSubset({ label: { displayValue: "0.00 x 90.00" } });
});
it("uses `class` attribute", async () => {
// __PUBLISH_EXTRACT_START__ Presentation.PropertySortingRule.Class.Ruleset
// The ruleset has root node rule that returns `bis.SpatialViewDefinition` instances with labels
// consisting of `Roll` and `Pitch` property values. Also there are customization rule to sort
// `bis.SpatialViewDefinition` instances by `Pitch` property
const ruleset: Ruleset = {
id: "example",
rules: [{
ruleType: RuleTypes.RootNodes,
specifications: [{
specType: ChildNodeSpecificationTypes.InstanceNodesOfSpecificClasses,
classes: { schemaName: "BisCore", classNames: ["SpatialViewDefinition"] },
groupByClass: false,
groupByLabel: false,
}],
}, {
ruleType: RuleTypes.InstanceLabelOverride,
class: { schemaName: "BisCore", className: "SpatialViewDefinition" },
values: [{
specType: InstanceLabelOverrideValueSpecificationType.Composite,
separator: " x ",
parts: [
{ spec: { specType: InstanceLabelOverrideValueSpecificationType.Property, propertyName: "Roll" } },
{ spec: { specType: InstanceLabelOverrideValueSpecificationType.Property, propertyName: "Pitch" } },
],
}],
}, {
// Sorting rule restricted to the exact class returned by the root node rule,
// so it applies to every node in this test.
ruleType: RuleTypes.PropertySorting,
class: { schemaName: "BisCore", className: "SpatialViewDefinition" },
propertyName: "Pitch",
}],
};
// __PUBLISH_EXTRACT_END__
printRuleset(ruleset);
// verify that nodes are sorted by `Pitch` property
// Labels are "<Roll> x <Pitch>"; expected order is ascending by the second component.
const nodes = await Presentation.presentation.getNodes({
imodel,
rulesetOrId: ruleset,
});
expect(nodes).to.be.lengthOf(4);
expect(nodes[0]).to.containSubset({ label: { displayValue: "-107.42 x -160.99" } });
expect(nodes[1]).to.containSubset({ label: { displayValue: "-45.00 x -35.26" } });
expect(nodes[2]).to.containSubset({ label: { displayValue: "-90.00 x 0.00" } });
expect(nodes[3]).to.containSubset({ label: { displayValue: "0.00 x 90.00" } });
});
it("uses `isPolymorphic` attribute", async () => {
// __PUBLISH_EXTRACT_START__ Presentation.PropertySortingRule.IsPolymorphic.Ruleset
// This ruleset lists `bis.SpatialViewDefinition` instances with their `Roll` and `Pitch` properties as instance
// labels. Sorting rule targets `bis.ViewDefinition3d`, the base class of `bis.SpatialViewDefinition`, so to
// sort instances of the derived classes, `isPolymorphic` attribute needs to be `true`.
const ruleset: Ruleset = {
id: "example",
rules: [{
ruleType: RuleTypes.RootNodes,
specifications: [{
specType: ChildNodeSpecificationTypes.InstanceNodesOfSpecificClasses,
classes: { schemaName: "BisCore", classNames: ["SpatialViewDefinition"] },
groupByClass: false,
groupByLabel: false,
}],
}, {
ruleType: RuleTypes.InstanceLabelOverride,
class: { schemaName: "BisCore", className: "SpatialViewDefinition" },
values: [{
specType: InstanceLabelOverrideValueSpecificationType.Composite,
separator: " x ",
parts: [
{ spec: { specType: InstanceLabelOverrideValueSpecificationType.Property, propertyName: "Roll" } },
{ spec: { specType: InstanceLabelOverrideValueSpecificationType.Property, propertyName: "Pitch" } },
],
}],
}, {
// Targets the base class; `isPolymorphic: true` is what lets it match the
// derived `SpatialViewDefinition` instances produced by the root node rule.
ruleType: RuleTypes.PropertySorting,
class: { schemaName: "BisCore", className: "ViewDefinition3d" },
isPolymorphic: true,
propertyName: "Pitch",
}],
};
// __PUBLISH_EXTRACT_END__
printRuleset(ruleset);
// verify that nodes of `bis.SpatialViewDefinition` class instances are sorted
// Labels are "<Roll> x <Pitch>"; expected order is ascending by the second component.
const nodes = await Presentation.presentation.getNodes({
imodel,
rulesetOrId: ruleset,
});
expect(nodes).to.be.lengthOf(4);
expect(nodes[0]).to.containSubset({ label: { displayValue: "-107.42 x -160.99" } });
expect(nodes[1]).to.containSubset({ label: { displayValue: "-45.00 x -35.26" } });
expect(nodes[2]).to.containSubset({ label: { displayValue: "-90.00 x 0.00" } });
expect(nodes[3]).to.containSubset({ label: { displayValue: "0.00 x 90.00" } });
});
it("uses `propertyName` attribute", async () => {
// __PUBLISH_EXTRACT_START__ Presentation.PropertySortingRule.PropertyName.Ruleset
// The ruleset has root node rule that returns `bis.SpatialViewDefinition` instances with labels
// consisting of `Roll` and `Pitch` property values. Also there are customization rule to sort
// instances of any class by `Pitch` property.
const ruleset: Ruleset = {
id: "example",
rules: [{
ruleType: RuleTypes.RootNodes,
specifications: [{
specType: ChildNodeSpecificationTypes.InstanceNodesOfSpecificClasses,
classes: { schemaName: "BisCore", classNames: ["SpatialViewDefinition"] },
groupByClass: false,
groupByLabel: false,
}],
}, {
ruleType: RuleTypes.InstanceLabelOverride,
class: { schemaName: "BisCore", className: "SpatialViewDefinition" },
values: [{
specType: InstanceLabelOverrideValueSpecificationType.Composite,
separator: " x ",
parts: [
{ spec: { specType: InstanceLabelOverrideValueSpecificationType.Property, propertyName: "Roll" } },
{ spec: { specType: InstanceLabelOverrideValueSpecificationType.Property, propertyName: "Pitch" } },
],
}],
}, {
ruleType: RuleTypes.PropertySorting,
propertyName: "Pitch",
}],
};
// __PUBLISH_EXTRACT_END__
printRuleset(ruleset);
// verify that nodes are sorted by `Pitch` property
const nodes = await Presentation.presentation.getNodes({
imodel,
rulesetOrId: ruleset,
});
expect(nodes).to.be.lengthOf(4);
expect(nodes[0]).to.containSubset({ label: { displayValue: "-107.42 x -160.99" } });
expect(nodes[1]).to.containSubset({ label: { displayValue: "-45.00 x -35.26" } });
expect(nodes[2]).to.containSubset({ label: { displayValue: "-90.00 x 0.00" } });
expect(nodes[3]).to.containSubset({ label: { displayValue: "0.00 x 90.00" } });
});
it("uses `sortAscending` attribute", async () => {
// __PUBLISH_EXTRACT_START__ Presentation.PropertySortingRule.SortAscending.Ruleset
// The ruleset has root node rule that returns `bis.SpatialViewDefinition` instances with labels
// consisting of `Roll` and `Pitch` property values. Also there are customization rule to sort
// instances by `Pitch` property in descending order
const ruleset: Ruleset = {
id: "example",
rules: [{
ruleType: RuleTypes.RootNodes,
specifications: [{
specType: ChildNodeSpecificationTypes.InstanceNodesOfSpecificClasses,
classes: { schemaName: "BisCore", classNames: ["SpatialViewDefinition"] },
groupByClass: false,
groupByLabel: false,
}],
}, {
ruleType: RuleTypes.InstanceLabelOverride,
class: { schemaName: "BisCore", className: "SpatialViewDefinition" },
values: [{
specType: InstanceLabelOverrideValueSpecificationType.Composite,
separator: " x ",
parts: [
{ spec: { specType: InstanceLabelOverrideValueSpecificationType.Property, propertyName: "Roll" } },
{ spec: { specType: InstanceLabelOverrideValueSpecificationType.Property, propertyName: "Pitch" } },
],
}],
}, {
ruleType: RuleTypes.PropertySorting,
propertyName: "Pitch",
sortAscending: false,
}],
};
// __PUBLISH_EXTRACT_END__
printRuleset(ruleset);
// verify that nodes are sorted by `Pitch` in descending order
const nodes = await Presentation.presentation.getNodes({
imodel,
rulesetOrId: ruleset,
});
expect(nodes).to.be.lengthOf(4);
expect(nodes[0]).to.containSubset({ label: { displayValue: "0.00 x 90.00" } });
expect(nodes[1]).to.containSubset({ label: { displayValue: "-90.00 x 0.00" } });
expect(nodes[2]).to.containSubset({ label: { displayValue: "-45.00 x -35.26" } });
expect(nodes[3]).to.containSubset({ label: { displayValue: "-107.42 x -160.99" } });
});
});
});
}); | the_stack |
// IMPORTANT
// This file was generated by https://github.com/Bolisov/google-api-typings-generator. Please do not edit it manually.
// In case of any problems please post an issue to https://github.com/Bolisov/google-api-typings-generator
// Generated from: https://logging.googleapis.com/$discovery/rest?version=v2
/// <reference types="gapi.client" />
declare namespace gapi.client {
    /** Load Stackdriver Logging API v2 */
    function load(name: "logging", version: "v2"): PromiseLike<void>;
    /** Load Stackdriver Logging API v2; `callback` is invoked once loading completes. */
    function load(name: "logging", version: "v2", callback: () => any): void;
    /** Entry point for `billingAccounts`-scoped methods of the API. */
    const billingAccounts: logging.BillingAccountsResource;
    /** Entry point for log-entry methods of the API. */
    const entries: logging.EntriesResource;
    /** Entry point for `folders`-scoped methods of the API. */
    const folders: logging.FoldersResource;
    /** Entry point for monitored-resource-descriptor methods of the API. */
    const monitoredResourceDescriptors: logging.MonitoredResourceDescriptorsResource;
    /** Entry point for `organizations`-scoped methods of the API. */
    const organizations: logging.OrganizationsResource;
    /** Entry point for `projects`-scoped methods of the API. */
    const projects: logging.ProjectsResource;
namespace logging {
        /** Describes histogram bucket boundaries (used by `LogMetric.bucketOptions`). Exactly one of the fields is expected to be set. */
        interface BucketOptions {
            /** The explicit buckets. */
            explicitBuckets?: Explicit;
            /** The exponential buckets. */
            exponentialBuckets?: Exponential;
            /** The linear bucket. */
            linearBuckets?: Linear;
        }
        /** Explicit histogram bucket boundaries, given as a list of bounds. */
        interface Explicit {
            /** The values must be monotonically increasing. */
            bounds?: number[];
        }
        /** Exponential histogram bucket boundaries, defined by a scale and growth factor. */
        interface Exponential {
            /** Must be greater than 1. */
            growthFactor?: number;
            /** Must be greater than 0. */
            numFiniteBuckets?: number;
            /** Must be greater than 0. */
            scale?: number;
        }
        /** Information about an HTTP request associated with a log entry (see `LogEntry.httpRequest`). */
        interface HttpRequest {
            /** The number of HTTP response bytes inserted into cache. Set only when a cache fill was attempted. */
            cacheFillBytes?: string;
            /** Whether or not an entity was served from cache (with or without validation). */
            cacheHit?: boolean;
            /** Whether or not a cache lookup was attempted. */
            cacheLookup?: boolean;
            /** Whether or not the response was validated with the origin server before being served from cache. This field is only meaningful if cache_hit is True. */
            cacheValidatedWithOriginServer?: boolean;
            /** The request processing latency on the server, from the time the request was received until the response was sent. */
            latency?: string;
            /** Protocol used for the request. Examples: "HTTP/1.1", "HTTP/2", "websocket" */
            protocol?: string;
            /** The referer URL of the request, as defined in HTTP/1.1 Header Field Definitions (http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html). */
            referer?: string;
            /** The IP address (IPv4 or IPv6) of the client that issued the HTTP request. Examples: "192.168.1.1", "FE80::0202:B3FF:FE1E:8329". */
            remoteIp?: string;
            /** The request method. Examples: "GET", "HEAD", "PUT", "POST". */
            requestMethod?: string;
            /** The size of the HTTP request message in bytes, including the request headers and the request body. */
            requestSize?: string;
            /**
             * The scheme (http, https), the host name, the path and the query portion of the URL that was requested. Example:
             * "http://example.com/some/info?color=red".
             */
            requestUrl?: string;
            /** The size of the HTTP response message sent back to the client, in bytes, including the response headers and the response body. */
            responseSize?: string;
            /** The IP address (IPv4 or IPv6) of the origin server that the request was sent to. */
            serverIp?: string;
            /** The response code indicating the status of response. Examples: 200, 404. */
            status?: number;
            /** The user agent sent by the client. Example: "Mozilla/4.0 (compatible; MSIE 6.0; Windows 98; Q312461; .NET CLR 1.0.3705)". */
            userAgent?: string;
        }
        /** Describes a single label: its key, value type, and human-readable description. */
        interface LabelDescriptor {
            /** A human-readable description for the label. */
            description?: string;
            /** The label key. */
            key?: string;
            /** The type of data that can be assigned to the label. */
            valueType?: string;
        }
        /** Linear histogram bucket boundaries: a fixed number of buckets of equal width starting at an offset. */
        interface Linear {
            /** Must be greater than 0. */
            numFiniteBuckets?: number;
            /** Lower bound of the first bucket. */
            offset?: number;
            /** Must be greater than 0. */
            width?: number;
        }
        /** Result of listing exclusions. */
        interface ListExclusionsResponse {
            /** A list of exclusions. */
            exclusions?: LogExclusion[];
            /**
             * If there might be more results than appear in this response, then nextPageToken is included. To get the next set of results, call the same method again
             * using the value of nextPageToken as pageToken.
             */
            nextPageToken?: string;
        }
        /** Parameters for a request to list log entries. */
        interface ListLogEntriesRequest {
            /**
             * Optional. A filter that chooses which log entries to return. See Advanced Logs Filters. Only log entries that match the filter are returned. An empty
             * filter matches all log entries in the resources listed in resource_names. Referencing a parent resource that is not listed in resource_names will cause
             * the filter to return no results. The maximum length of the filter is 20000 characters.
             */
            filter?: string;
            /**
             * Optional. How the results should be sorted. Presently, the only permitted values are "timestamp asc" (default) and "timestamp desc". The first option
             * returns entries in order of increasing values of LogEntry.timestamp (oldest first), and the second option returns entries in order of decreasing
             * timestamps (newest first). Entries with equal timestamps are returned in order of their insert_id values.
             */
            orderBy?: string;
            /**
             * Optional. The maximum number of results to return from this request. Non-positive values are ignored. The presence of next_page_token in the response
             * indicates that more results might be available.
             */
            pageSize?: number;
            /**
             * Optional. If present, then retrieve the next batch of results from the preceding call to this method. page_token must be the value of next_page_token
             * from the previous response. The values of other method parameters should be identical to those in the previous call.
             */
            pageToken?: string;
            /**
             * Deprecated. Use resource_names instead. One or more project identifiers or project numbers from which to retrieve log entries. Example:
             * "my-project-1A". If present, these project identifiers are converted to resource name format and added to the list of resources in resource_names.
             */
            projectIds?: string[];
            /**
             * Required. Names of one or more parent resources from which to retrieve log entries:
             * "projects/[PROJECT_ID]"
             * "organizations/[ORGANIZATION_ID]"
             * "billingAccounts/[BILLING_ACCOUNT_ID]"
             * "folders/[FOLDER_ID]"
             * Projects listed in the project_ids field are added to this list.
             */
            resourceNames?: string[];
        }
        /** Result of listing log entries. */
        interface ListLogEntriesResponse {
            /**
             * A list of log entries. If entries is empty, nextPageToken may still be returned, indicating that more entries may exist. See nextPageToken for more
             * information.
             */
            entries?: LogEntry[];
            /**
             * If there might be more results than those appearing in this response, then nextPageToken is included. To get the next set of results, call this method
             * again using the value of nextPageToken as pageToken.If a value for next_page_token appears and the entries field is empty, it means that the search
             * found no log entries so far but it did not have time to search all the possible log entries. Retry the method with this value for page_token to
             * continue the search. Alternatively, consider speeding up the search by changing your filter to specify a single log name or resource type, or to narrow
             * the time range of the search.
             */
            nextPageToken?: string;
        }
        /** Result of listing logs-based metrics. */
        interface ListLogMetricsResponse {
            /** A list of logs-based metrics. */
            metrics?: LogMetric[];
            /**
             * If there might be more results than appear in this response, then nextPageToken is included. To get the next set of results, call this method again
             * using the value of nextPageToken as pageToken.
             */
            nextPageToken?: string;
        }
        /** Result of listing log names. */
        interface ListLogsResponse {
            /** A list of log names. For example, "projects/my-project/syslog" or "organizations/123/cloudresourcemanager.googleapis.com%2Factivity". */
            logNames?: string[];
            /**
             * If there might be more results than those appearing in this response, then nextPageToken is included. To get the next set of results, call this method
             * again using the value of nextPageToken as pageToken.
             */
            nextPageToken?: string;
        }
        /** Result of listing monitored resource descriptors. */
        interface ListMonitoredResourceDescriptorsResponse {
            /**
             * If there might be more results than those appearing in this response, then nextPageToken is included. To get the next set of results, call this method
             * again using the value of nextPageToken as pageToken.
             */
            nextPageToken?: string;
            /** A list of resource descriptors. */
            resourceDescriptors?: MonitoredResourceDescriptor[];
        }
        /** Result of listing sinks. */
        interface ListSinksResponse {
            /**
             * If there might be more results than appear in this response, then nextPageToken is included. To get the next set of results, call the same method again
             * using the value of nextPageToken as pageToken.
             */
            nextPageToken?: string;
            /** A list of sinks. */
            sinks?: LogSink[];
        }
        /** An individual entry in a log. Exactly one of the payload fields (jsonPayload, protoPayload, textPayload) is expected to be set. */
        interface LogEntry {
            /** Optional. Information about the HTTP request associated with this log entry, if applicable. */
            httpRequest?: HttpRequest;
            /**
             * Optional. A unique identifier for the log entry. If you provide a value, then Stackdriver Logging considers other log entries in the same project, with
             * the same timestamp, and with the same insert_id to be duplicates which can be removed. If omitted in new log entries, then Stackdriver Logging assigns
             * its own unique identifier. The insert_id is also used to order log entries that have the same timestamp value.
             */
            insertId?: string;
            /** The log entry payload, represented as a structure that is expressed as a JSON object. */
            jsonPayload?: Record<string, any>;
            /** Optional. A set of user-defined (key, value) data that provides additional information about the log entry. */
            labels?: Record<string, string>;
            /**
             * Required. The resource name of the log to which this log entry belongs:
             * "projects/[PROJECT_ID]/logs/[LOG_ID]"
             * "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]"
             * "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]"
             * "folders/[FOLDER_ID]/logs/[LOG_ID]"
             * A project number may optionally be used in place of PROJECT_ID. The project number is translated to its corresponding PROJECT_ID internally and the
             * log_name field will contain PROJECT_ID in queries and exports.[LOG_ID] must be URL-encoded within log_name. Example:
             * "organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity". [LOG_ID] must be less than 512 characters long and can only include the
             * following characters: upper and lower case alphanumeric characters, forward-slash, underscore, hyphen, and period.For backward compatibility, if
             * log_name begins with a forward-slash, such as /projects/..., then the log entry is ingested as usual but the forward-slash is removed. Listing the log
             * entry will not show the leading slash and filtering for a log name with a leading slash will never return any results.
             */
            logName?: string;
            /** Optional. Information about an operation associated with the log entry, if applicable. */
            operation?: LogEntryOperation;
            /** The log entry payload, represented as a protocol buffer. Some Google Cloud Platform services use this field for their log entry payloads. */
            protoPayload?: Record<string, any>;
            /** Output only. The time the log entry was received by Stackdriver Logging. */
            receiveTimestamp?: string;
            /**
             * Required. The monitored resource associated with this log entry. Example: a log entry that reports a database error would be associated with the
             * monitored resource designating the particular database that reported the error.
             */
            resource?: MonitoredResource;
            /** Optional. The severity of the log entry. The default value is LogSeverity.DEFAULT. */
            severity?: string;
            /** Optional. Source code location information associated with the log entry, if any. */
            sourceLocation?: LogEntrySourceLocation;
            /** The log entry payload, represented as a Unicode string (UTF-8). */
            textPayload?: string;
            /**
             * Optional. The time the event described by the log entry occurred. This time is used to compute the log entry's age and to enforce the logs retention
             * period. If this field is omitted in a new log entry, then Stackdriver Logging assigns it the current time.Incoming log entries should have timestamps
             * that are no more than the logs retention period in the past, and no more than 24 hours in the future. See the entries.write API method for more
             * information.
             */
            timestamp?: string;
            /**
             * Optional. Resource name of the trace associated with the log entry, if any. If it contains a relative resource name, the name is assumed to be relative
             * to //tracing.googleapis.com. Example: projects/my-projectid/traces/06796866738c859f2f19b7cfb3214824
             */
            trace?: string;
        }
        /** Additional information about an operation with which a log entry is associated (see `LogEntry.operation`). */
        interface LogEntryOperation {
            /** Optional. Set this to True if this is the first log entry in the operation. */
            first?: boolean;
            /** Optional. An arbitrary operation identifier. Log entries with the same identifier are assumed to be part of the same operation. */
            id?: string;
            /** Optional. Set this to True if this is the last log entry in the operation. */
            last?: boolean;
            /**
             * Optional. An arbitrary producer identifier. The combination of id and producer must be globally unique. Examples for producer:
             * "MyDivision.MyBigCompany.com", "github.com/MyProject/MyApplication".
             */
            producer?: string;
        }
        /** Source code location information associated with a log entry (see `LogEntry.sourceLocation`). */
        interface LogEntrySourceLocation {
            /** Optional. Source file name. Depending on the runtime environment, this might be a simple name or a fully-qualified name. */
            file?: string;
            /**
             * Optional. Human-readable name of the function or method being invoked, with optional context such as the class or package name. This information may be
             * used in contexts such as the logs viewer, where a file and line number are less meaningful. The format can vary by language. For example:
             * qual.if.ied.Class.method (Java), dir/package.func (Go), function (Python).
             */
            function?: string;
            /** Optional. Line within the source file. 1-based; 0 indicates no line number available. */
            line?: string;
        }
        /** Specifies, via an advanced logs filter, a set of log entries to exclude. */
        interface LogExclusion {
            /** Optional. A description of this exclusion. */
            description?: string;
            /**
             * Optional. If set to True, then this exclusion is disabled and it does not exclude any log entries. You can use exclusions.patch to change the value of
             * this field.
             */
            disabled?: boolean;
            /**
             * Required. An advanced logs filter that matches the log entries to be excluded. By using the sample function, you can exclude less than 100% of the
             * matching log entries. For example, the following filter matches 99% of low-severity log entries from load balancers:
             * "resource.type=http_load_balancer severity<ERROR sample(insertId, 0.99)"
             */
            filter?: string;
            /**
             * Required. A client-assigned identifier, such as "load-balancer-exclusion". Identifiers are limited to 100 characters and can include only letters,
             * digits, underscores, hyphens, and periods.
             */
            name?: string;
        }
        /** An application log line emitted while serving a request (see `RequestLog.line`). */
        interface LogLine {
            /** App-provided log message. */
            logMessage?: string;
            /** Severity of this log entry. */
            severity?: string;
            /** Where in the source code this log message was written. */
            sourceLocation?: SourceLocation;
            /** Approximate time when this log entry was made. */
            time?: string;
        }
        /** Describes a logs-based metric: a filter matching log entries, plus optional value/label extraction and histogram bucketing. */
        interface LogMetric {
            /**
             * Optional. The bucket_options are required when the logs-based metric is using a DISTRIBUTION value type and it describes the bucket boundaries used to
             * create a histogram of the extracted values.
             */
            bucketOptions?: BucketOptions;
            /** Optional. A description of this metric, which is used in documentation. */
            description?: string;
            /**
             * Required. An advanced logs filter which is used to match log entries. Example:
             * "resource.type=gae_app AND severity>=ERROR"
             * The maximum length of the filter is 20000 characters.
             */
            filter?: string;
            /**
             * Optional. A map from a label key string to an extractor expression which is used to extract data from a log entry field and assign as the label value.
             * Each label key specified in the LabelDescriptor must have an associated extractor expression in this map. The syntax of the extractor expression is the
             * same as for the value_extractor field.The extracted value is converted to the type defined in the label descriptor. If either the extraction or the
             * type conversion fails, the label will have a default value. The default value for a string label is an empty string, for an integer label it is 0, and
             * for a boolean label it is false.Note that there are upper bounds on the maximum number of labels and the number of active time series that are allowed in
             * a project.
             */
            labelExtractors?: Record<string, string>;
            /**
             * Optional. The metric descriptor associated with the logs-based metric. If unspecified, it uses a default metric descriptor with a DELTA metric kind,
             * INT64 value type, with no labels and a unit of "1". Such a metric counts the number of log entries matching the filter expression.The name, type, and
             * description fields in the metric_descriptor are output only, and is constructed using the name and description field in the LogMetric.To create a
             * logs-based metric that records a distribution of log values, a DELTA metric kind with a DISTRIBUTION value type must be used along with a
             * value_extractor expression in the LogMetric.Each label in the metric descriptor must have a matching label name as the key and an extractor expression
             * as the value in the label_extractors map.The metric_kind and value_type fields in the metric_descriptor cannot be updated once initially configured.
             * New labels can be added in the metric_descriptor, but existing labels cannot be modified except for their description.
             */
            metricDescriptor?: MetricDescriptor;
            /**
             * Required. The client-assigned metric identifier. Examples: "error_count", "nginx/requests".Metric identifiers are limited to 100 characters and can
             * include only the following characters: A-Z, a-z, 0-9, and the special characters _-.,+!*',()%/. The forward-slash character (/) denotes a hierarchy of
             * name pieces, and it cannot be the first character of the name.The metric identifier in this field must not be URL-encoded
             * (https://en.wikipedia.org/wiki/Percent-encoding). However, when the metric identifier appears as the [METRIC_ID] part of a metric_name API parameter,
             * then the metric identifier must be URL-encoded. Example: "projects/my-project/metrics/nginx%2Frequests".
             */
            name?: string;
            /**
             * Optional. A value_extractor is required when using a distribution logs-based metric to extract the values to record from a log entry. Two functions are
             * supported for value extraction: EXTRACT(field) or REGEXP_EXTRACT(field, regex). The arguments are: 1. field: The name of the log entry field from which
             * the value is to be extracted. 2. regex: A regular expression using the Google RE2 syntax (https://github.com/google/re2/wiki/Syntax) with a single
             * capture group to extract data from the specified log entry field. The value of the field is converted to a string before applying the regex. It is
             * an error to specify a regex that does not include exactly one capture group.The result of the extraction must be convertible to a double type, as the
             * distribution always records double values. If either the extraction or the conversion to double fails, then those values are not recorded in the
             * distribution.Example: REGEXP_EXTRACT(jsonPayload.request, ".*quantity=(\d+).*")
             */
            valueExtractor?: string;
            /** Deprecated. The API version that created or updated this metric. The v2 format is used by default and cannot be changed. */
            version?: string;
        }
        /** Describes a sink used to export log entries to a destination (Cloud Storage, BigQuery, or Pub/Sub). */
        interface LogSink {
            /**
             * Required. The export destination:
             * "storage.googleapis.com/[GCS_BUCKET]"
             * "bigquery.googleapis.com/projects/[PROJECT_ID]/datasets/[DATASET]"
             * "pubsub.googleapis.com/projects/[PROJECT_ID]/topics/[TOPIC_ID]"
             * The sink's writer_identity, set when the sink is created, must have permission to write to the destination or else the log entries are not exported.
             * For more information, see Exporting Logs With Sinks.
             */
            destination?: string;
            /** Deprecated. This field is ignored when creating or updating sinks. */
            endTime?: string;
            /**
             * Optional. An advanced logs filter. The only exported log entries are those that are in the resource owning the sink and that match the filter. For
             * example:
             * logName="projects/[PROJECT_ID]/logs/[LOG_ID]" AND severity>=ERROR
             */
            filter?: string;
            /**
             * Optional. This field applies only to sinks owned by organizations and folders. If the field is false, the default, only the logs owned by the sink's
             * parent resource are available for export. If the field is true, then logs from all the projects, folders, and billing accounts contained in the sink's
             * parent resource are also available for export. Whether a particular log entry from the children is exported depends on the sink's filter expression.
             * For example, if this field is true, then the filter resource.type=gce_instance would export all Compute Engine VM instance log entries from all
             * projects in the sink's parent. To only export entries from certain child projects, filter on the project part of the log name:
             * logName:("projects/test-project1/" OR "projects/test-project2/") AND
             * resource.type=gce_instance
             */
            includeChildren?: boolean;
            /**
             * Required. The client-assigned sink identifier, unique within the project. Example: "my-syslog-errors-to-pubsub". Sink identifiers are limited to 100
             * characters and can include only the following characters: upper and lower-case alphanumeric characters, underscores, hyphens, and periods.
             */
            name?: string;
            /** Deprecated. The log entry format to use for this sink's exported log entries. The v2 format is used by default and cannot be changed. */
            outputVersionFormat?: string;
            /** Deprecated. This field is ignored when creating or updating sinks. */
            startTime?: string;
            /**
             * Output only. An IAM identity&mdash;a service account or group&mdash;under which Stackdriver Logging writes the exported log entries to the sink's
             * destination. This field is set by sinks.create and sinks.update, based on the setting of unique_writer_identity in those methods.Until you grant this
             * identity write-access to the destination, log entry exports from this sink will fail. For more information, see Granting access for a resource. Consult
             * the destination service's documentation to determine the appropriate IAM roles to assign to the identity.
             */
            writerIdentity?: string;
        }
        /** Defines a metric type: its name, kind, value type, unit, and label schema. */
        interface MetricDescriptor {
            /** A detailed description of the metric, which can be used in documentation. */
            description?: string;
            /**
             * A concise name for the metric, which can be displayed in user interfaces. Use sentence case without an ending period, for example "Request count". This
             * field is optional but it is recommended to be set for any metrics associated with user-visible concepts, such as Quota.
             */
            displayName?: string;
            /**
             * The set of labels that can be used to describe a specific instance of this metric type. For example, the
             * appengine.googleapis.com/http/server/response_latencies metric type has a label for the HTTP response code, response_code, so you can look at latencies
             * for successful responses or just for responses that failed.
             */
            labels?: LabelDescriptor[];
            /** Whether the metric records instantaneous values, changes to a value, etc. Some combinations of metric_kind and value_type might not be supported. */
            metricKind?: string;
            /** The resource name of the metric descriptor. */
            name?: string;
            /**
             * The metric type, including its DNS name prefix. The type is not URL-encoded. All user-defined custom metric types have the DNS name
             * custom.googleapis.com. Metric types should use a natural hierarchical grouping. For example:
             * "custom.googleapis.com/invoice/paid/amount"
             * "appengine.googleapis.com/http/server/response_latencies"
             */
            type?: string;
            /**
             * The unit in which the metric value is reported. It is only applicable if the value_type is INT64, DOUBLE, or DISTRIBUTION. The supported units are a
             * subset of The Unified Code for Units of Measure (http://unitsofmeasure.org/ucum.html) standard:Basic units (UNIT)
             * bit bit
             * By byte
             * s second
             * min minute
             * h hour
             * d dayPrefixes (PREFIX)
             * k kilo (10**3)
             * M mega (10**6)
             * G giga (10**9)
             * T tera (10**12)
             * P peta (10**15)
             * E exa (10**18)
             * Z zetta (10**21)
             * Y yotta (10**24)
             * m milli (10**-3)
             * u micro (10**-6)
             * n nano (10**-9)
             * p pico (10**-12)
             * f femto (10**-15)
             * a atto (10**-18)
             * z zepto (10**-21)
             * y yocto (10**-24)
             * Ki kibi (2**10)
             * Mi mebi (2**20)
             * Gi gibi (2**30)
             * Ti tebi (2**40)GrammarThe grammar includes the dimensionless unit 1, such as 1/s.The grammar also includes these connectors:
             * / division (as an infix operator, e.g. 1/s).
             * . multiplication (as an infix operator, e.g. GBy.d)The grammar for a unit is as follows:
             * Expression = Component { "." Component } { "/" Component } ;
             *
             * Component = [ PREFIX ] UNIT [ Annotation ]
             * | Annotation
             * | "1"
             * ;
             *
             * Annotation = "{" NAME "}" ;
             * Notes:
             * Annotation is just a comment if it follows a UNIT and is equivalent to 1 if it is used alone. For examples, {requests}/s == 1/s, By{transmitted}/s ==
             * By/s.
             * NAME is a sequence of non-blank printable ASCII characters not containing '{' or '}'.
             */
            unit?: string;
            /** Whether the measurement is an integer, a floating-point number, etc. Some combinations of metric_kind and value_type might not be supported. */
            valueType?: string;
        }
        /** A monitored resource instance, identified by a type and values for that type's labels. */
        interface MonitoredResource {
            /**
             * Required. Values for all of the labels listed in the associated monitored resource descriptor. For example, Compute Engine VM instances use the labels
             * "project_id", "instance_id", and "zone".
             */
            labels?: Record<string, string>;
            /**
             * Required. The monitored resource type. This field must match the type field of a MonitoredResourceDescriptor object. For example, the type of a Compute
             * Engine VM instance is gce_instance.
             */
            type?: string;
        }
        /** Describes a type of monitored resource and the label schema used to identify its instances. */
        interface MonitoredResourceDescriptor {
            /** Optional. A detailed description of the monitored resource type that might be used in documentation. */
            description?: string;
            /**
             * Optional. A concise name for the monitored resource type that might be displayed in user interfaces. It should be a Title Cased Noun Phrase, without
             * any article or other determiners. For example, "Google Cloud SQL Database".
             */
            displayName?: string;
            /**
             * Required. A set of labels used to describe instances of this monitored resource type. For example, an individual Google Cloud SQL database is
             * identified by values for the labels "database_id" and "zone".
             */
            labels?: LabelDescriptor[];
            /**
             * Optional. The resource name of the monitored resource descriptor: "projects/{project_id}/monitoredResourceDescriptors/{type}" where {type} is the value
             * of the type field in this object and {project_id} is a project ID that provides API-specific context for accessing the type. APIs that do not use
             * project information can use the resource name format "monitoredResourceDescriptors/{type}".
             */
            name?: string;
            /**
             * Required. The monitored resource type. For example, the type "cloudsql_database" represents databases in Google Cloud SQL. The maximum length of this
             * value is 256 characters.
             */
            type?: string;
        }
/**
 * Complete log information for a single request handled by an App Engine application: identity of
 * the app/module/version/instance that served it, request and response details (method, resource,
 * status, sizes), timing (start/end/latency/pending time), and the application log lines (`line`)
 * emitted while serving the request.
 */
interface RequestLog {
/** App Engine release version. */
appEngineRelease?: string;
/** Application that handled this request. */
appId?: string;
/** An indication of the relative cost of serving this request. */
cost?: number;
/** Time when the request finished. */
endTime?: string;
/** Whether this request is finished or active. */
finished?: boolean;
/**
* Whether this is the first RequestLog entry for this request. If an active request has several RequestLog entries written to Stackdriver Logging, then
* this field will be set for one of them.
*/
first?: boolean;
/** Internet host and port number of the resource being requested. */
host?: string;
/** HTTP version of request. Example: "HTTP/1.1". */
httpVersion?: string;
/** An identifier for the instance that handled the request. */
instanceId?: string;
/**
* If the instance processing this request belongs to a manually scaled module, then this is the 0-based index of the instance. Otherwise, this value is
* -1.
*/
instanceIndex?: number;
/** Origin IP address. */
ip?: string;
/** Latency of the request. */
latency?: string;
/** A list of log lines emitted by the application while serving this request. */
line?: LogLine[];
/** Number of CPU megacycles used to process request. */
megaCycles?: string;
/** Request method. Example: "GET", "HEAD", "PUT", "POST", "DELETE". */
method?: string;
/** Module of the application that handled this request. */
moduleId?: string;
/**
* The logged-in user who made the request.Most likely, this is the part of the user's email before the @ sign. The field value is the same for different
* requests from the same user, but different users can have similar names. This information is also available to the application via the App Engine Users
* API.This field will be populated starting with App Engine 1.9.21.
*/
nickname?: string;
/** Time this request spent in the pending request queue. */
pendingTime?: string;
/** Referrer URL of request. */
referrer?: string;
/**
* Globally unique identifier for a request, which is based on the request start time. Request IDs for requests which started later will compare greater
* as strings than those for requests which started earlier.
*/
requestId?: string;
/**
* Contains the path and query portion of the URL that was requested. For example, if the URL was "http://example.com/app?name=val", the resource would be
* "/app?name=val". The fragment identifier, which is identified by the # character, is not included.
*/
resource?: string;
/** Size in bytes sent back to client by request. */
responseSize?: string;
/**
* Source code for the application that handled this request. There can be more than one source reference per deployed application if source code is
* distributed among multiple repositories.
*/
sourceReference?: SourceReference[];
/** Time when the request started. */
startTime?: string;
/** HTTP response status code. Example: 200, 404. */
status?: number;
/** Task name of the request, in the case of an offline request. */
taskName?: string;
/** Queue name of the request, in the case of an offline request. */
taskQueueName?: string;
/** Stackdriver Trace identifier for this request. */
traceId?: string;
/** File or class that handled the request. */
urlMapEntry?: string;
/** User agent that made the request. */
userAgent?: string;
/** Version of the application that handled this request. */
versionId?: string;
/** Whether this was a loading request for the instance. */
wasLoadingRequest?: boolean;
}
/**
 * Identifies a location in application source code: file name, line number, and the
 * human-readable name of the function or method being invoked.
 */
interface SourceLocation {
/** Source file name. Depending on the runtime environment, this might be a simple name or a fully-qualified name. */
file?: string;
/**
* Human-readable name of the function or method being invoked, with optional context such as the class or package name. This information is used in
* contexts such as the logs viewer, where a file and line number are less meaningful. The format can vary by language. For example:
* qual.if.ied.Class.method (Java), dir/package.func (Go), function (Python).
*/
functionName?: string;
/** Line within the source file. */
line?: string;
}
/**
 * A reference to a particular snapshot of the deployed application's source: a repository URI
 * plus the canonical identifier (e.g. a git commit hash) of the deployed revision.
 */
interface SourceReference {
/** Optional. A URI string identifying the repository. Example: "https://github.com/GoogleCloudPlatform/kubernetes.git" */
repository?: string;
/** The canonical and persistent identifier of the deployed revision. Example (git): "0035781c50ec7aa23385dc841529ce8a4b70db1b" */
revisionId?: string;
}
/**
 * Request body for the entries.write method: the log entries to write, plus optional defaults
 * (logName, resource, labels) that are copied into any entry in `entries` that does not specify
 * its own value for the corresponding field.
 */
interface WriteLogEntriesRequest {
/**
* Required. The log entries to send to Stackdriver Logging. The order of log entries in this list does not matter. Values supplied in this method's
* log_name, resource, and labels fields are copied into those log entries in this list that do not include values for their corresponding fields. For
* more information, see the LogEntry type.If the timestamp or insert_id fields are missing in log entries, then this method supplies the current time or
* a unique identifier, respectively. The supplied values are chosen so that, among the log entries that did not supply their own values, the entries
* earlier in the list will sort before the entries later in the list. See the entries.list method.Log entries with timestamps that are more than the logs
* retention period in the past or more than 24 hours in the future might be discarded. Discarding does not return an error.To improve throughput and to
* avoid exceeding the quota limit for calls to entries.write, you should try to include several log entries in this list, rather than calling this method
* for each individual log entry.
*/
entries?: LogEntry[];
/**
* Optional. Default labels that are added to the labels field of all log entries in entries. If a log entry already has a label with the same key as a
* label in this parameter, then the log entry's label is not changed. See LogEntry.
*/
labels?: Record<string, string>;
/**
* Optional. A default log resource name that is assigned to all log entries in entries that do not specify a value for log_name:
* "projects/[PROJECT_ID]/logs/[LOG_ID]"
* "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]"
* "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]"
* "folders/[FOLDER_ID]/logs/[LOG_ID]"
* [LOG_ID] must be URL-encoded. For example, "projects/my-project-id/logs/syslog" or
* "organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity". For more information about log names, see LogEntry.
*/
logName?: string;
/**
* Optional. Whether valid entries should be written even if some other entries fail due to INVALID_ARGUMENT or PERMISSION_DENIED errors. If any entry is
* not written, then the response status is the error associated with one of the failed entries and the response includes error details keyed by the
* entries' zero-based index in the entries.write method.
*/
partialSuccess?: boolean;
/**
* Optional. A default monitored resource object that is assigned to all log entries in entries that do not specify a value for resource. Example:
* { "type": "gce_instance",
* "labels": {
* "zone": "us-central1-a", "instance_id": "00000000000000000000" }}
* See LogEntry.
*/
resource?: MonitoredResource;
}
/**
 * Operations on log exclusions (LogExclusion) under a parent resource — a project, organization,
 * billing account, or folder: create, delete, get, list, and patch. Each method takes a flat
 * request object combining standard Google API query parameters with method-specific fields.
 */
interface ExclusionsResource {
/**
* Creates a new exclusion in a specified parent resource. Only log entries belonging to that resource can be excluded. You can have up to 10 exclusions
* in a resource.
*/
create(request: {
/** V1 error format. */
"$.xgafv"?: string;
/** OAuth access token. */
access_token?: string;
/** Data format for response. */
alt?: string;
/** OAuth bearer token. */
bearer_token?: string;
/** JSONP */
callback?: string;
/** Selector specifying which fields to include in a partial response. */
fields?: string;
/** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
key?: string;
/** OAuth 2.0 token for the current user. */
oauth_token?: string;
/**
* Required. The parent resource in which to create the exclusion:
* "projects/[PROJECT_ID]"
* "organizations/[ORGANIZATION_ID]"
* "billingAccounts/[BILLING_ACCOUNT_ID]"
* "folders/[FOLDER_ID]"
* Examples: "projects/my-logging-project", "organizations/123456789".
*/
parent: string;
/** Pretty-print response. */
pp?: boolean;
/** Returns response with indentations and line breaks. */
prettyPrint?: boolean;
/** Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. */
quotaUser?: string;
/** Legacy upload protocol for media (e.g. "media", "multipart"). */
uploadType?: string;
/** Upload protocol for media (e.g. "raw", "multipart"). */
upload_protocol?: string;
}): Request<LogExclusion>;
/** Deletes an exclusion. */
delete(request: {
/** V1 error format. */
"$.xgafv"?: string;
/** OAuth access token. */
access_token?: string;
/** Data format for response. */
alt?: string;
/** OAuth bearer token. */
bearer_token?: string;
/** JSONP */
callback?: string;
/** Selector specifying which fields to include in a partial response. */
fields?: string;
/** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
key?: string;
/**
* Required. The resource name of an existing exclusion to delete:
* "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]"
* "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]"
* "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]"
* "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]"
* Example: "projects/my-project-id/exclusions/my-exclusion-id".
*/
name: string;
/** OAuth 2.0 token for the current user. */
oauth_token?: string;
/** Pretty-print response. */
pp?: boolean;
/** Returns response with indentations and line breaks. */
prettyPrint?: boolean;
/** Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. */
quotaUser?: string;
/** Legacy upload protocol for media (e.g. "media", "multipart"). */
uploadType?: string;
/** Upload protocol for media (e.g. "raw", "multipart"). */
upload_protocol?: string;
}): Request<{}>;
/** Gets the description of an exclusion. */
get(request: {
/** V1 error format. */
"$.xgafv"?: string;
/** OAuth access token. */
access_token?: string;
/** Data format for response. */
alt?: string;
/** OAuth bearer token. */
bearer_token?: string;
/** JSONP */
callback?: string;
/** Selector specifying which fields to include in a partial response. */
fields?: string;
/** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
key?: string;
/**
* Required. The resource name of an existing exclusion:
* "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]"
* "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]"
* "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]"
* "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]"
* Example: "projects/my-project-id/exclusions/my-exclusion-id".
*/
name: string;
/** OAuth 2.0 token for the current user. */
oauth_token?: string;
/** Pretty-print response. */
pp?: boolean;
/** Returns response with indentations and line breaks. */
prettyPrint?: boolean;
/** Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. */
quotaUser?: string;
/** Legacy upload protocol for media (e.g. "media", "multipart"). */
uploadType?: string;
/** Upload protocol for media (e.g. "raw", "multipart"). */
upload_protocol?: string;
}): Request<LogExclusion>;
/** Lists all the exclusions in a parent resource. */
list(request: {
/** V1 error format. */
"$.xgafv"?: string;
/** OAuth access token. */
access_token?: string;
/** Data format for response. */
alt?: string;
/** OAuth bearer token. */
bearer_token?: string;
/** JSONP */
callback?: string;
/** Selector specifying which fields to include in a partial response. */
fields?: string;
/** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
key?: string;
/** OAuth 2.0 token for the current user. */
oauth_token?: string;
/**
* Optional. The maximum number of results to return from this request. Non-positive values are ignored. The presence of nextPageToken in the response
* indicates that more results might be available.
*/
pageSize?: number;
/**
* Optional. If present, then retrieve the next batch of results from the preceding call to this method. pageToken must be the value of nextPageToken from
* the previous response. The values of other method parameters should be identical to those in the previous call.
*/
pageToken?: string;
/**
* Required. The parent resource whose exclusions are to be listed.
* "projects/[PROJECT_ID]"
* "organizations/[ORGANIZATION_ID]"
* "billingAccounts/[BILLING_ACCOUNT_ID]"
* "folders/[FOLDER_ID]"
*/
parent: string;
/** Pretty-print response. */
pp?: boolean;
/** Returns response with indentations and line breaks. */
prettyPrint?: boolean;
/** Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. */
quotaUser?: string;
/** Legacy upload protocol for media (e.g. "media", "multipart"). */
uploadType?: string;
/** Upload protocol for media (e.g. "raw", "multipart"). */
upload_protocol?: string;
}): Request<ListExclusionsResponse>;
/** Changes one or more properties of an existing exclusion. */
patch(request: {
/** V1 error format. */
"$.xgafv"?: string;
/** OAuth access token. */
access_token?: string;
/** Data format for response. */
alt?: string;
/** OAuth bearer token. */
bearer_token?: string;
/** JSONP */
callback?: string;
/** Selector specifying which fields to include in a partial response. */
fields?: string;
/** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
key?: string;
/**
* Required. The resource name of the exclusion to update:
* "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]"
* "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]"
* "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]"
* "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]"
* Example: "projects/my-project-id/exclusions/my-exclusion-id".
*/
name: string;
/** OAuth 2.0 token for the current user. */
oauth_token?: string;
/** Pretty-print response. */
pp?: boolean;
/** Returns response with indentations and line breaks. */
prettyPrint?: boolean;
/** Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. */
quotaUser?: string;
/**
* Required. A nonempty list of fields to change in the existing exclusion. New values for the fields are taken from the corresponding fields in the
* LogExclusion included in this request. Fields not mentioned in update_mask are not changed and are ignored in the request.For example, to change the
* filter and description of an exclusion, specify an update_mask of "filter,description".
*/
updateMask?: string;
/** Legacy upload protocol for media (e.g. "media", "multipart"). */
uploadType?: string;
/** Upload protocol for media (e.g. "raw", "multipart"). */
upload_protocol?: string;
}): Request<LogExclusion>;
}
/**
 * Operations on logs within a parent resource (project, organization, folder, or billing
 * account): delete all entries in a log, and list the logs that have entries.
 */
interface LogsResource {
/**
* Deletes all the log entries in a log. The log reappears if it receives new entries. Log entries written shortly before the delete operation might not
* be deleted.
*/
delete(request: {
/** V1 error format. */
"$.xgafv"?: string;
/** OAuth access token. */
access_token?: string;
/** Data format for response. */
alt?: string;
/** OAuth bearer token. */
bearer_token?: string;
/** JSONP */
callback?: string;
/** Selector specifying which fields to include in a partial response. */
fields?: string;
/** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
key?: string;
/**
* Required. The resource name of the log to delete:
* "projects/[PROJECT_ID]/logs/[LOG_ID]"
* "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]"
* "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]"
* "folders/[FOLDER_ID]/logs/[LOG_ID]"
* [LOG_ID] must be URL-encoded. For example, "projects/my-project-id/logs/syslog",
* "organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity". For more information about log names, see LogEntry.
*/
logName: string;
/** OAuth 2.0 token for the current user. */
oauth_token?: string;
/** Pretty-print response. */
pp?: boolean;
/** Returns response with indentations and line breaks. */
prettyPrint?: boolean;
/** Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. */
quotaUser?: string;
/** Legacy upload protocol for media (e.g. "media", "multipart"). */
uploadType?: string;
/** Upload protocol for media (e.g. "raw", "multipart"). */
upload_protocol?: string;
}): Request<{}>;
/** Lists the logs in projects, organizations, folders, or billing accounts. Only logs that have entries are listed. */
list(request: {
/** V1 error format. */
"$.xgafv"?: string;
/** OAuth access token. */
access_token?: string;
/** Data format for response. */
alt?: string;
/** OAuth bearer token. */
bearer_token?: string;
/** JSONP */
callback?: string;
/** Selector specifying which fields to include in a partial response. */
fields?: string;
/** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
key?: string;
/** OAuth 2.0 token for the current user. */
oauth_token?: string;
/**
* Optional. The maximum number of results to return from this request. Non-positive values are ignored. The presence of nextPageToken in the response
* indicates that more results might be available.
*/
pageSize?: number;
/**
* Optional. If present, then retrieve the next batch of results from the preceding call to this method. pageToken must be the value of nextPageToken from
* the previous response. The values of other method parameters should be identical to those in the previous call.
*/
pageToken?: string;
/**
* Required. The resource name that owns the logs:
* "projects/[PROJECT_ID]"
* "organizations/[ORGANIZATION_ID]"
* "billingAccounts/[BILLING_ACCOUNT_ID]"
* "folders/[FOLDER_ID]"
*/
parent: string;
/** Pretty-print response. */
pp?: boolean;
/** Returns response with indentations and line breaks. */
prettyPrint?: boolean;
/** Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. */
quotaUser?: string;
/** Legacy upload protocol for media (e.g. "media", "multipart"). */
uploadType?: string;
/** Upload protocol for media (e.g. "raw", "multipart"). */
upload_protocol?: string;
}): Request<ListLogsResponse>;
}
/**
 * Operations on log sinks (LogSink), which export log entries to a destination: create, delete,
 * get, list, patch, and update. Note that patch and update have the same documented semantics
 * (both replace the sink's destination and filter); they differ only in HTTP verb.
 */
interface SinksResource {
/**
* Creates a sink that exports specified log entries to a destination. The export of newly-ingested log entries begins immediately, unless the sink's
* writer_identity is not permitted to write to the destination. A sink can export log entries only from the resource owning the sink.
*/
create(request: {
/** V1 error format. */
"$.xgafv"?: string;
/** OAuth access token. */
access_token?: string;
/** Data format for response. */
alt?: string;
/** OAuth bearer token. */
bearer_token?: string;
/** JSONP */
callback?: string;
/** Selector specifying which fields to include in a partial response. */
fields?: string;
/** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
key?: string;
/** OAuth 2.0 token for the current user. */
oauth_token?: string;
/**
* Required. The resource in which to create the sink:
* "projects/[PROJECT_ID]"
* "organizations/[ORGANIZATION_ID]"
* "billingAccounts/[BILLING_ACCOUNT_ID]"
* "folders/[FOLDER_ID]"
* Examples: "projects/my-logging-project", "organizations/123456789".
*/
parent: string;
/** Pretty-print response. */
pp?: boolean;
/** Returns response with indentations and line breaks. */
prettyPrint?: boolean;
/** Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. */
quotaUser?: string;
/**
* Optional. Determines the kind of IAM identity returned as writer_identity in the new sink. If this value is omitted or set to false, and if the sink's
* parent is a project, then the value returned as writer_identity is the same group or service account used by Stackdriver Logging before the addition of
* writer identities to this API. The sink's destination must be in the same project as the sink itself.If this field is set to true, or if the sink is
* owned by a non-project resource such as an organization, then the value of writer_identity will be a unique service account used only for exports from
* the new sink. For more information, see writer_identity in LogSink.
*/
uniqueWriterIdentity?: boolean;
/** Legacy upload protocol for media (e.g. "media", "multipart"). */
uploadType?: string;
/** Upload protocol for media (e.g. "raw", "multipart"). */
upload_protocol?: string;
}): Request<LogSink>;
/** Deletes a sink. If the sink has a unique writer_identity, then that service account is also deleted. */
delete(request: {
/** V1 error format. */
"$.xgafv"?: string;
/** OAuth access token. */
access_token?: string;
/** Data format for response. */
alt?: string;
/** OAuth bearer token. */
bearer_token?: string;
/** JSONP */
callback?: string;
/** Selector specifying which fields to include in a partial response. */
fields?: string;
/** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
key?: string;
/** OAuth 2.0 token for the current user. */
oauth_token?: string;
/** Pretty-print response. */
pp?: boolean;
/** Returns response with indentations and line breaks. */
prettyPrint?: boolean;
/** Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. */
quotaUser?: string;
/**
* Required. The full resource name of the sink to delete, including the parent resource and the sink identifier:
* "projects/[PROJECT_ID]/sinks/[SINK_ID]"
* "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]"
* "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]"
* "folders/[FOLDER_ID]/sinks/[SINK_ID]"
* Example: "projects/my-project-id/sinks/my-sink-id".
*/
sinkName: string;
/** Legacy upload protocol for media (e.g. "media", "multipart"). */
uploadType?: string;
/** Upload protocol for media (e.g. "raw", "multipart"). */
upload_protocol?: string;
}): Request<{}>;
/** Gets a sink. */
get(request: {
/** V1 error format. */
"$.xgafv"?: string;
/** OAuth access token. */
access_token?: string;
/** Data format for response. */
alt?: string;
/** OAuth bearer token. */
bearer_token?: string;
/** JSONP */
callback?: string;
/** Selector specifying which fields to include in a partial response. */
fields?: string;
/** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
key?: string;
/** OAuth 2.0 token for the current user. */
oauth_token?: string;
/** Pretty-print response. */
pp?: boolean;
/** Returns response with indentations and line breaks. */
prettyPrint?: boolean;
/** Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. */
quotaUser?: string;
/**
* Required. The resource name of the sink:
* "projects/[PROJECT_ID]/sinks/[SINK_ID]"
* "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]"
* "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]"
* "folders/[FOLDER_ID]/sinks/[SINK_ID]"
* Example: "projects/my-project-id/sinks/my-sink-id".
*/
sinkName: string;
/** Legacy upload protocol for media (e.g. "media", "multipart"). */
uploadType?: string;
/** Upload protocol for media (e.g. "raw", "multipart"). */
upload_protocol?: string;
}): Request<LogSink>;
/** Lists sinks. */
list(request: {
/** V1 error format. */
"$.xgafv"?: string;
/** OAuth access token. */
access_token?: string;
/** Data format for response. */
alt?: string;
/** OAuth bearer token. */
bearer_token?: string;
/** JSONP */
callback?: string;
/** Selector specifying which fields to include in a partial response. */
fields?: string;
/** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
key?: string;
/** OAuth 2.0 token for the current user. */
oauth_token?: string;
/**
* Optional. The maximum number of results to return from this request. Non-positive values are ignored. The presence of nextPageToken in the response
* indicates that more results might be available.
*/
pageSize?: number;
/**
* Optional. If present, then retrieve the next batch of results from the preceding call to this method. pageToken must be the value of nextPageToken from
* the previous response. The values of other method parameters should be identical to those in the previous call.
*/
pageToken?: string;
/**
* Required. The parent resource whose sinks are to be listed:
* "projects/[PROJECT_ID]"
* "organizations/[ORGANIZATION_ID]"
* "billingAccounts/[BILLING_ACCOUNT_ID]"
* "folders/[FOLDER_ID]"
*/
parent: string;
/** Pretty-print response. */
pp?: boolean;
/** Returns response with indentations and line breaks. */
prettyPrint?: boolean;
/** Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. */
quotaUser?: string;
/** Legacy upload protocol for media (e.g. "media", "multipart"). */
uploadType?: string;
/** Upload protocol for media (e.g. "raw", "multipart"). */
upload_protocol?: string;
}): Request<ListSinksResponse>;
/**
* Updates a sink. This method replaces the following fields in the existing sink with values from the new sink: destination, and filter. The updated sink
* might also have a new writer_identity; see the unique_writer_identity field.
*/
patch(request: {
/** V1 error format. */
"$.xgafv"?: string;
/** OAuth access token. */
access_token?: string;
/** Data format for response. */
alt?: string;
/** OAuth bearer token. */
bearer_token?: string;
/** JSONP */
callback?: string;
/** Selector specifying which fields to include in a partial response. */
fields?: string;
/** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
key?: string;
/** OAuth 2.0 token for the current user. */
oauth_token?: string;
/** Pretty-print response. */
pp?: boolean;
/** Returns response with indentations and line breaks. */
prettyPrint?: boolean;
/** Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. */
quotaUser?: string;
/**
* Required. The full resource name of the sink to update, including the parent resource and the sink identifier:
* "projects/[PROJECT_ID]/sinks/[SINK_ID]"
* "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]"
* "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]"
* "folders/[FOLDER_ID]/sinks/[SINK_ID]"
* Example: "projects/my-project-id/sinks/my-sink-id".
*/
sinkName: string;
/**
* Optional. See sinks.create for a description of this field. When updating a sink, the effect of this field on the value of writer_identity in the
* updated sink depends on both the old and new values of this field:
* If the old and new values of this field are both false or both true, then there is no change to the sink's writer_identity.
* If the old value is false and the new value is true, then writer_identity is changed to a unique service account.
* It is an error if the old value is true and the new value is set to false or defaulted to false.
*/
uniqueWriterIdentity?: boolean;
/**
* Optional. Field mask that specifies the fields in sink that need an update. A sink field will be overwritten if, and only if, it is in the update mask.
* name and output only fields cannot be updated.An empty updateMask is temporarily treated as using the following mask for backwards compatibility
* purposes: destination,filter,includeChildren At some point in the future, behavior will be removed and specifying an empty updateMask will be an
* error.For a detailed FieldMask definition, see https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmaskExample:
* updateMask=filter.
*/
updateMask?: string;
/** Legacy upload protocol for media (e.g. "media", "multipart"). */
uploadType?: string;
/** Upload protocol for media (e.g. "raw", "multipart"). */
upload_protocol?: string;
}): Request<LogSink>;
/**
* Updates a sink. This method replaces the following fields in the existing sink with values from the new sink: destination, and filter. The updated sink
* might also have a new writer_identity; see the unique_writer_identity field.
*/
update(request: {
/** V1 error format. */
"$.xgafv"?: string;
/** OAuth access token. */
access_token?: string;
/** Data format for response. */
alt?: string;
/** OAuth bearer token. */
bearer_token?: string;
/** JSONP */
callback?: string;
/** Selector specifying which fields to include in a partial response. */
fields?: string;
/** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
key?: string;
/** OAuth 2.0 token for the current user. */
oauth_token?: string;
/** Pretty-print response. */
pp?: boolean;
/** Returns response with indentations and line breaks. */
prettyPrint?: boolean;
/** Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. */
quotaUser?: string;
/**
* Required. The full resource name of the sink to update, including the parent resource and the sink identifier:
* "projects/[PROJECT_ID]/sinks/[SINK_ID]"
* "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]"
* "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]"
* "folders/[FOLDER_ID]/sinks/[SINK_ID]"
* Example: "projects/my-project-id/sinks/my-sink-id".
*/
sinkName: string;
/**
* Optional. See sinks.create for a description of this field. When updating a sink, the effect of this field on the value of writer_identity in the
* updated sink depends on both the old and new values of this field:
* If the old and new values of this field are both false or both true, then there is no change to the sink's writer_identity.
* If the old value is false and the new value is true, then writer_identity is changed to a unique service account.
* It is an error if the old value is true and the new value is set to false or defaulted to false.
*/
uniqueWriterIdentity?: boolean;
/**
* Optional. Field mask that specifies the fields in sink that need an update. A sink field will be overwritten if, and only if, it is in the update mask.
* name and output only fields cannot be updated.An empty updateMask is temporarily treated as using the following mask for backwards compatibility
* purposes: destination,filter,includeChildren At some point in the future, behavior will be removed and specifying an empty updateMask will be an
* error.For a detailed FieldMask definition, see https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmaskExample:
* updateMask=filter.
*/
updateMask?: string;
/** Legacy upload protocol for media (e.g. "media", "multipart"). */
uploadType?: string;
/** Upload protocol for media (e.g. "raw", "multipart"). */
upload_protocol?: string;
}): Request<LogSink>;
}
    /** Per-billing-account entry point: groups the exclusion, log, and sink sub-resources scoped to a billing account. */
    interface BillingAccountsResource {
        exclusions: ExclusionsResource;
        logs: LogsResource;
        sinks: SinksResource;
    }
    /** Methods for reading (list) and writing (write) log entries. */
    interface EntriesResource {
        /** Lists log entries. Use this method to retrieve log entries from Stackdriver Logging. For ways to export log entries, see Exporting Logs. */
        list(request: {
            /** V1 error format. */
            "$.xgafv"?: string;
            /** OAuth access token. */
            access_token?: string;
            /** Data format for response. */
            alt?: string;
            /** OAuth bearer token. */
            bearer_token?: string;
            /** JSONP */
            callback?: string;
            /** Selector specifying which fields to include in a partial response. */
            fields?: string;
            /** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
            key?: string;
            /** OAuth 2.0 token for the current user. */
            oauth_token?: string;
            /** Pretty-print response. */
            pp?: boolean;
            /** Returns response with indentations and line breaks. */
            prettyPrint?: boolean;
            /** Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. */
            quotaUser?: string;
            /** Legacy upload protocol for media (e.g. "media", "multipart"). */
            uploadType?: string;
            /** Upload protocol for media (e.g. "raw", "multipart"). */
            upload_protocol?: string;
        }): Request<ListLogEntriesResponse>;
        /**
         * Log entry resources. Writes log entries to Stackdriver Logging. This API method is the only way to send log entries to Stackdriver Logging. This method
         * is used, directly or indirectly, by the Stackdriver Logging agent (fluentd) and all logging libraries configured to use Stackdriver Logging.
         */
        write(request: {
            /** V1 error format. */
            "$.xgafv"?: string;
            /** OAuth access token. */
            access_token?: string;
            /** Data format for response. */
            alt?: string;
            /** OAuth bearer token. */
            bearer_token?: string;
            /** JSONP */
            callback?: string;
            /** Selector specifying which fields to include in a partial response. */
            fields?: string;
            /** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
            key?: string;
            /** OAuth 2.0 token for the current user. */
            oauth_token?: string;
            /** Pretty-print response. */
            pp?: boolean;
            /** Returns response with indentations and line breaks. */
            prettyPrint?: boolean;
            /** Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. */
            quotaUser?: string;
            /** Legacy upload protocol for media (e.g. "media", "multipart"). */
            uploadType?: string;
            /** Upload protocol for media (e.g. "raw", "multipart"). */
            upload_protocol?: string;
        }): Request<{}>;
    }
    /** CRUD methods for log exclusions (create, delete, get, list, patch) on a parent project/organization/billing-account/folder resource. */
    interface ExclusionsResource {
        /**
         * Creates a new exclusion in a specified parent resource. Only log entries belonging to that resource can be excluded. You can have up to 10 exclusions
         * in a resource.
         */
        create(request: {
            /** V1 error format. */
            "$.xgafv"?: string;
            /** OAuth access token. */
            access_token?: string;
            /** Data format for response. */
            alt?: string;
            /** OAuth bearer token. */
            bearer_token?: string;
            /** JSONP */
            callback?: string;
            /** Selector specifying which fields to include in a partial response. */
            fields?: string;
            /** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
            key?: string;
            /** OAuth 2.0 token for the current user. */
            oauth_token?: string;
            /**
             * Required. The parent resource in which to create the exclusion:
             * "projects/[PROJECT_ID]"
             * "organizations/[ORGANIZATION_ID]"
             * "billingAccounts/[BILLING_ACCOUNT_ID]"
             * "folders/[FOLDER_ID]"
             * Examples: "projects/my-logging-project", "organizations/123456789".
             */
            parent: string;
            /** Pretty-print response. */
            pp?: boolean;
            /** Returns response with indentations and line breaks. */
            prettyPrint?: boolean;
            /** Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. */
            quotaUser?: string;
            /** Legacy upload protocol for media (e.g. "media", "multipart"). */
            uploadType?: string;
            /** Upload protocol for media (e.g. "raw", "multipart"). */
            upload_protocol?: string;
        }): Request<LogExclusion>;
        /** Deletes an exclusion. */
        delete(request: {
            /** V1 error format. */
            "$.xgafv"?: string;
            /** OAuth access token. */
            access_token?: string;
            /** Data format for response. */
            alt?: string;
            /** OAuth bearer token. */
            bearer_token?: string;
            /** JSONP */
            callback?: string;
            /** Selector specifying which fields to include in a partial response. */
            fields?: string;
            /** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
            key?: string;
            /**
             * Required. The resource name of an existing exclusion to delete:
             * "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]"
             * "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]"
             * "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]"
             * "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]"
             * Example: "projects/my-project-id/exclusions/my-exclusion-id".
             */
            name: string;
            /** OAuth 2.0 token for the current user. */
            oauth_token?: string;
            /** Pretty-print response. */
            pp?: boolean;
            /** Returns response with indentations and line breaks. */
            prettyPrint?: boolean;
            /** Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. */
            quotaUser?: string;
            /** Legacy upload protocol for media (e.g. "media", "multipart"). */
            uploadType?: string;
            /** Upload protocol for media (e.g. "raw", "multipart"). */
            upload_protocol?: string;
        }): Request<{}>;
        /** Gets the description of an exclusion. */
        get(request: {
            /** V1 error format. */
            "$.xgafv"?: string;
            /** OAuth access token. */
            access_token?: string;
            /** Data format for response. */
            alt?: string;
            /** OAuth bearer token. */
            bearer_token?: string;
            /** JSONP */
            callback?: string;
            /** Selector specifying which fields to include in a partial response. */
            fields?: string;
            /** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
            key?: string;
            /**
             * Required. The resource name of an existing exclusion:
             * "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]"
             * "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]"
             * "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]"
             * "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]"
             * Example: "projects/my-project-id/exclusions/my-exclusion-id".
             */
            name: string;
            /** OAuth 2.0 token for the current user. */
            oauth_token?: string;
            /** Pretty-print response. */
            pp?: boolean;
            /** Returns response with indentations and line breaks. */
            prettyPrint?: boolean;
            /** Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. */
            quotaUser?: string;
            /** Legacy upload protocol for media (e.g. "media", "multipart"). */
            uploadType?: string;
            /** Upload protocol for media (e.g. "raw", "multipart"). */
            upload_protocol?: string;
        }): Request<LogExclusion>;
        /** Lists all the exclusions in a parent resource. */
        list(request: {
            /** V1 error format. */
            "$.xgafv"?: string;
            /** OAuth access token. */
            access_token?: string;
            /** Data format for response. */
            alt?: string;
            /** OAuth bearer token. */
            bearer_token?: string;
            /** JSONP */
            callback?: string;
            /** Selector specifying which fields to include in a partial response. */
            fields?: string;
            /** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
            key?: string;
            /** OAuth 2.0 token for the current user. */
            oauth_token?: string;
            /**
             * Optional. The maximum number of results to return from this request. Non-positive values are ignored. The presence of nextPageToken in the response
             * indicates that more results might be available.
             */
            pageSize?: number;
            /**
             * Optional. If present, then retrieve the next batch of results from the preceding call to this method. pageToken must be the value of nextPageToken from
             * the previous response. The values of other method parameters should be identical to those in the previous call.
             */
            pageToken?: string;
            /**
             * Required. The parent resource whose exclusions are to be listed.
             * "projects/[PROJECT_ID]"
             * "organizations/[ORGANIZATION_ID]"
             * "billingAccounts/[BILLING_ACCOUNT_ID]"
             * "folders/[FOLDER_ID]"
             */
            parent: string;
            /** Pretty-print response. */
            pp?: boolean;
            /** Returns response with indentations and line breaks. */
            prettyPrint?: boolean;
            /** Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. */
            quotaUser?: string;
            /** Legacy upload protocol for media (e.g. "media", "multipart"). */
            uploadType?: string;
            /** Upload protocol for media (e.g. "raw", "multipart"). */
            upload_protocol?: string;
        }): Request<ListExclusionsResponse>;
        /** Changes one or more properties of an existing exclusion. */
        patch(request: {
            /** V1 error format. */
            "$.xgafv"?: string;
            /** OAuth access token. */
            access_token?: string;
            /** Data format for response. */
            alt?: string;
            /** OAuth bearer token. */
            bearer_token?: string;
            /** JSONP */
            callback?: string;
            /** Selector specifying which fields to include in a partial response. */
            fields?: string;
            /** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
            key?: string;
            /**
             * Required. The resource name of the exclusion to update:
             * "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]"
             * "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]"
             * "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]"
             * "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]"
             * Example: "projects/my-project-id/exclusions/my-exclusion-id".
             */
            name: string;
            /** OAuth 2.0 token for the current user. */
            oauth_token?: string;
            /** Pretty-print response. */
            pp?: boolean;
            /** Returns response with indentations and line breaks. */
            prettyPrint?: boolean;
            /** Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. */
            quotaUser?: string;
            /**
             * Required. A nonempty list of fields to change in the existing exclusion. New values for the fields are taken from the corresponding fields in the
             * LogExclusion included in this request. Fields not mentioned in update_mask are not changed and are ignored in the request.For example, to change the
             * filter and description of an exclusion, specify an update_mask of "filter,description".
             */
            updateMask?: string;
            /** Legacy upload protocol for media (e.g. "media", "multipart"). */
            uploadType?: string;
            /** Upload protocol for media (e.g. "raw", "multipart"). */
            upload_protocol?: string;
        }): Request<LogExclusion>;
    }
    /** Methods for deleting and listing the logs owned by a parent resource. */
    interface LogsResource {
        /**
         * Deletes all the log entries in a log. The log reappears if it receives new entries. Log entries written shortly before the delete operation might not
         * be deleted.
         */
        delete(request: {
            /** V1 error format. */
            "$.xgafv"?: string;
            /** OAuth access token. */
            access_token?: string;
            /** Data format for response. */
            alt?: string;
            /** OAuth bearer token. */
            bearer_token?: string;
            /** JSONP */
            callback?: string;
            /** Selector specifying which fields to include in a partial response. */
            fields?: string;
            /** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
            key?: string;
            /**
             * Required. The resource name of the log to delete:
             * "projects/[PROJECT_ID]/logs/[LOG_ID]"
             * "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]"
             * "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]"
             * "folders/[FOLDER_ID]/logs/[LOG_ID]"
             * [LOG_ID] must be URL-encoded. For example, "projects/my-project-id/logs/syslog",
             * "organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity". For more information about log names, see LogEntry.
             */
            logName: string;
            /** OAuth 2.0 token for the current user. */
            oauth_token?: string;
            /** Pretty-print response. */
            pp?: boolean;
            /** Returns response with indentations and line breaks. */
            prettyPrint?: boolean;
            /** Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. */
            quotaUser?: string;
            /** Legacy upload protocol for media (e.g. "media", "multipart"). */
            uploadType?: string;
            /** Upload protocol for media (e.g. "raw", "multipart"). */
            upload_protocol?: string;
        }): Request<{}>;
        /** Lists the logs in projects, organizations, folders, or billing accounts. Only logs that have entries are listed. */
        list(request: {
            /** V1 error format. */
            "$.xgafv"?: string;
            /** OAuth access token. */
            access_token?: string;
            /** Data format for response. */
            alt?: string;
            /** OAuth bearer token. */
            bearer_token?: string;
            /** JSONP */
            callback?: string;
            /** Selector specifying which fields to include in a partial response. */
            fields?: string;
            /** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
            key?: string;
            /** OAuth 2.0 token for the current user. */
            oauth_token?: string;
            /**
             * Optional. The maximum number of results to return from this request. Non-positive values are ignored. The presence of nextPageToken in the response
             * indicates that more results might be available.
             */
            pageSize?: number;
            /**
             * Optional. If present, then retrieve the next batch of results from the preceding call to this method. pageToken must be the value of nextPageToken from
             * the previous response. The values of other method parameters should be identical to those in the previous call.
             */
            pageToken?: string;
            /**
             * Required. The resource name that owns the logs:
             * "projects/[PROJECT_ID]"
             * "organizations/[ORGANIZATION_ID]"
             * "billingAccounts/[BILLING_ACCOUNT_ID]"
             * "folders/[FOLDER_ID]"
             */
            parent: string;
            /** Pretty-print response. */
            pp?: boolean;
            /** Returns response with indentations and line breaks. */
            prettyPrint?: boolean;
            /** Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. */
            quotaUser?: string;
            /** Legacy upload protocol for media (e.g. "media", "multipart"). */
            uploadType?: string;
            /** Upload protocol for media (e.g. "raw", "multipart"). */
            upload_protocol?: string;
        }): Request<ListLogsResponse>;
    }
    /** CRUD methods for log export sinks (create, delete, get, list, patch, update) on a parent project/organization/billing-account/folder resource. */
    interface SinksResource {
        /**
         * Creates a sink that exports specified log entries to a destination. The export of newly-ingested log entries begins immediately, unless the sink's
         * writer_identity is not permitted to write to the destination. A sink can export log entries only from the resource owning the sink.
         */
        create(request: {
            /** V1 error format. */
            "$.xgafv"?: string;
            /** OAuth access token. */
            access_token?: string;
            /** Data format for response. */
            alt?: string;
            /** OAuth bearer token. */
            bearer_token?: string;
            /** JSONP */
            callback?: string;
            /** Selector specifying which fields to include in a partial response. */
            fields?: string;
            /** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
            key?: string;
            /** OAuth 2.0 token for the current user. */
            oauth_token?: string;
            /**
             * Required. The resource in which to create the sink:
             * "projects/[PROJECT_ID]"
             * "organizations/[ORGANIZATION_ID]"
             * "billingAccounts/[BILLING_ACCOUNT_ID]"
             * "folders/[FOLDER_ID]"
             * Examples: "projects/my-logging-project", "organizations/123456789".
             */
            parent: string;
            /** Pretty-print response. */
            pp?: boolean;
            /** Returns response with indentations and line breaks. */
            prettyPrint?: boolean;
            /** Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. */
            quotaUser?: string;
            /**
             * Optional. Determines the kind of IAM identity returned as writer_identity in the new sink. If this value is omitted or set to false, and if the sink's
             * parent is a project, then the value returned as writer_identity is the same group or service account used by Stackdriver Logging before the addition of
             * writer identities to this API. The sink's destination must be in the same project as the sink itself.If this field is set to true, or if the sink is
             * owned by a non-project resource such as an organization, then the value of writer_identity will be a unique service account used only for exports from
             * the new sink. For more information, see writer_identity in LogSink.
             */
            uniqueWriterIdentity?: boolean;
            /** Legacy upload protocol for media (e.g. "media", "multipart"). */
            uploadType?: string;
            /** Upload protocol for media (e.g. "raw", "multipart"). */
            upload_protocol?: string;
        }): Request<LogSink>;
        /** Deletes a sink. If the sink has a unique writer_identity, then that service account is also deleted. */
        delete(request: {
            /** V1 error format. */
            "$.xgafv"?: string;
            /** OAuth access token. */
            access_token?: string;
            /** Data format for response. */
            alt?: string;
            /** OAuth bearer token. */
            bearer_token?: string;
            /** JSONP */
            callback?: string;
            /** Selector specifying which fields to include in a partial response. */
            fields?: string;
            /** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
            key?: string;
            /** OAuth 2.0 token for the current user. */
            oauth_token?: string;
            /** Pretty-print response. */
            pp?: boolean;
            /** Returns response with indentations and line breaks. */
            prettyPrint?: boolean;
            /** Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. */
            quotaUser?: string;
            /**
             * Required. The full resource name of the sink to delete, including the parent resource and the sink identifier:
             * "projects/[PROJECT_ID]/sinks/[SINK_ID]"
             * "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]"
             * "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]"
             * "folders/[FOLDER_ID]/sinks/[SINK_ID]"
             * Example: "projects/my-project-id/sinks/my-sink-id".
             */
            sinkName: string;
            /** Legacy upload protocol for media (e.g. "media", "multipart"). */
            uploadType?: string;
            /** Upload protocol for media (e.g. "raw", "multipart"). */
            upload_protocol?: string;
        }): Request<{}>;
        /** Gets a sink. */
        get(request: {
            /** V1 error format. */
            "$.xgafv"?: string;
            /** OAuth access token. */
            access_token?: string;
            /** Data format for response. */
            alt?: string;
            /** OAuth bearer token. */
            bearer_token?: string;
            /** JSONP */
            callback?: string;
            /** Selector specifying which fields to include in a partial response. */
            fields?: string;
            /** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
            key?: string;
            /** OAuth 2.0 token for the current user. */
            oauth_token?: string;
            /** Pretty-print response. */
            pp?: boolean;
            /** Returns response with indentations and line breaks. */
            prettyPrint?: boolean;
            /** Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. */
            quotaUser?: string;
            /**
             * Required. The resource name of the sink:
             * "projects/[PROJECT_ID]/sinks/[SINK_ID]"
             * "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]"
             * "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]"
             * "folders/[FOLDER_ID]/sinks/[SINK_ID]"
             * Example: "projects/my-project-id/sinks/my-sink-id".
             */
            sinkName: string;
            /** Legacy upload protocol for media (e.g. "media", "multipart"). */
            uploadType?: string;
            /** Upload protocol for media (e.g. "raw", "multipart"). */
            upload_protocol?: string;
        }): Request<LogSink>;
        /** Lists sinks. */
        list(request: {
            /** V1 error format. */
            "$.xgafv"?: string;
            /** OAuth access token. */
            access_token?: string;
            /** Data format for response. */
            alt?: string;
            /** OAuth bearer token. */
            bearer_token?: string;
            /** JSONP */
            callback?: string;
            /** Selector specifying which fields to include in a partial response. */
            fields?: string;
            /** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
            key?: string;
            /** OAuth 2.0 token for the current user. */
            oauth_token?: string;
            /**
             * Optional. The maximum number of results to return from this request. Non-positive values are ignored. The presence of nextPageToken in the response
             * indicates that more results might be available.
             */
            pageSize?: number;
            /**
             * Optional. If present, then retrieve the next batch of results from the preceding call to this method. pageToken must be the value of nextPageToken from
             * the previous response. The values of other method parameters should be identical to those in the previous call.
             */
            pageToken?: string;
            /**
             * Required. The parent resource whose sinks are to be listed:
             * "projects/[PROJECT_ID]"
             * "organizations/[ORGANIZATION_ID]"
             * "billingAccounts/[BILLING_ACCOUNT_ID]"
             * "folders/[FOLDER_ID]"
             */
            parent: string;
            /** Pretty-print response. */
            pp?: boolean;
            /** Returns response with indentations and line breaks. */
            prettyPrint?: boolean;
            /** Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. */
            quotaUser?: string;
            /** Legacy upload protocol for media (e.g. "media", "multipart"). */
            uploadType?: string;
            /** Upload protocol for media (e.g. "raw", "multipart"). */
            upload_protocol?: string;
        }): Request<ListSinksResponse>;
        /**
         * Updates a sink. This method replaces the following fields in the existing sink with values from the new sink: destination, and filter. The updated sink
         * might also have a new writer_identity; see the unique_writer_identity field.
         */
        patch(request: {
            /** V1 error format. */
            "$.xgafv"?: string;
            /** OAuth access token. */
            access_token?: string;
            /** Data format for response. */
            alt?: string;
            /** OAuth bearer token. */
            bearer_token?: string;
            /** JSONP */
            callback?: string;
            /** Selector specifying which fields to include in a partial response. */
            fields?: string;
            /** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
            key?: string;
            /** OAuth 2.0 token for the current user. */
            oauth_token?: string;
            /** Pretty-print response. */
            pp?: boolean;
            /** Returns response with indentations and line breaks. */
            prettyPrint?: boolean;
            /** Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. */
            quotaUser?: string;
            /**
             * Required. The full resource name of the sink to update, including the parent resource and the sink identifier:
             * "projects/[PROJECT_ID]/sinks/[SINK_ID]"
             * "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]"
             * "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]"
             * "folders/[FOLDER_ID]/sinks/[SINK_ID]"
             * Example: "projects/my-project-id/sinks/my-sink-id".
             */
            sinkName: string;
            /**
             * Optional. See sinks.create for a description of this field. When updating a sink, the effect of this field on the value of writer_identity in the
             * updated sink depends on both the old and new values of this field:
             * If the old and new values of this field are both false or both true, then there is no change to the sink's writer_identity.
             * If the old value is false and the new value is true, then writer_identity is changed to a unique service account.
             * It is an error if the old value is true and the new value is set to false or defaulted to false.
             */
            uniqueWriterIdentity?: boolean;
            /**
             * Optional. Field mask that specifies the fields in sink that need an update. A sink field will be overwritten if, and only if, it is in the update mask.
             * name and output only fields cannot be updated.An empty updateMask is temporarily treated as using the following mask for backwards compatibility
             * purposes: destination,filter,includeChildren At some point in the future, this behavior will be removed and specifying an empty updateMask will be an
             * error.For a detailed FieldMask definition, see https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmaskExample:
             * updateMask=filter.
             */
            updateMask?: string;
            /** Legacy upload protocol for media (e.g. "media", "multipart"). */
            uploadType?: string;
            /** Upload protocol for media (e.g. "raw", "multipart"). */
            upload_protocol?: string;
        }): Request<LogSink>;
        /**
         * Updates a sink. This method replaces the following fields in the existing sink with values from the new sink: destination, and filter. The updated sink
         * might also have a new writer_identity; see the unique_writer_identity field.
         */
        update(request: {
            /** V1 error format. */
            "$.xgafv"?: string;
            /** OAuth access token. */
            access_token?: string;
            /** Data format for response. */
            alt?: string;
            /** OAuth bearer token. */
            bearer_token?: string;
            /** JSONP */
            callback?: string;
            /** Selector specifying which fields to include in a partial response. */
            fields?: string;
            /** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
            key?: string;
            /** OAuth 2.0 token for the current user. */
            oauth_token?: string;
            /** Pretty-print response. */
            pp?: boolean;
            /** Returns response with indentations and line breaks. */
            prettyPrint?: boolean;
            /** Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. */
            quotaUser?: string;
            /**
             * Required. The full resource name of the sink to update, including the parent resource and the sink identifier:
             * "projects/[PROJECT_ID]/sinks/[SINK_ID]"
             * "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]"
             * "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]"
             * "folders/[FOLDER_ID]/sinks/[SINK_ID]"
             * Example: "projects/my-project-id/sinks/my-sink-id".
             */
            sinkName: string;
            /**
             * Optional. See sinks.create for a description of this field. When updating a sink, the effect of this field on the value of writer_identity in the
             * updated sink depends on both the old and new values of this field:
             * If the old and new values of this field are both false or both true, then there is no change to the sink's writer_identity.
             * If the old value is false and the new value is true, then writer_identity is changed to a unique service account.
             * It is an error if the old value is true and the new value is set to false or defaulted to false.
             */
            uniqueWriterIdentity?: boolean;
            /**
             * Optional. Field mask that specifies the fields in sink that need an update. A sink field will be overwritten if, and only if, it is in the update mask.
             * name and output only fields cannot be updated.An empty updateMask is temporarily treated as using the following mask for backwards compatibility
             * purposes: destination,filter,includeChildren At some point in the future, this behavior will be removed and specifying an empty updateMask will be an
             * error.For a detailed FieldMask definition, see https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmaskExample:
             * updateMask=filter.
             */
            updateMask?: string;
            /** Legacy upload protocol for media (e.g. "media", "multipart"). */
            uploadType?: string;
            /** Upload protocol for media (e.g. "raw", "multipart"). */
            upload_protocol?: string;
        }): Request<LogSink>;
    }
    /** Per-folder entry point: groups the exclusion, log, and sink sub-resources scoped to a folder. */
    interface FoldersResource {
        exclusions: ExclusionsResource;
        logs: LogsResource;
        sinks: SinksResource;
    }
    /** Read-only access to the monitored resource type descriptors known to the Logging API. */
    interface MonitoredResourceDescriptorsResource {
        /** Lists the descriptors for monitored resource types used by Stackdriver Logging. */
        list(request: {
            /** V1 error format. */
            "$.xgafv"?: string;
            /** OAuth access token. */
            access_token?: string;
            /** Data format for response. */
            alt?: string;
            /** OAuth bearer token. */
            bearer_token?: string;
            /** JSONP */
            callback?: string;
            /** Selector specifying which fields to include in a partial response. */
            fields?: string;
            /** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
            key?: string;
            /** OAuth 2.0 token for the current user. */
            oauth_token?: string;
            /**
             * Optional. The maximum number of results to return from this request. Non-positive values are ignored. The presence of nextPageToken in the response
             * indicates that more results might be available.
             */
            pageSize?: number;
            /**
             * Optional. If present, then retrieve the next batch of results from the preceding call to this method. pageToken must be the value of nextPageToken from
             * the previous response. The values of other method parameters should be identical to those in the previous call.
             */
            pageToken?: string;
            /** Pretty-print response. */
            pp?: boolean;
            /** Returns response with indentations and line breaks. */
            prettyPrint?: boolean;
            /** Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. */
            quotaUser?: string;
            /** Legacy upload protocol for media (e.g. "media", "multipart"). */
            uploadType?: string;
            /** Upload protocol for media (e.g. "raw", "multipart"). */
            upload_protocol?: string;
        }): Request<ListMonitoredResourceDescriptorsResponse>;
    }
        /**
         * CRUD operations for log exclusions. An exclusion suppresses matching
         * log entries for a parent resource (project, organization, billing
         * account, or folder); per the create() docs, a resource may hold up to
         * 10 exclusions.
         */
        interface ExclusionsResource {
            /**
             * Creates a new exclusion in a specified parent resource. Only log entries belonging to that resource can be excluded. You can have up to 10 exclusions
             * in a resource.
             */
            create(request: {
                /** V1 error format. */
                "$.xgafv"?: string;
                /** OAuth access token. */
                access_token?: string;
                /** Data format for response. */
                alt?: string;
                /** OAuth bearer token. */
                bearer_token?: string;
                /** JSONP */
                callback?: string;
                /** Selector specifying which fields to include in a partial response. */
                fields?: string;
                /** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
                key?: string;
                /** OAuth 2.0 token for the current user. */
                oauth_token?: string;
                /**
                 * Required. The parent resource in which to create the exclusion:
                 * "projects/[PROJECT_ID]"
                 * "organizations/[ORGANIZATION_ID]"
                 * "billingAccounts/[BILLING_ACCOUNT_ID]"
                 * "folders/[FOLDER_ID]"
                 * Examples: "projects/my-logging-project", "organizations/123456789".
                 */
                parent: string;
                /** Pretty-print response. */
                pp?: boolean;
                /** Returns response with indentations and line breaks. */
                prettyPrint?: boolean;
                /** Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. */
                quotaUser?: string;
                /** Legacy upload protocol for media (e.g. "media", "multipart"). */
                uploadType?: string;
                /** Upload protocol for media (e.g. "raw", "multipart"). */
                upload_protocol?: string;
            }): Request<LogExclusion>;
            /** Deletes an exclusion. */
            delete(request: {
                /** V1 error format. */
                "$.xgafv"?: string;
                /** OAuth access token. */
                access_token?: string;
                /** Data format for response. */
                alt?: string;
                /** OAuth bearer token. */
                bearer_token?: string;
                /** JSONP */
                callback?: string;
                /** Selector specifying which fields to include in a partial response. */
                fields?: string;
                /** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
                key?: string;
                /**
                 * Required. The resource name of an existing exclusion to delete:
                 * "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]"
                 * "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]"
                 * "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]"
                 * "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]"
                 * Example: "projects/my-project-id/exclusions/my-exclusion-id".
                 */
                name: string;
                /** OAuth 2.0 token for the current user. */
                oauth_token?: string;
                /** Pretty-print response. */
                pp?: boolean;
                /** Returns response with indentations and line breaks. */
                prettyPrint?: boolean;
                /** Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. */
                quotaUser?: string;
                /** Legacy upload protocol for media (e.g. "media", "multipart"). */
                uploadType?: string;
                /** Upload protocol for media (e.g. "raw", "multipart"). */
                upload_protocol?: string;
            }): Request<{}>;
            /** Gets the description of an exclusion. */
            get(request: {
                /** V1 error format. */
                "$.xgafv"?: string;
                /** OAuth access token. */
                access_token?: string;
                /** Data format for response. */
                alt?: string;
                /** OAuth bearer token. */
                bearer_token?: string;
                /** JSONP */
                callback?: string;
                /** Selector specifying which fields to include in a partial response. */
                fields?: string;
                /** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
                key?: string;
                /**
                 * Required. The resource name of an existing exclusion:
                 * "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]"
                 * "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]"
                 * "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]"
                 * "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]"
                 * Example: "projects/my-project-id/exclusions/my-exclusion-id".
                 */
                name: string;
                /** OAuth 2.0 token for the current user. */
                oauth_token?: string;
                /** Pretty-print response. */
                pp?: boolean;
                /** Returns response with indentations and line breaks. */
                prettyPrint?: boolean;
                /** Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. */
                quotaUser?: string;
                /** Legacy upload protocol for media (e.g. "media", "multipart"). */
                uploadType?: string;
                /** Upload protocol for media (e.g. "raw", "multipart"). */
                upload_protocol?: string;
            }): Request<LogExclusion>;
            /** Lists all the exclusions in a parent resource. */
            list(request: {
                /** V1 error format. */
                "$.xgafv"?: string;
                /** OAuth access token. */
                access_token?: string;
                /** Data format for response. */
                alt?: string;
                /** OAuth bearer token. */
                bearer_token?: string;
                /** JSONP */
                callback?: string;
                /** Selector specifying which fields to include in a partial response. */
                fields?: string;
                /** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
                key?: string;
                /** OAuth 2.0 token for the current user. */
                oauth_token?: string;
                /**
                 * Optional. The maximum number of results to return from this request. Non-positive values are ignored. The presence of nextPageToken in the response
                 * indicates that more results might be available.
                 */
                pageSize?: number;
                /**
                 * Optional. If present, then retrieve the next batch of results from the preceding call to this method. pageToken must be the value of nextPageToken from
                 * the previous response. The values of other method parameters should be identical to those in the previous call.
                 */
                pageToken?: string;
                /**
                 * Required. The parent resource whose exclusions are to be listed.
                 * "projects/[PROJECT_ID]"
                 * "organizations/[ORGANIZATION_ID]"
                 * "billingAccounts/[BILLING_ACCOUNT_ID]"
                 * "folders/[FOLDER_ID]"
                 */
                parent: string;
                /** Pretty-print response. */
                pp?: boolean;
                /** Returns response with indentations and line breaks. */
                prettyPrint?: boolean;
                /** Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. */
                quotaUser?: string;
                /** Legacy upload protocol for media (e.g. "media", "multipart"). */
                uploadType?: string;
                /** Upload protocol for media (e.g. "raw", "multipart"). */
                upload_protocol?: string;
            }): Request<ListExclusionsResponse>;
            /** Changes one or more properties of an existing exclusion. */
            patch(request: {
                /** V1 error format. */
                "$.xgafv"?: string;
                /** OAuth access token. */
                access_token?: string;
                /** Data format for response. */
                alt?: string;
                /** OAuth bearer token. */
                bearer_token?: string;
                /** JSONP */
                callback?: string;
                /** Selector specifying which fields to include in a partial response. */
                fields?: string;
                /** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
                key?: string;
                /**
                 * Required. The resource name of the exclusion to update:
                 * "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]"
                 * "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]"
                 * "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]"
                 * "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]"
                 * Example: "projects/my-project-id/exclusions/my-exclusion-id".
                 */
                name: string;
                /** OAuth 2.0 token for the current user. */
                oauth_token?: string;
                /** Pretty-print response. */
                pp?: boolean;
                /** Returns response with indentations and line breaks. */
                prettyPrint?: boolean;
                /** Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. */
                quotaUser?: string;
                /**
                 * Required. A nonempty list of fields to change in the existing exclusion. New values for the fields are taken from the corresponding fields in the
                 * LogExclusion included in this request. Fields not mentioned in update_mask are not changed and are ignored in the request.For example, to change the
                 * filter and description of an exclusion, specify an update_mask of "filter,description".
                 */
                updateMask?: string;
                /** Legacy upload protocol for media (e.g. "media", "multipart"). */
                uploadType?: string;
                /** Upload protocol for media (e.g. "raw", "multipart"). */
                upload_protocol?: string;
            }): Request<LogExclusion>;
        }
        /**
         * Operations on logs themselves: deleting all entries of a log and
         * listing the logs (only logs that have entries) under a project,
         * organization, folder, or billing account.
         */
        interface LogsResource {
            /**
             * Deletes all the log entries in a log. The log reappears if it receives new entries. Log entries written shortly before the delete operation might not
             * be deleted.
             */
            delete(request: {
                /** V1 error format. */
                "$.xgafv"?: string;
                /** OAuth access token. */
                access_token?: string;
                /** Data format for response. */
                alt?: string;
                /** OAuth bearer token. */
                bearer_token?: string;
                /** JSONP */
                callback?: string;
                /** Selector specifying which fields to include in a partial response. */
                fields?: string;
                /** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
                key?: string;
                /**
                 * Required. The resource name of the log to delete:
                 * "projects/[PROJECT_ID]/logs/[LOG_ID]"
                 * "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]"
                 * "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]"
                 * "folders/[FOLDER_ID]/logs/[LOG_ID]"
                 * [LOG_ID] must be URL-encoded. For example, "projects/my-project-id/logs/syslog",
                 * "organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity". For more information about log names, see LogEntry.
                 */
                logName: string;
                /** OAuth 2.0 token for the current user. */
                oauth_token?: string;
                /** Pretty-print response. */
                pp?: boolean;
                /** Returns response with indentations and line breaks. */
                prettyPrint?: boolean;
                /** Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. */
                quotaUser?: string;
                /** Legacy upload protocol for media (e.g. "media", "multipart"). */
                uploadType?: string;
                /** Upload protocol for media (e.g. "raw", "multipart"). */
                upload_protocol?: string;
            }): Request<{}>;
            /** Lists the logs in projects, organizations, folders, or billing accounts. Only logs that have entries are listed. */
            list(request: {
                /** V1 error format. */
                "$.xgafv"?: string;
                /** OAuth access token. */
                access_token?: string;
                /** Data format for response. */
                alt?: string;
                /** OAuth bearer token. */
                bearer_token?: string;
                /** JSONP */
                callback?: string;
                /** Selector specifying which fields to include in a partial response. */
                fields?: string;
                /** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
                key?: string;
                /** OAuth 2.0 token for the current user. */
                oauth_token?: string;
                /**
                 * Optional. The maximum number of results to return from this request. Non-positive values are ignored. The presence of nextPageToken in the response
                 * indicates that more results might be available.
                 */
                pageSize?: number;
                /**
                 * Optional. If present, then retrieve the next batch of results from the preceding call to this method. pageToken must be the value of nextPageToken from
                 * the previous response. The values of other method parameters should be identical to those in the previous call.
                 */
                pageToken?: string;
                /**
                 * Required. The resource name that owns the logs:
                 * "projects/[PROJECT_ID]"
                 * "organizations/[ORGANIZATION_ID]"
                 * "billingAccounts/[BILLING_ACCOUNT_ID]"
                 * "folders/[FOLDER_ID]"
                 */
                parent: string;
                /** Pretty-print response. */
                pp?: boolean;
                /** Returns response with indentations and line breaks. */
                prettyPrint?: boolean;
                /** Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. */
                quotaUser?: string;
                /** Legacy upload protocol for media (e.g. "media", "multipart"). */
                uploadType?: string;
                /** Upload protocol for media (e.g. "raw", "multipart"). */
                upload_protocol?: string;
            }): Request<ListLogsResponse>;
        }
        /**
         * CRUD operations for log sinks, which export log entries to a
         * destination. Note that patch() and update() carry identical
         * documentation and request shapes here; both replace the sink's
         * destination and filter and may change its writer_identity.
         */
        interface SinksResource {
            /**
             * Creates a sink that exports specified log entries to a destination. The export of newly-ingested log entries begins immediately, unless the sink's
             * writer_identity is not permitted to write to the destination. A sink can export log entries only from the resource owning the sink.
             */
            create(request: {
                /** V1 error format. */
                "$.xgafv"?: string;
                /** OAuth access token. */
                access_token?: string;
                /** Data format for response. */
                alt?: string;
                /** OAuth bearer token. */
                bearer_token?: string;
                /** JSONP */
                callback?: string;
                /** Selector specifying which fields to include in a partial response. */
                fields?: string;
                /** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
                key?: string;
                /** OAuth 2.0 token for the current user. */
                oauth_token?: string;
                /**
                 * Required. The resource in which to create the sink:
                 * "projects/[PROJECT_ID]"
                 * "organizations/[ORGANIZATION_ID]"
                 * "billingAccounts/[BILLING_ACCOUNT_ID]"
                 * "folders/[FOLDER_ID]"
                 * Examples: "projects/my-logging-project", "organizations/123456789".
                 */
                parent: string;
                /** Pretty-print response. */
                pp?: boolean;
                /** Returns response with indentations and line breaks. */
                prettyPrint?: boolean;
                /** Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. */
                quotaUser?: string;
                /**
                 * Optional. Determines the kind of IAM identity returned as writer_identity in the new sink. If this value is omitted or set to false, and if the sink's
                 * parent is a project, then the value returned as writer_identity is the same group or service account used by Stackdriver Logging before the addition of
                 * writer identities to this API. The sink's destination must be in the same project as the sink itself.If this field is set to true, or if the sink is
                 * owned by a non-project resource such as an organization, then the value of writer_identity will be a unique service account used only for exports from
                 * the new sink. For more information, see writer_identity in LogSink.
                 */
                uniqueWriterIdentity?: boolean;
                /** Legacy upload protocol for media (e.g. "media", "multipart"). */
                uploadType?: string;
                /** Upload protocol for media (e.g. "raw", "multipart"). */
                upload_protocol?: string;
            }): Request<LogSink>;
            /** Deletes a sink. If the sink has a unique writer_identity, then that service account is also deleted. */
            delete(request: {
                /** V1 error format. */
                "$.xgafv"?: string;
                /** OAuth access token. */
                access_token?: string;
                /** Data format for response. */
                alt?: string;
                /** OAuth bearer token. */
                bearer_token?: string;
                /** JSONP */
                callback?: string;
                /** Selector specifying which fields to include in a partial response. */
                fields?: string;
                /** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
                key?: string;
                /** OAuth 2.0 token for the current user. */
                oauth_token?: string;
                /** Pretty-print response. */
                pp?: boolean;
                /** Returns response with indentations and line breaks. */
                prettyPrint?: boolean;
                /** Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. */
                quotaUser?: string;
                /**
                 * Required. The full resource name of the sink to delete, including the parent resource and the sink identifier:
                 * "projects/[PROJECT_ID]/sinks/[SINK_ID]"
                 * "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]"
                 * "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]"
                 * "folders/[FOLDER_ID]/sinks/[SINK_ID]"
                 * Example: "projects/my-project-id/sinks/my-sink-id".
                 */
                sinkName: string;
                /** Legacy upload protocol for media (e.g. "media", "multipart"). */
                uploadType?: string;
                /** Upload protocol for media (e.g. "raw", "multipart"). */
                upload_protocol?: string;
            }): Request<{}>;
            /** Gets a sink. */
            get(request: {
                /** V1 error format. */
                "$.xgafv"?: string;
                /** OAuth access token. */
                access_token?: string;
                /** Data format for response. */
                alt?: string;
                /** OAuth bearer token. */
                bearer_token?: string;
                /** JSONP */
                callback?: string;
                /** Selector specifying which fields to include in a partial response. */
                fields?: string;
                /** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
                key?: string;
                /** OAuth 2.0 token for the current user. */
                oauth_token?: string;
                /** Pretty-print response. */
                pp?: boolean;
                /** Returns response with indentations and line breaks. */
                prettyPrint?: boolean;
                /** Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. */
                quotaUser?: string;
                /**
                 * Required. The resource name of the sink:
                 * "projects/[PROJECT_ID]/sinks/[SINK_ID]"
                 * "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]"
                 * "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]"
                 * "folders/[FOLDER_ID]/sinks/[SINK_ID]"
                 * Example: "projects/my-project-id/sinks/my-sink-id".
                 */
                sinkName: string;
                /** Legacy upload protocol for media (e.g. "media", "multipart"). */
                uploadType?: string;
                /** Upload protocol for media (e.g. "raw", "multipart"). */
                upload_protocol?: string;
            }): Request<LogSink>;
            /** Lists sinks. */
            list(request: {
                /** V1 error format. */
                "$.xgafv"?: string;
                /** OAuth access token. */
                access_token?: string;
                /** Data format for response. */
                alt?: string;
                /** OAuth bearer token. */
                bearer_token?: string;
                /** JSONP */
                callback?: string;
                /** Selector specifying which fields to include in a partial response. */
                fields?: string;
                /** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
                key?: string;
                /** OAuth 2.0 token for the current user. */
                oauth_token?: string;
                /**
                 * Optional. The maximum number of results to return from this request. Non-positive values are ignored. The presence of nextPageToken in the response
                 * indicates that more results might be available.
                 */
                pageSize?: number;
                /**
                 * Optional. If present, then retrieve the next batch of results from the preceding call to this method. pageToken must be the value of nextPageToken from
                 * the previous response. The values of other method parameters should be identical to those in the previous call.
                 */
                pageToken?: string;
                /**
                 * Required. The parent resource whose sinks are to be listed:
                 * "projects/[PROJECT_ID]"
                 * "organizations/[ORGANIZATION_ID]"
                 * "billingAccounts/[BILLING_ACCOUNT_ID]"
                 * "folders/[FOLDER_ID]"
                 */
                parent: string;
                /** Pretty-print response. */
                pp?: boolean;
                /** Returns response with indentations and line breaks. */
                prettyPrint?: boolean;
                /** Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. */
                quotaUser?: string;
                /** Legacy upload protocol for media (e.g. "media", "multipart"). */
                uploadType?: string;
                /** Upload protocol for media (e.g. "raw", "multipart"). */
                upload_protocol?: string;
            }): Request<ListSinksResponse>;
            /**
             * Updates a sink. This method replaces the following fields in the existing sink with values from the new sink: destination, and filter. The updated sink
             * might also have a new writer_identity; see the unique_writer_identity field.
             */
            patch(request: {
                /** V1 error format. */
                "$.xgafv"?: string;
                /** OAuth access token. */
                access_token?: string;
                /** Data format for response. */
                alt?: string;
                /** OAuth bearer token. */
                bearer_token?: string;
                /** JSONP */
                callback?: string;
                /** Selector specifying which fields to include in a partial response. */
                fields?: string;
                /** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
                key?: string;
                /** OAuth 2.0 token for the current user. */
                oauth_token?: string;
                /** Pretty-print response. */
                pp?: boolean;
                /** Returns response with indentations and line breaks. */
                prettyPrint?: boolean;
                /** Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. */
                quotaUser?: string;
                /**
                 * Required. The full resource name of the sink to update, including the parent resource and the sink identifier:
                 * "projects/[PROJECT_ID]/sinks/[SINK_ID]"
                 * "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]"
                 * "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]"
                 * "folders/[FOLDER_ID]/sinks/[SINK_ID]"
                 * Example: "projects/my-project-id/sinks/my-sink-id".
                 */
                sinkName: string;
                /**
                 * Optional. See sinks.create for a description of this field. When updating a sink, the effect of this field on the value of writer_identity in the
                 * updated sink depends on both the old and new values of this field:
                 * If the old and new values of this field are both false or both true, then there is no change to the sink's writer_identity.
                 * If the old value is false and the new value is true, then writer_identity is changed to a unique service account.
                 * It is an error if the old value is true and the new value is set to false or defaulted to false.
                 */
                uniqueWriterIdentity?: boolean;
                /**
                 * Optional. Field mask that specifies the fields in sink that need an update. A sink field will be overwritten if, and only if, it is in the update mask.
                 * name and output only fields cannot be updated.An empty updateMask is temporarily treated as using the following mask for backwards compatibility
                 * purposes: destination,filter,includeChildren At some point in the future, behavior will be removed and specifying an empty updateMask will be an
                 * error.For a detailed FieldMask definition, see https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmaskExample:
                 * updateMask=filter.
                 */
                updateMask?: string;
                /** Legacy upload protocol for media (e.g. "media", "multipart"). */
                uploadType?: string;
                /** Upload protocol for media (e.g. "raw", "multipart"). */
                upload_protocol?: string;
            }): Request<LogSink>;
            /**
             * Updates a sink. This method replaces the following fields in the existing sink with values from the new sink: destination, and filter. The updated sink
             * might also have a new writer_identity; see the unique_writer_identity field.
             */
            update(request: {
                /** V1 error format. */
                "$.xgafv"?: string;
                /** OAuth access token. */
                access_token?: string;
                /** Data format for response. */
                alt?: string;
                /** OAuth bearer token. */
                bearer_token?: string;
                /** JSONP */
                callback?: string;
                /** Selector specifying which fields to include in a partial response. */
                fields?: string;
                /** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
                key?: string;
                /** OAuth 2.0 token for the current user. */
                oauth_token?: string;
                /** Pretty-print response. */
                pp?: boolean;
                /** Returns response with indentations and line breaks. */
                prettyPrint?: boolean;
                /** Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. */
                quotaUser?: string;
                /**
                 * Required. The full resource name of the sink to update, including the parent resource and the sink identifier:
                 * "projects/[PROJECT_ID]/sinks/[SINK_ID]"
                 * "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]"
                 * "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]"
                 * "folders/[FOLDER_ID]/sinks/[SINK_ID]"
                 * Example: "projects/my-project-id/sinks/my-sink-id".
                 */
                sinkName: string;
                /**
                 * Optional. See sinks.create for a description of this field. When updating a sink, the effect of this field on the value of writer_identity in the
                 * updated sink depends on both the old and new values of this field:
                 * If the old and new values of this field are both false or both true, then there is no change to the sink's writer_identity.
                 * If the old value is false and the new value is true, then writer_identity is changed to a unique service account.
                 * It is an error if the old value is true and the new value is set to false or defaulted to false.
                 */
                uniqueWriterIdentity?: boolean;
                /**
                 * Optional. Field mask that specifies the fields in sink that need an update. A sink field will be overwritten if, and only if, it is in the update mask.
                 * name and output only fields cannot be updated.An empty updateMask is temporarily treated as using the following mask for backwards compatibility
                 * purposes: destination,filter,includeChildren At some point in the future, behavior will be removed and specifying an empty updateMask will be an
                 * error.For a detailed FieldMask definition, see https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmaskExample:
                 * updateMask=filter.
                 */
                updateMask?: string;
                /** Legacy upload protocol for media (e.g. "media", "multipart"). */
                uploadType?: string;
                /** Upload protocol for media (e.g. "raw", "multipart"). */
                upload_protocol?: string;
            }): Request<LogSink>;
        }
        /**
         * Organization-scoped Stackdriver Logging resources (exclusions, logs,
         * and sinks) for an "organizations/[ORGANIZATION_ID]" parent.
         */
        interface OrganizationsResource {
            exclusions: ExclusionsResource;
            logs: LogsResource;
            sinks: SinksResource;
        }
        /**
         * NOTE(review): this is a second declaration of ExclusionsResource in
         * this file (an earlier identical one appears above); TypeScript merges
         * interface declarations, so this redeclaration adds no new members —
         * presumably generator output. Verify against the code generator
         * before deduplicating.
         */
        interface ExclusionsResource {
            /**
             * Creates a new exclusion in a specified parent resource. Only log entries belonging to that resource can be excluded. You can have up to 10 exclusions
             * in a resource.
             */
            create(request: {
                /** V1 error format. */
                "$.xgafv"?: string;
                /** OAuth access token. */
                access_token?: string;
                /** Data format for response. */
                alt?: string;
                /** OAuth bearer token. */
                bearer_token?: string;
                /** JSONP */
                callback?: string;
                /** Selector specifying which fields to include in a partial response. */
                fields?: string;
                /** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
                key?: string;
                /** OAuth 2.0 token for the current user. */
                oauth_token?: string;
                /**
                 * Required. The parent resource in which to create the exclusion:
                 * "projects/[PROJECT_ID]"
                 * "organizations/[ORGANIZATION_ID]"
                 * "billingAccounts/[BILLING_ACCOUNT_ID]"
                 * "folders/[FOLDER_ID]"
                 * Examples: "projects/my-logging-project", "organizations/123456789".
                 */
                parent: string;
                /** Pretty-print response. */
                pp?: boolean;
                /** Returns response with indentations and line breaks. */
                prettyPrint?: boolean;
                /** Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. */
                quotaUser?: string;
                /** Legacy upload protocol for media (e.g. "media", "multipart"). */
                uploadType?: string;
                /** Upload protocol for media (e.g. "raw", "multipart"). */
                upload_protocol?: string;
            }): Request<LogExclusion>;
            /** Deletes an exclusion. */
            delete(request: {
                /** V1 error format. */
                "$.xgafv"?: string;
                /** OAuth access token. */
                access_token?: string;
                /** Data format for response. */
                alt?: string;
                /** OAuth bearer token. */
                bearer_token?: string;
                /** JSONP */
                callback?: string;
                /** Selector specifying which fields to include in a partial response. */
                fields?: string;
                /** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
                key?: string;
                /**
                 * Required. The resource name of an existing exclusion to delete:
                 * "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]"
                 * "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]"
                 * "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]"
                 * "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]"
                 * Example: "projects/my-project-id/exclusions/my-exclusion-id".
                 */
                name: string;
                /** OAuth 2.0 token for the current user. */
                oauth_token?: string;
                /** Pretty-print response. */
                pp?: boolean;
                /** Returns response with indentations and line breaks. */
                prettyPrint?: boolean;
                /** Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. */
                quotaUser?: string;
                /** Legacy upload protocol for media (e.g. "media", "multipart"). */
                uploadType?: string;
                /** Upload protocol for media (e.g. "raw", "multipart"). */
                upload_protocol?: string;
            }): Request<{}>;
            /** Gets the description of an exclusion. */
            get(request: {
                /** V1 error format. */
                "$.xgafv"?: string;
                /** OAuth access token. */
                access_token?: string;
                /** Data format for response. */
                alt?: string;
                /** OAuth bearer token. */
                bearer_token?: string;
                /** JSONP */
                callback?: string;
                /** Selector specifying which fields to include in a partial response. */
                fields?: string;
                /** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
                key?: string;
                /**
                 * Required. The resource name of an existing exclusion:
                 * "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]"
                 * "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]"
                 * "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]"
                 * "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]"
                 * Example: "projects/my-project-id/exclusions/my-exclusion-id".
                 */
                name: string;
                /** OAuth 2.0 token for the current user. */
                oauth_token?: string;
                /** Pretty-print response. */
                pp?: boolean;
                /** Returns response with indentations and line breaks. */
                prettyPrint?: boolean;
                /** Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. */
                quotaUser?: string;
                /** Legacy upload protocol for media (e.g. "media", "multipart"). */
                uploadType?: string;
                /** Upload protocol for media (e.g. "raw", "multipart"). */
                upload_protocol?: string;
            }): Request<LogExclusion>;
            /** Lists all the exclusions in a parent resource. */
            list(request: {
                /** V1 error format. */
                "$.xgafv"?: string;
                /** OAuth access token. */
                access_token?: string;
                /** Data format for response. */
                alt?: string;
                /** OAuth bearer token. */
                bearer_token?: string;
                /** JSONP */
                callback?: string;
                /** Selector specifying which fields to include in a partial response. */
                fields?: string;
                /** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
                key?: string;
                /** OAuth 2.0 token for the current user. */
                oauth_token?: string;
                /**
                 * Optional. The maximum number of results to return from this request. Non-positive values are ignored. The presence of nextPageToken in the response
                 * indicates that more results might be available.
                 */
                pageSize?: number;
                /**
                 * Optional. If present, then retrieve the next batch of results from the preceding call to this method. pageToken must be the value of nextPageToken from
                 * the previous response. The values of other method parameters should be identical to those in the previous call.
                 */
                pageToken?: string;
                /**
                 * Required. The parent resource whose exclusions are to be listed.
                 * "projects/[PROJECT_ID]"
                 * "organizations/[ORGANIZATION_ID]"
                 * "billingAccounts/[BILLING_ACCOUNT_ID]"
                 * "folders/[FOLDER_ID]"
                 */
                parent: string;
                /** Pretty-print response. */
                pp?: boolean;
                /** Returns response with indentations and line breaks. */
                prettyPrint?: boolean;
                /** Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. */
                quotaUser?: string;
                /** Legacy upload protocol for media (e.g. "media", "multipart"). */
                uploadType?: string;
                /** Upload protocol for media (e.g. "raw", "multipart"). */
                upload_protocol?: string;
            }): Request<ListExclusionsResponse>;
            /** Changes one or more properties of an existing exclusion. */
            patch(request: {
                /** V1 error format. */
                "$.xgafv"?: string;
                /** OAuth access token. */
                access_token?: string;
                /** Data format for response. */
                alt?: string;
                /** OAuth bearer token. */
                bearer_token?: string;
                /** JSONP */
                callback?: string;
                /** Selector specifying which fields to include in a partial response. */
                fields?: string;
                /** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
                key?: string;
                /**
                 * Required. The resource name of the exclusion to update:
                 * "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]"
                 * "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]"
                 * "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]"
                 * "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]"
                 * Example: "projects/my-project-id/exclusions/my-exclusion-id".
                 */
                name: string;
                /** OAuth 2.0 token for the current user. */
                oauth_token?: string;
                /** Pretty-print response. */
                pp?: boolean;
                /** Returns response with indentations and line breaks. */
                prettyPrint?: boolean;
                /** Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. */
                quotaUser?: string;
                /**
                 * Required. A nonempty list of fields to change in the existing exclusion. New values for the fields are taken from the corresponding fields in the
                 * LogExclusion included in this request. Fields not mentioned in update_mask are not changed and are ignored in the request.For example, to change the
                 * filter and description of an exclusion, specify an update_mask of "filter,description".
                 */
                updateMask?: string;
                /** Legacy upload protocol for media (e.g. "media", "multipart"). */
                uploadType?: string;
                /** Upload protocol for media (e.g. "raw", "multipart"). */
                upload_protocol?: string;
            }): Request<LogExclusion>;
        }
/** Methods of the Stackdriver Logging v2 `logs` collection: delete a log or list the logs under a parent resource. */
interface LogsResource {
    /**
     * Deletes all the log entries in a log. The log reappears if it receives new entries. Log entries written shortly before the delete operation might not
     * be deleted.
     */
    delete(request: {
        /** V1 error format. */
        "$.xgafv"?: string;
        /** OAuth access token. */
        access_token?: string;
        /** Data format for response. */
        alt?: string;
        /** OAuth bearer token. */
        bearer_token?: string;
        /** JSONP */
        callback?: string;
        /** Selector specifying which fields to include in a partial response. */
        fields?: string;
        /** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
        key?: string;
        /**
         * Required. The resource name of the log to delete:
         * "projects/[PROJECT_ID]/logs/[LOG_ID]"
         * "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]"
         * "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]"
         * "folders/[FOLDER_ID]/logs/[LOG_ID]"
         * [LOG_ID] must be URL-encoded. For example, "projects/my-project-id/logs/syslog",
         * "organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity". For more information about log names, see LogEntry.
         */
        logName: string;
        /** OAuth 2.0 token for the current user. */
        oauth_token?: string;
        /** Pretty-print response. */
        pp?: boolean;
        /** Returns response with indentations and line breaks. */
        prettyPrint?: boolean;
        /** Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. */
        quotaUser?: string;
        /** Legacy upload protocol for media (e.g. "media", "multipart"). */
        uploadType?: string;
        /** Upload protocol for media (e.g. "raw", "multipart"). */
        upload_protocol?: string;
    }): Request<{}>;
    /** Lists the logs in projects, organizations, folders, or billing accounts. Only logs that have entries are listed. */
    list(request: {
        /** V1 error format. */
        "$.xgafv"?: string;
        /** OAuth access token. */
        access_token?: string;
        /** Data format for response. */
        alt?: string;
        /** OAuth bearer token. */
        bearer_token?: string;
        /** JSONP */
        callback?: string;
        /** Selector specifying which fields to include in a partial response. */
        fields?: string;
        /** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
        key?: string;
        /** OAuth 2.0 token for the current user. */
        oauth_token?: string;
        /**
         * Optional. The maximum number of results to return from this request. Non-positive values are ignored. The presence of nextPageToken in the response
         * indicates that more results might be available.
         */
        pageSize?: number;
        /**
         * Optional. If present, then retrieve the next batch of results from the preceding call to this method. pageToken must be the value of nextPageToken from
         * the previous response. The values of other method parameters should be identical to those in the previous call.
         */
        pageToken?: string;
        /**
         * Required. The resource name that owns the logs:
         * "projects/[PROJECT_ID]"
         * "organizations/[ORGANIZATION_ID]"
         * "billingAccounts/[BILLING_ACCOUNT_ID]"
         * "folders/[FOLDER_ID]"
         */
        parent: string;
        /** Pretty-print response. */
        pp?: boolean;
        /** Returns response with indentations and line breaks. */
        prettyPrint?: boolean;
        /** Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. */
        quotaUser?: string;
        /** Legacy upload protocol for media (e.g. "media", "multipart"). */
        uploadType?: string;
        /** Upload protocol for media (e.g. "raw", "multipart"). */
        upload_protocol?: string;
    }): Request<ListLogsResponse>;
}
/** CRUD methods of the Stackdriver Logging v2 `metrics` collection (logs-based metrics under a project). */
interface MetricsResource {
    /** Creates a logs-based metric. */
    create(request: {
        /** V1 error format. */
        "$.xgafv"?: string;
        /** OAuth access token. */
        access_token?: string;
        /** Data format for response. */
        alt?: string;
        /** OAuth bearer token. */
        bearer_token?: string;
        /** JSONP */
        callback?: string;
        /** Selector specifying which fields to include in a partial response. */
        fields?: string;
        /** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
        key?: string;
        /** OAuth 2.0 token for the current user. */
        oauth_token?: string;
        /**
         * The resource name of the project in which to create the metric:
         * "projects/[PROJECT_ID]"
         * The new metric must be provided in the request.
         */
        parent: string;
        /** Pretty-print response. */
        pp?: boolean;
        /** Returns response with indentations and line breaks. */
        prettyPrint?: boolean;
        /** Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. */
        quotaUser?: string;
        /** Legacy upload protocol for media (e.g. "media", "multipart"). */
        uploadType?: string;
        /** Upload protocol for media (e.g. "raw", "multipart"). */
        upload_protocol?: string;
    }): Request<LogMetric>;
    /** Deletes a logs-based metric. */
    delete(request: {
        /** V1 error format. */
        "$.xgafv"?: string;
        /** OAuth access token. */
        access_token?: string;
        /** Data format for response. */
        alt?: string;
        /** OAuth bearer token. */
        bearer_token?: string;
        /** JSONP */
        callback?: string;
        /** Selector specifying which fields to include in a partial response. */
        fields?: string;
        /** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
        key?: string;
        /**
         * The resource name of the metric to delete:
         * "projects/[PROJECT_ID]/metrics/[METRIC_ID]"
         */
        metricName: string;
        /** OAuth 2.0 token for the current user. */
        oauth_token?: string;
        /** Pretty-print response. */
        pp?: boolean;
        /** Returns response with indentations and line breaks. */
        prettyPrint?: boolean;
        /** Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. */
        quotaUser?: string;
        /** Legacy upload protocol for media (e.g. "media", "multipart"). */
        uploadType?: string;
        /** Upload protocol for media (e.g. "raw", "multipart"). */
        upload_protocol?: string;
    }): Request<{}>;
    /** Gets a logs-based metric. */
    get(request: {
        /** V1 error format. */
        "$.xgafv"?: string;
        /** OAuth access token. */
        access_token?: string;
        /** Data format for response. */
        alt?: string;
        /** OAuth bearer token. */
        bearer_token?: string;
        /** JSONP */
        callback?: string;
        /** Selector specifying which fields to include in a partial response. */
        fields?: string;
        /** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
        key?: string;
        /**
         * The resource name of the desired metric:
         * "projects/[PROJECT_ID]/metrics/[METRIC_ID]"
         */
        metricName: string;
        /** OAuth 2.0 token for the current user. */
        oauth_token?: string;
        /** Pretty-print response. */
        pp?: boolean;
        /** Returns response with indentations and line breaks. */
        prettyPrint?: boolean;
        /** Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. */
        quotaUser?: string;
        /** Legacy upload protocol for media (e.g. "media", "multipart"). */
        uploadType?: string;
        /** Upload protocol for media (e.g. "raw", "multipart"). */
        upload_protocol?: string;
    }): Request<LogMetric>;
    /** Lists logs-based metrics. */
    list(request: {
        /** V1 error format. */
        "$.xgafv"?: string;
        /** OAuth access token. */
        access_token?: string;
        /** Data format for response. */
        alt?: string;
        /** OAuth bearer token. */
        bearer_token?: string;
        /** JSONP */
        callback?: string;
        /** Selector specifying which fields to include in a partial response. */
        fields?: string;
        /** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
        key?: string;
        /** OAuth 2.0 token for the current user. */
        oauth_token?: string;
        /**
         * Optional. The maximum number of results to return from this request. Non-positive values are ignored. The presence of nextPageToken in the response
         * indicates that more results might be available.
         */
        pageSize?: number;
        /**
         * Optional. If present, then retrieve the next batch of results from the preceding call to this method. pageToken must be the value of nextPageToken from
         * the previous response. The values of other method parameters should be identical to those in the previous call.
         */
        pageToken?: string;
        /**
         * Required. The name of the project containing the metrics:
         * "projects/[PROJECT_ID]"
         */
        parent: string;
        /** Pretty-print response. */
        pp?: boolean;
        /** Returns response with indentations and line breaks. */
        prettyPrint?: boolean;
        /** Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. */
        quotaUser?: string;
        /** Legacy upload protocol for media (e.g. "media", "multipart"). */
        uploadType?: string;
        /** Upload protocol for media (e.g. "raw", "multipart"). */
        upload_protocol?: string;
    }): Request<ListLogMetricsResponse>;
    /** Creates or updates a logs-based metric. */
    update(request: {
        /** V1 error format. */
        "$.xgafv"?: string;
        /** OAuth access token. */
        access_token?: string;
        /** Data format for response. */
        alt?: string;
        /** OAuth bearer token. */
        bearer_token?: string;
        /** JSONP */
        callback?: string;
        /** Selector specifying which fields to include in a partial response. */
        fields?: string;
        /** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
        key?: string;
        /**
         * The resource name of the metric to update:
         * "projects/[PROJECT_ID]/metrics/[METRIC_ID]"
         * The updated metric must be provided in the request and its name field must be the same as [METRIC_ID] If the metric does not exist in [PROJECT_ID],
         * then a new metric is created.
         */
        metricName: string;
        /** OAuth 2.0 token for the current user. */
        oauth_token?: string;
        /** Pretty-print response. */
        pp?: boolean;
        /** Returns response with indentations and line breaks. */
        prettyPrint?: boolean;
        /** Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. */
        quotaUser?: string;
        /** Legacy upload protocol for media (e.g. "media", "multipart"). */
        uploadType?: string;
        /** Upload protocol for media (e.g. "raw", "multipart"). */
        upload_protocol?: string;
    }): Request<LogMetric>;
}
/** Methods of the Stackdriver Logging v2 `sinks` collection (log export sinks): create, delete, get, list, patch, update. */
interface SinksResource {
    /**
     * Creates a sink that exports specified log entries to a destination. The export of newly-ingested log entries begins immediately, unless the sink's
     * writer_identity is not permitted to write to the destination. A sink can export log entries only from the resource owning the sink.
     */
    create(request: {
        /** V1 error format. */
        "$.xgafv"?: string;
        /** OAuth access token. */
        access_token?: string;
        /** Data format for response. */
        alt?: string;
        /** OAuth bearer token. */
        bearer_token?: string;
        /** JSONP */
        callback?: string;
        /** Selector specifying which fields to include in a partial response. */
        fields?: string;
        /** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
        key?: string;
        /** OAuth 2.0 token for the current user. */
        oauth_token?: string;
        /**
         * Required. The resource in which to create the sink:
         * "projects/[PROJECT_ID]"
         * "organizations/[ORGANIZATION_ID]"
         * "billingAccounts/[BILLING_ACCOUNT_ID]"
         * "folders/[FOLDER_ID]"
         * Examples: "projects/my-logging-project", "organizations/123456789".
         */
        parent: string;
        /** Pretty-print response. */
        pp?: boolean;
        /** Returns response with indentations and line breaks. */
        prettyPrint?: boolean;
        /** Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. */
        quotaUser?: string;
        /**
         * Optional. Determines the kind of IAM identity returned as writer_identity in the new sink. If this value is omitted or set to false, and if the sink's
         * parent is a project, then the value returned as writer_identity is the same group or service account used by Stackdriver Logging before the addition of
         * writer identities to this API. The sink's destination must be in the same project as the sink itself.If this field is set to true, or if the sink is
         * owned by a non-project resource such as an organization, then the value of writer_identity will be a unique service account used only for exports from
         * the new sink. For more information, see writer_identity in LogSink.
         */
        uniqueWriterIdentity?: boolean;
        /** Legacy upload protocol for media (e.g. "media", "multipart"). */
        uploadType?: string;
        /** Upload protocol for media (e.g. "raw", "multipart"). */
        upload_protocol?: string;
    }): Request<LogSink>;
    /** Deletes a sink. If the sink has a unique writer_identity, then that service account is also deleted. */
    delete(request: {
        /** V1 error format. */
        "$.xgafv"?: string;
        /** OAuth access token. */
        access_token?: string;
        /** Data format for response. */
        alt?: string;
        /** OAuth bearer token. */
        bearer_token?: string;
        /** JSONP */
        callback?: string;
        /** Selector specifying which fields to include in a partial response. */
        fields?: string;
        /** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
        key?: string;
        /** OAuth 2.0 token for the current user. */
        oauth_token?: string;
        /** Pretty-print response. */
        pp?: boolean;
        /** Returns response with indentations and line breaks. */
        prettyPrint?: boolean;
        /** Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. */
        quotaUser?: string;
        /**
         * Required. The full resource name of the sink to delete, including the parent resource and the sink identifier:
         * "projects/[PROJECT_ID]/sinks/[SINK_ID]"
         * "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]"
         * "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]"
         * "folders/[FOLDER_ID]/sinks/[SINK_ID]"
         * Example: "projects/my-project-id/sinks/my-sink-id".
         */
        sinkName: string;
        /** Legacy upload protocol for media (e.g. "media", "multipart"). */
        uploadType?: string;
        /** Upload protocol for media (e.g. "raw", "multipart"). */
        upload_protocol?: string;
    }): Request<{}>;
    /** Gets a sink. */
    get(request: {
        /** V1 error format. */
        "$.xgafv"?: string;
        /** OAuth access token. */
        access_token?: string;
        /** Data format for response. */
        alt?: string;
        /** OAuth bearer token. */
        bearer_token?: string;
        /** JSONP */
        callback?: string;
        /** Selector specifying which fields to include in a partial response. */
        fields?: string;
        /** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
        key?: string;
        /** OAuth 2.0 token for the current user. */
        oauth_token?: string;
        /** Pretty-print response. */
        pp?: boolean;
        /** Returns response with indentations and line breaks. */
        prettyPrint?: boolean;
        /** Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. */
        quotaUser?: string;
        /**
         * Required. The resource name of the sink:
         * "projects/[PROJECT_ID]/sinks/[SINK_ID]"
         * "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]"
         * "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]"
         * "folders/[FOLDER_ID]/sinks/[SINK_ID]"
         * Example: "projects/my-project-id/sinks/my-sink-id".
         */
        sinkName: string;
        /** Legacy upload protocol for media (e.g. "media", "multipart"). */
        uploadType?: string;
        /** Upload protocol for media (e.g. "raw", "multipart"). */
        upload_protocol?: string;
    }): Request<LogSink>;
    /** Lists sinks. */
    list(request: {
        /** V1 error format. */
        "$.xgafv"?: string;
        /** OAuth access token. */
        access_token?: string;
        /** Data format for response. */
        alt?: string;
        /** OAuth bearer token. */
        bearer_token?: string;
        /** JSONP */
        callback?: string;
        /** Selector specifying which fields to include in a partial response. */
        fields?: string;
        /** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
        key?: string;
        /** OAuth 2.0 token for the current user. */
        oauth_token?: string;
        /**
         * Optional. The maximum number of results to return from this request. Non-positive values are ignored. The presence of nextPageToken in the response
         * indicates that more results might be available.
         */
        pageSize?: number;
        /**
         * Optional. If present, then retrieve the next batch of results from the preceding call to this method. pageToken must be the value of nextPageToken from
         * the previous response. The values of other method parameters should be identical to those in the previous call.
         */
        pageToken?: string;
        /**
         * Required. The parent resource whose sinks are to be listed:
         * "projects/[PROJECT_ID]"
         * "organizations/[ORGANIZATION_ID]"
         * "billingAccounts/[BILLING_ACCOUNT_ID]"
         * "folders/[FOLDER_ID]"
         */
        parent: string;
        /** Pretty-print response. */
        pp?: boolean;
        /** Returns response with indentations and line breaks. */
        prettyPrint?: boolean;
        /** Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. */
        quotaUser?: string;
        /** Legacy upload protocol for media (e.g. "media", "multipart"). */
        uploadType?: string;
        /** Upload protocol for media (e.g. "raw", "multipart"). */
        upload_protocol?: string;
    }): Request<ListSinksResponse>;
    /**
     * Updates a sink. This method replaces the following fields in the existing sink with values from the new sink: destination, and filter. The updated sink
     * might also have a new writer_identity; see the unique_writer_identity field.
     */
    patch(request: {
        /** V1 error format. */
        "$.xgafv"?: string;
        /** OAuth access token. */
        access_token?: string;
        /** Data format for response. */
        alt?: string;
        /** OAuth bearer token. */
        bearer_token?: string;
        /** JSONP */
        callback?: string;
        /** Selector specifying which fields to include in a partial response. */
        fields?: string;
        /** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
        key?: string;
        /** OAuth 2.0 token for the current user. */
        oauth_token?: string;
        /** Pretty-print response. */
        pp?: boolean;
        /** Returns response with indentations and line breaks. */
        prettyPrint?: boolean;
        /** Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. */
        quotaUser?: string;
        /**
         * Required. The full resource name of the sink to update, including the parent resource and the sink identifier:
         * "projects/[PROJECT_ID]/sinks/[SINK_ID]"
         * "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]"
         * "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]"
         * "folders/[FOLDER_ID]/sinks/[SINK_ID]"
         * Example: "projects/my-project-id/sinks/my-sink-id".
         */
        sinkName: string;
        /**
         * Optional. See sinks.create for a description of this field. When updating a sink, the effect of this field on the value of writer_identity in the
         * updated sink depends on both the old and new values of this field:
         * If the old and new values of this field are both false or both true, then there is no change to the sink's writer_identity.
         * If the old value is false and the new value is true, then writer_identity is changed to a unique service account.
         * It is an error if the old value is true and the new value is set to false or defaulted to false.
         */
        uniqueWriterIdentity?: boolean;
        /**
         * Optional. Field mask that specifies the fields in sink that need an update. A sink field will be overwritten if, and only if, it is in the update mask.
         * name and output only fields cannot be updated.An empty updateMask is temporarily treated as using the following mask for backwards compatibility
         * purposes: destination,filter,includeChildren At some point in the future, behavior will be removed and specifying an empty updateMask will be an
         * error.For a detailed FieldMask definition, see https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmaskExample:
         * updateMask=filter.
         */
        updateMask?: string;
        /** Legacy upload protocol for media (e.g. "media", "multipart"). */
        uploadType?: string;
        /** Upload protocol for media (e.g. "raw", "multipart"). */
        upload_protocol?: string;
    }): Request<LogSink>;
    /**
     * Updates a sink. This method replaces the following fields in the existing sink with values from the new sink: destination, and filter. The updated sink
     * might also have a new writer_identity; see the unique_writer_identity field.
     */
    update(request: {
        /** V1 error format. */
        "$.xgafv"?: string;
        /** OAuth access token. */
        access_token?: string;
        /** Data format for response. */
        alt?: string;
        /** OAuth bearer token. */
        bearer_token?: string;
        /** JSONP */
        callback?: string;
        /** Selector specifying which fields to include in a partial response. */
        fields?: string;
        /** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
        key?: string;
        /** OAuth 2.0 token for the current user. */
        oauth_token?: string;
        /** Pretty-print response. */
        pp?: boolean;
        /** Returns response with indentations and line breaks. */
        prettyPrint?: boolean;
        /** Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. */
        quotaUser?: string;
        /**
         * Required. The full resource name of the sink to update, including the parent resource and the sink identifier:
         * "projects/[PROJECT_ID]/sinks/[SINK_ID]"
         * "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]"
         * "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]"
         * "folders/[FOLDER_ID]/sinks/[SINK_ID]"
         * Example: "projects/my-project-id/sinks/my-sink-id".
         */
        sinkName: string;
        /**
         * Optional. See sinks.create for a description of this field. When updating a sink, the effect of this field on the value of writer_identity in the
         * updated sink depends on both the old and new values of this field:
         * If the old and new values of this field are both false or both true, then there is no change to the sink's writer_identity.
         * If the old value is false and the new value is true, then writer_identity is changed to a unique service account.
         * It is an error if the old value is true and the new value is set to false or defaulted to false.
         */
        uniqueWriterIdentity?: boolean;
        /**
         * Optional. Field mask that specifies the fields in sink that need an update. A sink field will be overwritten if, and only if, it is in the update mask.
         * name and output only fields cannot be updated.An empty updateMask is temporarily treated as using the following mask for backwards compatibility
         * purposes: destination,filter,includeChildren At some point in the future, behavior will be removed and specifying an empty updateMask will be an
         * error.For a detailed FieldMask definition, see https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmaskExample:
         * updateMask=filter.
         */
        updateMask?: string;
        /** Legacy upload protocol for media (e.g. "media", "multipart"). */
        uploadType?: string;
        /** Upload protocol for media (e.g. "raw", "multipart"). */
        upload_protocol?: string;
    }): Request<LogSink>;
}
/** Project-scoped entry point grouping the per-collection resources of the Logging API. */
interface ProjectsResource {
    exclusions: ExclusionsResource;
    logs: LogsResource;
    metrics: MetricsResource;
    sinks: SinksResource;
}
}
}
import { ChainId } from '@0x/contract-addresses';
import { BlockParam, ContractAddresses, GethCallOverrides } from '@0x/contract-wrappers';
import {
FillQuoteTransformerOrderType,
LimitOrderFields,
RfqOrder,
RfqOrderFields,
Signature,
} from '@0x/protocol-utils';
import { TakerRequestQueryParamsUnnested, V4SignedRfqOrder } from '@0x/quote-server';
import { Fee } from '@0x/quote-server/lib/src/types';
import { BigNumber } from '@0x/utils';
import { AxiosRequestConfig } from 'axios';
import {
ERC20BridgeSource,
GetMarketOrdersOpts,
LiquidityProviderRegistry,
OptimizedMarketOrder,
TokenAdjacencyGraph,
} from './utils/market_operation_utils/types';
export { SamplerMetrics } from './utils/market_operation_utils/types';
import { ExtendedQuoteReportSources, PriceComparisonsReport, QuoteReport } from './utils/quote_report_generator';
import { MetricsProxy } from './utils/quote_requestor';
/**
 * Options controlling how stale/unfillable orders are pruned.
 * expiryBufferMs: The buffer added when calculating whether an order is expired or not. Per the
 *     `Ms` suffix this is presumably milliseconds, though the original doc said "seconds.
 *     Defaults to 300s (5m)" — TODO(review): confirm the unit against the implementation.
 * permittedOrderFeeTypes: A set of all the takerFee types that OrderPruner will filter for
 */
export interface OrderPrunerOpts {
    expiryBufferMs: number;
    permittedOrderFeeTypes: Set<OrderPrunerPermittedFeeTypes>;
}
/**
 * A native 0x order (limit or RFQ) together with its signature.
 * order: The raw order fields.
 * type: Discriminant — whether `order` is a limit order or an RFQ order.
 * signature: The maker's signature over the order.
 */
export interface SignedOrder<T> {
    order: T;
    type: FillQuoteTransformerOrderType.Limit | FillQuoteTransformerOrderType.Rfq;
    signature: Signature;
}
/** Either a signed limit order or a signed RFQ order. */
export type SignedNativeOrder = SignedOrder<LimitOrderFields> | SignedOrder<RfqOrderFields>;
/** A signed native order augmented with its fillable-amount fields (see NativeOrderFillableAmountFields). */
export type NativeOrderWithFillableAmounts = SignedNativeOrder & NativeOrderFillableAmountFields;
/**
 * The remaining fillable portions of a native order.
 * fillableMakerAmount: Amount of makerAsset that is fillable
 * fillableTakerAmount: Amount of takerAsset that is fillable
 * fillableTakerFeeAmount: Amount of takerFee paid to fill fillableTakerAmount
 */
export interface NativeOrderFillableAmountFields {
    fillableMakerAmount: BigNumber;
    fillableTakerAmount: BigNumber;
    fillableTakerFeeAmount: BigNumber;
}
/**
 * Represents the metadata to call a smart contract with calldata.
 * calldataHexString: The hexstring of the calldata.
 * toAddress: The contract address to call.
 * ethAmount: The eth amount in wei to send with the smart contract call.
 * allowanceTarget: The address the taker should grant an allowance to.
 * gasOverhead: The gas overhead needed to be added to the gas limit to allow for optional
 * operations which may not be visible at eth_estimateGas time
 */
export interface CalldataInfo {
    calldataHexString: string;
    toAddress: string;
    ethAmount: BigNumber;
    allowanceTarget: string;
    gasOverhead: BigNumber;
}
/**
 * Interface that varying SwapQuoteConsumers adhere to (exchange consumer, router consumer, forwarder consumer, coordinator consumer)
 * getCalldataOrThrowAsync: Get CalldataInfo to swap for tokens with provided SwapQuote. Throws if invalid SwapQuote is provided.
 * executeSwapQuoteOrThrowAsync: Executes a web3 transaction to swap for tokens with provided SwapQuote. Throws if invalid SwapQuote is provided.
 */
export interface SwapQuoteConsumerBase {
    getCalldataOrThrowAsync(quote: SwapQuote, opts: Partial<SwapQuoteGetOutputOpts>): Promise<CalldataInfo>;
    executeSwapQuoteOrThrowAsync(quote: SwapQuote, opts: Partial<SwapQuoteExecutionOpts>): Promise<string>;
}
/**
 * chainId: The chainId that the desired orders should be for.
 * contractAddresses: Optional override for the 0x contract addresses on that chain.
 */
export interface SwapQuoteConsumerOpts {
    chainId: number;
    contractAddresses?: ContractAddresses;
}
/**
 * Represents the options provided to a generic SwapQuoteConsumer
 */
export interface SwapQuoteGetOutputOpts {
    // NOTE(review): `ExchangeProxyContractOpts | any` collapses to `any`, which disables type
    // checking for this field entirely; consider narrowing the union in a future major version.
    extensionContractOpts?: ExchangeProxyContractOpts | any;
}
/**
 * ethAmount: The amount of eth sent with the execution of a swap.
 * takerAddress: The address to perform the buy. Defaults to the first available address from the provider.
 * gasLimit: The amount of gas to send with a transaction, in gas units (the original doc said
 *     "in Gwei", but gas limits are not denominated in Gwei). Defaults to an eth_estimateGas rpc call.
 */
export interface SwapQuoteExecutionOpts extends SwapQuoteGetOutputOpts {
    ethAmount?: BigNumber;
    takerAddress?: string;
    gasLimit?: number;
}
/** How an affiliate fee (if any) is charged on a swap. */
export enum AffiliateFeeType {
    None,
    PercentageFee,
    PositiveSlippageFee,
}
/** An affiliate fee charge: its kind, recipient, and the amounts in each side's token. */
export interface AffiliateFeeAmount {
    feeType: AffiliateFeeType;
    recipient: string;
    // Fee denominated in the buy (maker) token.
    buyTokenFeeAmount: BigNumber;
    // Fee denominated in the sell (taker) token.
    sellTokenFeeAmount: BigNumber;
}
/**
 * Automatically resolved protocol fee refund receiver addresses.
 */
export enum ExchangeProxyRefundReceiver {
    // Refund to the taker address.
    Taker = '0x0000000000000000000000000000000000000001',
    // Refund to the sender address.
    Sender = '0x0000000000000000000000000000000000000002',
}
/**
 * @param isFromETH Whether the input token is ETH.
 * @param isToETH Whether the output token is ETH.
 * @param affiliateFee Fee denominated in taker or maker asset to send to specified recipient.
 * @param refundReceiver The receiver of unspent protocol fees.
 *        May be a valid address or one of:
 *        `address(0)`: Stay in flash wallet.
 *        `address(1)`: Send to the taker.
 *        `address(2)`: Send to the sender (caller of `transformERC20()`).
 * @param isMetaTransaction NOTE(review): undocumented in the original — presumably true when the
 *        swap is executed via a meta-transaction; confirm against the consumer implementation.
 * @param shouldSellEntireBalance Whether the entire balance of the caller should be sold. Used
 *        for contracts where the balance at transaction time is different to the quote amount.
 *        This forgoes certain VIP routes which do not support this feature.
 */
export interface ExchangeProxyContractOpts {
    isFromETH: boolean;
    isToETH: boolean;
    affiliateFee: AffiliateFeeAmount;
    refundReceiver: string | ExchangeProxyRefundReceiver;
    isMetaTransaction: boolean;
    shouldSellEntireBalance: boolean;
}
/** Inputs used when deciding which extension contract type to route through. */
export interface GetExtensionContractTypeOpts {
    takerAddress?: string;
    ethAmount?: BigNumber;
}
/**
 * Fields common to buy and sell swap quotes.
 * takerToken: Address of the taker asset.
 * makerToken: Address of the maker asset.
 * gasPrice: gas price used to determine protocolFee amount, default to ethGasStation fast amount.
 * orders: An array of objects conforming to OptimizedMarketOrder. These orders can be used to cover the requested assetBuyAmount plus slippage.
 * bestCaseQuoteInfo: Info about the best case price for the asset.
 * worstCaseQuoteInfo: Info about the worst case price for the asset.
 * sourceBreakdown: Per-liquidity-source proportions of the quote (see SwapQuoteOrdersBreakdown).
 * quoteReport / extendedQuoteReportSources / priceComparisonsReport: Optional reporting artifacts.
 * isTwoHop: Whether the quote routes through an intermediate token.
 * takerAmountPerEth / makerAmountPerEth: Exchange rates used for fee conversions — NOTE(review):
 *     units not shown in this file; confirm against the quote calculator.
 */
export interface SwapQuoteBase {
    takerToken: string;
    makerToken: string;
    gasPrice: BigNumber;
    orders: OptimizedMarketOrder[];
    bestCaseQuoteInfo: SwapQuoteInfo;
    worstCaseQuoteInfo: SwapQuoteInfo;
    sourceBreakdown: SwapQuoteOrdersBreakdown;
    quoteReport?: QuoteReport;
    extendedQuoteReportSources?: ExtendedQuoteReportSources;
    priceComparisonsReport?: PriceComparisonsReport;
    isTwoHop: boolean;
    makerTokenDecimals: number;
    takerTokenDecimals: number;
    takerAmountPerEth: BigNumber;
    makerAmountPerEth: BigNumber;
}
/**
 * takerTokenFillAmount: The amount of takerToken sold for makerToken.
 * type: Specified MarketOperation the SwapQuote is provided for
 */
export interface MarketSellSwapQuote extends SwapQuoteBase {
    takerTokenFillAmount: BigNumber;
    type: MarketOperation.Sell;
}
/**
 * makerTokenFillAmount: The amount of makerToken bought with takerToken.
 * type: Specified MarketOperation the SwapQuote is provided for
 */
export interface MarketBuySwapQuote extends SwapQuoteBase {
    makerTokenFillAmount: BigNumber;
    type: MarketOperation.Buy;
}
/** A swap quote for either market operation, discriminated by `type`. */
export type SwapQuote = MarketBuySwapQuote | MarketSellSwapQuote;
/**
 * feeTakerTokenAmount: The amount of takerAsset reserved for paying takerFees when swapping for desired assets.
 * takerAmount: The amount of takerAsset swapped for desired makerAsset.
 * totalTakerAmount: The total amount of takerAsset required to complete the swap (filling orders, and paying takerFees).
 * makerAmount: The amount of makerAsset that will be acquired through the swap.
 * protocolFeeInWeiAmount: The amount of ETH to pay (in WEI) as protocol fee to perform the swap for desired asset.
 * gas: Amount of estimated gas needed to fill the quote.
 */
export interface SwapQuoteInfo {
    feeTakerTokenAmount: BigNumber;
    takerAmount: BigNumber;
    totalTakerAmount: BigNumber;
    makerAmount: BigNumber;
    protocolFeeInWeiAmount: BigNumber;
    gas: number;
}
/**
 * percentage breakdown of each liquidity source used in quote
 * (MultiHop entries additionally carry the intermediate token and the hop sources)
 */
export type SwapQuoteOrdersBreakdown = Partial<
    { [key in Exclude<ERC20BridgeSource, typeof ERC20BridgeSource.MultiHop>]: BigNumber } & {
        [ERC20BridgeSource.MultiHop]: {
            proportion: BigNumber;
            intermediateToken: string;
            hops: ERC20BridgeSource[];
        };
    }
>;
/**
 * nativeExclusivelyRFQ: if set to `true`, Swap quote will exclude Open Orderbook liquidity.
 * If set to `true` and `ERC20BridgeSource.Native` is part of the `excludedSources`
 * array in `SwapQuoteRequestOpts`, an Error will be raised.
 * NOTE(review): the paragraph above actually documents the `nativeExclusivelyRFQ` field declared
 * on `RfqRequestOpts` below; this interface itself only pins the RFQm-specific required fields.
 */
export interface RfqmRequestOptions extends RfqRequestOpts {
    // Always `true` for RFQm requests (literal type narrows the optional base field).
    isLastLook: true;
    fee: Fee;
}
/** Options supplied with an RFQ-T/M quote request. */
export interface RfqRequestOpts {
    takerAddress: string;
    txOrigin: string;
    integrator: Integrator;
    intentOnFilling: boolean;
    isIndicative?: boolean;
    makerEndpointMaxResponseTimeMs?: number;
    // See the interface-level note: excludes Open Orderbook liquidity when `true`.
    nativeExclusivelyRFQ?: boolean;
    altRfqAssetOfferings?: AltRfqMakerAssetOfferings;
    isLastLook?: boolean;
    fee?: Fee;
}
/**
 * gasPrice: gas price to determine protocolFee amount, default to ethGasStation fast amount
 */
export interface SwapQuoteRequestOpts extends Omit<GetMarketOrdersOpts, 'gasPrice'> {
    gasPrice?: BigNumber;
    rfqt?: RfqRequestOpts;
}
/**
* A mapping from RFQ-T/M quote provider URLs to the trading pairs they support.
* The value type represents an array of supported asset pairs, with each array element encoded as a 2-element array of token addresses.
*/
export interface RfqMakerAssetOfferings {
[endpoint: string]: Array<[string, string]>;
}
export interface AltOffering {
id: string;
baseAsset: string;
quoteAsset: string;
baseAssetDecimals: number;
quoteAssetDecimals: number;
}
export interface AltRfqMakerAssetOfferings {
[endpoint: string]: AltOffering[];
}
export enum RfqPairType {
Standard = 'standard',
Alt = 'alt',
}
export interface TypedMakerUrl {
url: string;
pairType: RfqPairType;
}
export type LogFunction = (obj: object, msg?: string, ...args: any[]) => void;
export interface RfqFirmQuoteValidator {
getRfqtTakerFillableAmountsAsync(quotes: RfqOrder[]): Promise<BigNumber[]>;
}
export interface Integrator {
integratorId: string;
label: string;
whitelistIntegratorUrls?: string[];
}
export interface SwapQuoterRfqOpts {
integratorsWhitelist: Integrator[];
makerAssetOfferings: RfqMakerAssetOfferings;
txOriginBlacklist: Set<string>;
altRfqCreds?: {
altRfqApiKey: string;
altRfqProfile: string;
};
warningLogger?: LogFunction;
infoLogger?: LogFunction;
metricsProxy?: MetricsProxy;
axiosInstanceOpts?: AxiosRequestConfig;
}
/** Alias kept for API stability; identical to the shared ContractAddresses type. */
export type AssetSwapperContractAddresses = ContractAddresses;
/**
 * chainId: The ethereum chain id. Defaults to 1 (mainnet).
 * orderRefreshIntervalMs: The interval in ms that getBuyQuoteAsync should trigger an refresh of orders and order states. Defaults to 10000ms (10s).
 * expiryBufferMs: The number of seconds to add when calculating whether an order is expired or not. Defaults to 300s (5m).
 * contractAddresses: Optionally override the contract addresses used for the chain
 * samplerGasLimit: The gas limit used when querying the sampler contract. Defaults to 36e6
 */
export interface SwapQuoterOpts extends OrderPrunerOpts {
    chainId: ChainId;
    orderRefreshIntervalMs: number;
    expiryBufferMs: number;
    ethereumRpcUrl?: string;
    contractAddresses?: AssetSwapperContractAddresses;
    samplerGasLimit?: number;
    multiBridgeAddress?: string;
    ethGasStationUrl?: string;
    rfqt?: SwapQuoterRfqOpts;
    samplerOverrides?: SamplerOverrides;
    tokenAdjacencyGraph?: TokenAdjacencyGraph;
    liquidityProviderRegistry?: LiquidityProviderRegistry;
}
/**
 * Possible error messages thrown by an SwapQuoterConsumer instance or associated static methods.
 */
export enum SwapQuoteConsumerError {
    InvalidMarketSellOrMarketBuySwapQuote = 'INVALID_MARKET_BUY_SELL_SWAP_QUOTE',
    InvalidForwarderSwapQuote = 'INVALID_FORWARDER_SWAP_QUOTE_PROVIDED',
    NoAddressAvailable = 'NO_ADDRESS_AVAILABLE',
    SignatureRequestDenied = 'SIGNATURE_REQUEST_DENIED',
    TransactionValueTooLow = 'TRANSACTION_VALUE_TOO_LOW',
}
/**
 * Possible error messages thrown by an SwapQuoter instance or associated static methods.
 */
export enum SwapQuoterError {
    NoEtherTokenContractFound = 'NO_ETHER_TOKEN_CONTRACT_FOUND',
    StandardRelayerApiError = 'STANDARD_RELAYER_API_ERROR',
    InsufficientAssetLiquidity = 'INSUFFICIENT_ASSET_LIQUIDITY',
    AssetUnavailable = 'ASSET_UNAVAILABLE',
    NoGasPriceProvidedOrEstimated = 'NO_GAS_PRICE_PROVIDED_OR_ESTIMATED',
    AssetDataUnsupported = 'ASSET_DATA_UNSUPPORTED',
}
/**
 * Represents two main market operations supported by asset-swapper.
 */
export enum MarketOperation {
    Sell = 'Sell',
    Buy = 'Buy',
}
/**
 * Represents varying order takerFee types that can be pruned for by OrderPruner.
 */
export enum OrderPrunerPermittedFeeTypes {
    NoFees = 'NO_FEES',
    TakerDenominatedTakerFee = 'TAKER_DENOMINATED_TAKER_FEE',
}
/**
 * Represents a mocked RFQ-T/M maker responses.
 */
export interface MockedRfqQuoteResponse {
    endpoint: string;
    requestApiKey: string;
    requestParams: TakerRequestQueryParamsUnnested;
    responseData: any;
    responseCode: number;
    // Optional hook invoked with the request config instead of returning canned data.
    callback?: (config: any) => Promise<any>;
}
/**
 * Represents a mocked RFQ-T/M alternative maker responses.
 */
export interface AltMockedRfqQuoteResponse {
    endpoint: string;
    mmApiKey: string;
    requestData: AltQuoteRequestData;
    responseData: any;
    responseCode: number;
}
/** State overrides and block parameters applied when calling the sampler contract. */
export interface SamplerOverrides {
    overrides: GethCallOverrides;
    block: BlockParam;
    to?: string;
}
/** The decoded result of a single sampler sub-call. */
export interface SamplerCallResult {
    success: boolean;
    data: string;
}
// NOTE(review): shadows TypeScript's built-in `Omit` (available since TS 3.5);
// this variant constrains K to `keyof T`. Kept exported for backward compatibility.
export type Omit<T, K extends keyof T> = Pick<T, Exclude<keyof T, K>>;
/** Whether an alternative RFQ quote request is for a firm or an indicative quote. */
export enum AltQuoteModel {
    Firm = 'firm',
    Indicative = 'indicative',
}
/** The side of an alternative RFQ quote, from the taker's perspective. */
export enum AltQuoteSide {
    Buy = 'buy',
    Sell = 'sell',
}
/** The request payload sent to an alternative RFQ maker. */
export interface AltQuoteRequestData {
    market: string;
    model: AltQuoteModel;
    profile: string;
    side: AltQuoteSide;
    value?: string;
    amount?: string;
    meta: {
        txOrigin: string;
        taker: string;
        client: string;
        // Present when re-quoting against an order the maker already issued.
        existingOrder?: {
            price: string;
            value?: string;
            amount?: string;
        };
    };
}
/** Fields common to all alternative RFQ maker responses. */
export interface AltBaseRfqResponse extends AltQuoteRequestData {
    id: string;
    price?: string;
}
export interface AltIndicativeQuoteResponse extends AltBaseRfqResponse {
model: AltQuoteModel.Indicative;
status: 'live' | 'rejected';
}
export interface AltFirmQuoteResponse extends AltBaseRfqResponse {
model: AltQuoteModel.Firm;
data: {
'0xv4order': V4SignedRfqOrder;
};
status: 'active' | 'rejected';
} | the_stack |
import { Log } from '../util/log';
import { EventEmitter } from 'events';
import { ExceptionBreakpoints, IThreadActorProxy } from '../firefox/actorProxy/thread';
import { ConsoleActorProxy } from '../firefox/actorProxy/console';
import { ThreadPauseCoordinator, PauseType } from './threadPause';
import { DelayedTask } from '../util/delayedTask';
import { PendingRequest } from '../util/pendingRequests';
// Module-level logger for thread-coordination diagnostics.
// `const` instead of `let`: the binding is never reassigned.
const log = Log.create('ThreadCoordinator');

/** The states a debuggee thread can be in, as tracked by this coordinator. */
type ThreadState = 'paused' | 'resuming' | 'running' | 'interrupting' | 'evaluating';

/** The state the thread should eventually be put into once all queued work is done. */
type ThreadTarget = 'paused' | 'running' | 'stepOver' | 'stepIn' | 'stepOut';
/**
* This class manages the state of one
* ["thread"](https://github.com/mozilla/gecko-dev/blob/master/devtools/docs/backend/protocol.md#interacting-with-thread-like-actors)
* and coordinates all tasks that depend on or change the thread's state.
*
* Some tasks (adding and removing breakpoints, fetching stackframes and the values of javascript
* variables) can only be run when the thread is paused. These actions can (and are) run in parallel,
* but the thread must not be resumed until all of them are finished. If such a task is requested
* while the thread is running, it is paused temporarily and resumed automatically when the task
* is finished.
*
* The evaluation of watch expressions is also coordinated using this class: the evaluation tasks
* for the watch expressions are queued, then the thread is paused, then the evaluation tasks are
* run one after another and then the thread is resumed.
* The reason for this is that we want to skip all breakpoints when evaluating watch expressions,
* to do this we need to know if a breakpoint was hit due to a watch expression or not. By running
* the evaluation of watch expressions through this coordinator, we ensure that no other javascript
* code can be running while doing the evaluation.
*/
export class ThreadCoordinator extends EventEmitter {
/**
* whether we should break on uncaught or all exceptions, this setting is sent to Firefox
* with the next request to resume the thread
*/
private exceptionBreakpoints: ExceptionBreakpoints | undefined;
/** the current state of the thread */
private threadState: ThreadState = 'paused';
/**
* the desired state of the thread, the thread will be put into this state when no more actions
* are queued or running
*/
private _threadTarget: ThreadTarget = 'paused';
public get threadTarget(): ThreadTarget {
return this._threadTarget;
}
/** if the thread is paused, this will contain the reason for the pause */
private _threadPausedReason?: FirefoxDebugProtocol.ThreadPausedReason;
public get threadPausedReason(): FirefoxDebugProtocol.ThreadPausedReason | undefined {
return this._threadPausedReason;
}
/**
* if the thread is interrupting (i.e. we sent an interrupt request, but didn't receive
* an answer yet), this will contain a Promise that will be resolved when the thread reached
* the paused state
*/
private interruptPromise?: Promise<void>;
/** the resolve/reject functions for the `interruptPromise` */
private pendingInterruptRequest?: PendingRequest<void>;
/**
* if the thread is resuming (i.e. we sent a resume request, but didn't receive
* an answer yet), this will contain a Promise that will be resolved when the thread reached
* the running state
*/
private resumePromise?: Promise<void>;
/** the resolve/reject functions for the `resumePromise` */
private pendingResumeRequest?: PendingRequest<void>;
private queuedTasksToRunOnPausedThread: DelayedTask<any>[] = [];
private tasksRunningOnPausedThread = 0;
private queuedEvaluateTasks: DelayedTask<FirefoxDebugProtocol.Grip>[] = [];
constructor(
private threadId: number,
private threadName: string,
isPaused: boolean,
private threadActor: IThreadActorProxy,
private consoleActor: ConsoleActorProxy,
private pauseCoordinator: ThreadPauseCoordinator,
private prepareResume: () => Promise<void>
) {
super();
const state = isPaused ? 'paused' : 'running';
this.threadState = state;
this._threadTarget = state;
threadActor.onPaused((event) => {
if (this.threadState === 'evaluating') {
// we hit a breakpoint while evaluating a watch, so we skip it by resuming immediately
threadActor.resume(this.exceptionBreakpoints);
} else if ((event.why.type === 'exception') &&
(this.exceptionBreakpoints === ExceptionBreakpoints.None)) {
// the thread was paused because it hit an exception, but we want to skip that
// (this can happen when the user changed the setting for exception breakpoints
// but we couldn't send the changed setting to Firefox yet)
threadActor.resume(this.exceptionBreakpoints);
} else {
this._threadTarget = 'paused';
this._threadPausedReason = event.why;
this.threadPaused('user');
this.emit('paused', event);
}
});
threadActor.onResumed(() => {
this._threadTarget = 'running';
this._threadPausedReason = undefined;
this.threadResumed();
if (this.tasksRunningOnPausedThread > 0) {
log.warn('Thread resumed unexpectedly while tasks that need the thread to be paused were running');
}
});
}
public setExceptionBreakpoints(exceptionBreakpoints: ExceptionBreakpoints) {
this.exceptionBreakpoints = exceptionBreakpoints;
if ((this.threadState === 'resuming') || (this.threadState === 'running')) {
// We can only send the changed setting for exception breakpoints to Firefox when sending
// a resume request. By requesting to run a dummy action on the paused thread, we ensure
// that a resume request will be sent to Firefox (after the dummy action is finished).
this.runOnPausedThread(async () => undefined);
}
}
public interrupt(): Promise<void> {
if (this.threadState === 'paused') {
return Promise.resolve();
} else if (this.interruptPromise !== undefined) {
return this.interruptPromise;
} else {
this._threadTarget = 'paused';
this._threadPausedReason = undefined;
this.interruptPromise = new Promise<void>((resolve, reject) => {
this.pendingInterruptRequest = { resolve, reject };
});
this.doNext();
return this.interruptPromise;
}
}
public resume(): Promise<void> {
return this.resumeTo('running');
}
public stepOver(): Promise<void> {
return this.resumeTo('stepOver');
}
public stepIn(): Promise<void> {
return this.resumeTo('stepIn');
}
public stepOut(): Promise<void> {
return this.resumeTo('stepOut');
}
private resumeTo(target: 'running' | 'stepOver' | 'stepIn' | 'stepOut'): Promise<void> {
if (this.threadState === 'running') {
if (target !== 'running') {
log.warn(`Can't ${target} because the thread is already running`);
}
return Promise.resolve();
} else if (this.resumePromise !== undefined) {
if (target !== 'running') {
log.warn(`Can't ${target} because the thread is already resuming`);
}
return this.resumePromise;
} else {
this._threadTarget = target;
this._threadPausedReason = undefined;
this.resumePromise = new Promise<void>((resolve, reject) => {
this.pendingResumeRequest = { resolve, reject };
});
this.doNext();
return this.resumePromise;
}
}
public runOnPausedThread<T>(task: () => Promise<T>): Promise<T> {
let delayedTask = new DelayedTask(task);
this.queuedTasksToRunOnPausedThread.push(delayedTask);
this.doNext();
return delayedTask.promise;
}
public evaluate(
expr: string,
frameActorName: string | undefined
): Promise<FirefoxDebugProtocol.Grip> {
let delayedTask = new DelayedTask(() => this.consoleActor.evaluate(expr, frameActorName));
this.queuedEvaluateTasks.push(delayedTask);
this.doNext();
return delayedTask.promise;
}
public onPaused(cb: (event: FirefoxDebugProtocol.ThreadPausedResponse) => void) {
this.on('paused', cb);
}
/**
* This method is called whenever an action was queued or finished running or the desired thread
* state changed. It will decide what should be done next (if anything) and will do it by
* calling one of the `execute` functions below.
*/
private doNext(): void {
if (log.isDebugEnabled()) {
log.debug(`state: ${this.threadState}, target: ${this.threadTarget}, tasks: ${this.tasksRunningOnPausedThread}/${this.queuedTasksToRunOnPausedThread.length}, eval: ${this.queuedEvaluateTasks.length}`)
}
if ((this.threadState === 'interrupting') ||
(this.threadState === 'resuming') ||
(this.threadState === 'evaluating')) {
return;
}
if (this.threadState === 'running') {
if ((this.queuedTasksToRunOnPausedThread.length > 0) || (this.queuedEvaluateTasks.length > 0)) {
this.executeInterrupt('auto');
return;
}
if (this.threadTarget === 'paused') {
this.executeInterrupt('user');
return;
}
} else { // this.threadState === 'paused'
if (this.queuedTasksToRunOnPausedThread.length > 0) {
for (let task of this.queuedTasksToRunOnPausedThread) {
this.executeOnPausedThread(task);
}
this.queuedTasksToRunOnPausedThread = [];
return;
}
if (this.tasksRunningOnPausedThread > 0) {
return;
}
if (this.queuedEvaluateTasks.length > 0) {
let task = this.queuedEvaluateTasks.shift()!;
this.executeEvaluateTask(task);
return;
}
}
if ((this.threadState === 'paused') && (this.threadTarget !== 'paused')) {
this.executeResume();
return;
}
}
/**
* interrupts the thread, the `pauseType` argument specifies if this is due to a breakpoint or
* the user requesting the thread to be paused (so it won't be resumed by this class until the
* user requests it to be resumed) or if it is an automatic pause in order to run some actions
* on the paused thread (so it *will* be resumed automatically when all actions are finished)
*/
private async executeInterrupt(pauseType: PauseType): Promise<void> {
this.threadState = 'interrupting';
try {
await this.pauseCoordinator.requestInterrupt(this.threadId, this.threadName, pauseType);
await this.threadActor.interrupt(pauseType === 'auto');
this.threadPaused(pauseType);
} catch(e) {
log.error(`interrupt failed: ${e}`);
this.threadState = 'running';
this.pauseCoordinator.notifyInterruptFailed(this.threadId, this.threadName);
}
this.interruptPromise = undefined;
this.doNext();
}
private async executeResume(): Promise<void> {
try {
await this.pauseCoordinator.requestResume(this.threadId, this.threadName);
} catch(e) {
log.error(`resume denied: ${e}`);
if (this.pendingResumeRequest !== undefined) {
this.pendingResumeRequest.reject(e);
this.pendingResumeRequest = undefined;
}
this.resumePromise = undefined;
}
let resumeLimit = this.getResumeLimit();
this.threadState = 'resuming';
try {
await this.prepareResume();
await this.threadActor.resume(this.exceptionBreakpoints, resumeLimit);
this.threadResumed();
} catch(e) {
log.error(`resume failed: ${e}`);
this.threadState = 'paused';
this.pauseCoordinator.notifyResumeFailed(this.threadId, this.threadName);
}
this.doNext();
}
private async executeOnPausedThread(task: DelayedTask<any>): Promise<void> {
if (this.threadState !== 'paused') {
log.error(`executeOnPausedThread called but threadState is ${this.threadState}`);
return;
}
this.tasksRunningOnPausedThread++;
try {
await task.execute();
} catch(e) {
log.warn(`task running on paused thread failed: ${e}`);
}
this.tasksRunningOnPausedThread--;
if (this.tasksRunningOnPausedThread === 0) {
this.doNext();
}
}
private async executeEvaluateTask(task: DelayedTask<FirefoxDebugProtocol.Grip>): Promise<void> {
if (this.threadState !== 'paused') {
log.error(`executeEvaluateTask called but threadState is ${this.threadState}`);
return;
}
if (this.tasksRunningOnPausedThread > 0) {
log.error(`executeEvaluateTask called but tasksRunningOnPausedThread is ${this.tasksRunningOnPausedThread}`);
return;
}
this.threadState = 'evaluating';
try {
await task.execute();
} catch(e) {
}
this.threadState = 'paused';
this.doNext();
}
private threadPaused(pauseType: PauseType): void {
this.threadState = 'paused';
if (this.pendingInterruptRequest !== undefined) {
this.pendingInterruptRequest.resolve(undefined);
this.pendingInterruptRequest = undefined;
}
this.interruptPromise = undefined;
if (this.threadTarget === 'paused') {
if (this.pendingResumeRequest !== undefined) {
this.pendingResumeRequest.reject(undefined);
this.pendingResumeRequest = undefined;
}
this.resumePromise = undefined;
}
this.pauseCoordinator.notifyInterrupted(this.threadId, this.threadName, pauseType);
}
private threadResumed(): void {
this.threadState = 'running';
if (this.pendingResumeRequest !== undefined) {
this.pendingResumeRequest.resolve(undefined);
this.pendingResumeRequest = undefined;
}
this.resumePromise = undefined;
if (this.threadTarget !== 'paused') {
if (this.pendingInterruptRequest !== undefined) {
this.pendingInterruptRequest.reject(undefined);
this.pendingInterruptRequest = undefined;
}
this.interruptPromise = undefined;
}
this.pauseCoordinator.notifyResumed(this.threadId, this.threadName);
}
private getResumeLimit(): 'next' | 'step' | 'finish' | undefined {
switch (this.threadTarget) {
case 'stepOver':
return 'next';
case 'stepIn':
return 'step';
case 'stepOut':
return 'finish';
default:
return undefined;
}
}
} | the_stack |
import {
createTaskObserver,
sleep,
TaskObserver,
TaskObserversUnknown,
TaskState,
} from "@adpt/utils";
import * as fs from "fs-extra";
import { last } from "lodash";
import * as path from "path";
import should from "should";
import * as sinon from "sinon";
import { createMockLogger, mochaTmpdir, MockLogger } from "@adpt/testutils";
import Adapt, { AdaptElementOrNull, AdaptMountedElement, FinalDomElement, Group } from "../../src";
import {
Action,
ActOptions,
ChangeType,
Plugin,
PluginManager,
PluginManagerStartOptions,
PluginModule,
PluginOptions,
} from "../../src/deploy";
import * as pluginSupport from "../../src/deploy/plugin_support";
import { MockAdaptContext, mockAdaptContext } from "../../src/ts";
import { createMockDeployment, doBuild, Empty, packageDirs } from "../testlib";
/** Resolves on the next turn of the Node.js event loop. */
function nextTick(): Promise<void> {
    return new Promise((resolve) => {
        process.nextTick(resolve);
    });
}

/** Waits one tick, then reports `name` to the given callback. */
async function doAction(name: string, cb: (op: string) => void) {
    await nextTick();
    cb(name);
}
/**
 * A mock deploy plugin that records every lifecycle call (start, observe,
 * analyze, act, finish) through a sinon spy, and emits two asynchronous
 * actions targeting the first two children of the DOM.
 */
class TestPlugin implements Plugin<{}> {
    constructor(readonly spy: sinon.SinonSpy) { }

    async start(options: PluginOptions) {
        this.spy("start", options);
    }

    async observe(_oldDom: AdaptElementOrNull, dom: AdaptElementOrNull) {
        const observations = { test: "object" };
        this.spy("observe", dom, observations);
        return observations;
    }

    analyze(_oldDom: AdaptElementOrNull, dom: AdaptElementOrNull, obs: {}): Action[] {
        this.spy("analyze", dom, obs);
        if (dom == null) throw new Error(`null dom not handled`);
        // Each action i targets child i-1 of the DOM.
        const makeInfo = (i: number) => ({
            type: ChangeType.create,
            detail: `action${i}`,
            changes: [{
                type: ChangeType.create,
                element: dom.props.children[i - 1] as FinalDomElement,
                detail: `action${i}`,
            }]
        });
        return [1, 2].map((i) => ({
            act: () => doAction(`action${i}`, this.spy),
            ...makeInfo(i),
        }));
    }

    async finish() {
        this.spy("finish");
    }
}
// Exercises the PluginManager lifecycle (start -> observe -> analyze -> act -> finish)
// against a single registered TestPlugin, verifying spy call order, task observer
// state, logging, and the enforcement of legal call sequences.
describe("Plugin Support Basic Tests", () => {
    let mgr: PluginManager;
    let spy: sinon.SinonSpy;
    let logger: MockLogger;
    let options: PluginManagerStartOptions;
    let dataDir: string;
    let taskObserver: TaskObserver;
    let dom: AdaptMountedElement;
    let kids: AdaptMountedElement[];
    let actOptions: ActOptions;
    // The DOM under test: a Group with two Empty children, so TestPlugin's
    // two actions each have a child element to target.
    const orig =
        <Group>
            <Empty id={0} />
            <Empty id={1} />
        </Group>;

    mochaTmpdir.all("adapt-plugin-tests");

    beforeEach(async () => {
        const build = await doBuild(orig);
        dom = build.dom;
        kids = dom.props.children;
        spy = sinon.spy();
        logger = createMockLogger();
        taskObserver = createTaskObserver("parent", { logger });
        taskObserver.started();
        // Register a single TestPlugin under a fresh PluginManager.
        const registered = new Map<string, PluginModule>();
        registered.set("TestPlugin", {
            name: "TestPlugin",
            module,
            create: () => new TestPlugin(spy),
            packageName: "test_plugin",
            version: "1.0.0",
        });
        mgr = pluginSupport.createPluginManager(registered);
        dataDir = path.join(process.cwd(), "pluginData");
        const deployment = await createMockDeployment({ deployID: "deploy123"});
        options = {
            dataDir,
            deployment,
            deployOpID: await deployment.newOpID(),
            logger,
            newDom: dom,
            newMountedElements: build.mountedElements,
            prevDom: null,
            prevMountedElements: [],
        };
        actOptions = {
            taskObserver,
        };
    });

    // Accesses the task observer's internal task map (test-only peek).
    function getTasks(): TaskObserversUnknown {
        return (taskObserver.childGroup() as any).tasks_;
    }

    it("Should construct a PluginManager", () => {
        should(mgr).not.Undefined();
    });

    it("Should call start on each plugin", async () => {
        await mgr.start(options);
        should(spy.calledOnce).True();
        should(spy.getCall(0).args[0]).eql("start");
        should(spy.getCall(0).args[1].deployID).eql("deploy123");
        // Each plugin gets a dataDir namespaced by package@version/name.
        should(spy.getCall(0).args[1].dataDir)
            .eql(path.join(dataDir, "test_plugin@1.0.0", "TestPlugin"));
    });

    it("Should create plugin data directory", async () => {
        await mgr.start(options);
        should(spy.calledOnce).True();
        should(spy.getCall(0).args[0]).eql("start");
        const expected = path.join(dataDir, "test_plugin@1.0.0", "TestPlugin");
        should(spy.getCall(0).args[1].dataDir).equal(expected);
        should(fs.existsSync(expected)).be.True();
    });

    it("Should call observe after start", async () => {
        await mgr.start(options);
        await mgr.observe();
        should(spy.callCount).equal(2);
        should(spy.getCall(0).args[0]).eql("start");
        should(spy.getCall(0).args[1].deployID).eql("deploy123");
        should(spy.getCall(1).args).eql(["observe", dom, { test: "object" }]);
    });

    it("Should call analyze after observe", async () => {
        await mgr.start(options);
        await mgr.observe();
        mgr.analyze();
        should(spy.callCount).equal(3);
        should(spy.getCall(0).args[0]).eql("start");
        should(spy.getCall(0).args[1].deployID).eql("deploy123");
        should(spy.getCall(1).args).eql(["observe", dom, { test: "object" }]);
        should(spy.getCall(2).args).eql(["analyze", dom, { test: "object" }]);
        // No tasks are created until act() runs.
        const tasks = getTasks();
        const taskNames = Object.keys(tasks);
        should(taskNames).have.length(0);
    });

    it("Should call actions", async () => {
        await mgr.start(options);
        await mgr.observe();
        mgr.analyze();
        await mgr.act(actOptions);
        await mgr.finish();
        should(spy.callCount).equal(6);
        should(spy.getCall(0).args[0]).eql("start");
        should(spy.getCall(0).args[1].deployID).eql("deploy123");
        should(spy.getCall(1).args).eql(["observe", dom, { test: "object" }]);
        should(spy.getCall(2).args).eql(["analyze", dom, { test: "object" }]);
        // The two actions can be called in either order
        should([spy.getCall(3).args, spy.getCall(4).args])
            .containDeep([ ["action1"], ["action2"] ]);
        should(spy.getCall(5).args).eql(["finish"]);
        const contents = logger.stdout;
        should(contents).match(/action1/);
        should(contents).match(/action2/);
        // One task per affected element, keyed by element id, all completed.
        const tasks = getTasks();
        const taskNames = Object.keys(tasks);
        should(taskNames).containDeep(kids.map((e) => e.id));
        should(taskNames.map((n) => tasks[n]!.description))
            .containDeep(["action1", "action2"]);
        should(taskNames.map((n) => tasks[n]!.state))
            .eql([TaskState.Complete, TaskState.Complete, TaskState.Complete]);
    });

    it("Should not call actions on dry run", async () => {
        await mgr.start(options);
        await mgr.observe();
        mgr.analyze();
        await mgr.act({ ...actOptions, dryRun: true });
        await mgr.finish();
        // No action spy calls: only start/observe/analyze/finish.
        should(spy.callCount).equal(4);
        should(spy.getCall(0).args[0]).eql("start");
        should(spy.getCall(0).args[1].deployID).eql("deploy123");
        should(spy.getCall(1).args).eql(["observe", dom, { test: "object" }]);
        should(spy.getCall(2).args).eql(["analyze", dom, { test: "object" }]);
        should(spy.getCall(3).args).eql(["finish"]);
        // Action details are still logged, but tasks end up Skipped.
        const contents = logger.stdout;
        should(contents).match(/action1/);
        should(contents).match(/action2/);
        const tasks = getTasks();
        const taskNames = Object.keys(tasks);
        should(taskNames).containDeep(kids.map((e) => e.id));
        should(taskNames.map((n) => tasks[n]!.description))
            .containDeep(["action1", "action2"]);
        should(taskNames.map((n) => tasks[n]!.state))
            .eql([TaskState.Skipped, TaskState.Skipped, TaskState.Skipped]);
    });

    it("Should not allow illegal call sequences", async () => {
        await mgr.start(options);
        should(() => mgr.analyze()).throwError(
            /Illegal call to Plugin Manager, attempting to go from PreObserve to Analyzing/);
        await should(mgr.act(actOptions)).rejectedWith(Error);
        await should(mgr.finish()).rejectedWith(Error);
        await mgr.observe();
        await should(mgr.act(actOptions)).rejectedWith(Error);
        mgr.analyze();
        await mgr.act({ ...actOptions, dryRun: true });
        // A completed TaskObserver cannot be reused for a second act().
        taskObserver.complete();
        await should(mgr.act(actOptions)).rejectedWith(/new TaskObserver must be provided/);
        taskObserver = createTaskObserver("parent2", { logger });
        taskObserver.started();
        actOptions.taskObserver = taskObserver;
        // act() after a dry run still requires a fresh analyze() first.
        await should(mgr.act(actOptions)).rejectedWith(Error);
        mgr.analyze();
        await mgr.act(actOptions);
        await mgr.finish();
    });

    it("Should allow finish without acting", async () => {
        await mgr.start(options);
        await mgr.observe();
        mgr.analyze();
        await mgr.finish();
    });

    it("Should run actions after dry run", async () => {
        await mgr.start(options);
        await mgr.observe();
        mgr.analyze();
        await mgr.act({ ...actOptions, dryRun: true });
        should(spy.callCount).equal(3);
        should(spy.getCall(0).args[0]).eql("start");
        should(spy.getCall(0).args[1].deployID).eql("deploy123");
        should(spy.getCall(1).args).eql(["observe", dom, { test: "object" }]);
        should(spy.getCall(2).args).eql(["analyze", dom, { test: "object" }]);
        const contents = logger.stdout;
        should(contents).match(/action1/);
        should(contents).match(/action2/);
        const tasks = getTasks();
        let taskNames = Object.keys(tasks);
        should(taskNames).containDeep(kids.map((e) => e.id));
        should(taskNames.map((n) => tasks[n]!.description))
            .containDeep(["action1", "action2"]);
        should(taskNames.map((n) => tasks[n]!.state))
            .eql([TaskState.Skipped, TaskState.Skipped, TaskState.Skipped]);
        // Provide a new taskObserver for the second act()
        taskObserver = createTaskObserver("parent2", { logger });
        taskObserver.started();
        actOptions.taskObserver = taskObserver;
        mgr.analyze();
        should(spy.getCall(3).args).eql(["analyze", dom, { test: "object" }]);
        await mgr.act(actOptions);
        await mgr.finish();
        should(spy.callCount).equal(7);
        // The two actions can be called in either order
        should([spy.getCall(4).args, spy.getCall(5).args])
            .containDeep([ ["action1"], ["action2"] ]);
        should(spy.getCall(6).args).eql(["finish"]);
        // The real run uses a fresh task group with Complete states.
        const newTasks = getTasks();
        should(newTasks).not.equal(tasks);
        taskNames = Object.keys(newTasks);
        should(taskNames).containDeep(kids.map((e) => e.id));
        should(taskNames.map((n) => newTasks[n]!.description))
            .containDeep(["action1", "action2"]);
        should(taskNames.map((n) => newTasks[n]!.state))
            .eql([TaskState.Complete, TaskState.Complete, TaskState.Complete]);
    });
});
/**
 * Tracks a running count of concurrent operations and the high-water mark
 * the count has reached.
 */
class Concurrent {
    // Number of operations currently in flight.
    concurrent = 0;
    // Highest value `concurrent` has ever reached.
    maxConcurrent = 0;

    /** Records the start of an operation, updating the high-water mark. */
    inc() {
        this.concurrent += 1;
        this.maxConcurrent = Math.max(this.maxConcurrent, this.concurrent);
    }

    /** Records the end of an operation. */
    dec() {
        this.concurrent -= 1;
    }
}
/**
 * A mock plugin whose actions sleep for 20ms while tracking how many of them
 * run concurrently, both per-plugin (`local`) and across plugins (`shared`).
 * `finish` reports the per-plugin concurrency high-water mark via the spy.
 */
class SlowPlugin implements Plugin<{}> {
    local = new Concurrent();

    constructor(
        public seriesActions: boolean,
        readonly spy: sinon.SinonSpy,
        public shared: Concurrent,
        public elemStart = 0,
    ) { }

    async start(options: PluginOptions) {/**/}

    async observe(_oldDom: AdaptElementOrNull, dom: AdaptElementOrNull) {
        return {};
    }

    // Each action bumps both concurrency counters for the duration of a short sleep.
    act = async () => {
        this.local.inc();
        this.shared.inc();
        await sleep(20);
        this.local.dec();
        this.shared.dec();
    }

    analyze(_oldDom: AdaptElementOrNull, dom: AdaptElementOrNull, _obs: {}): Action[] {
        if (!dom) throw new Error(`Test error: dom is null`);
        // Candidate target elements: the root followed by its children,
        // skipping the first `elemStart` entries.
        const targets = [dom, ...dom.props.children].slice(this.elemStart);
        // Each call consumes the next target element.
        const nextAction = () => ({
            type: ChangeType.create,
            detail: "action detail",
            changes: [{
                type: ChangeType.create,
                element: targets.shift() as FinalDomElement,
                detail: "change detail"
            }],
            act: this.act,
        });
        return [nextAction(), nextAction(), nextAction()];
    }

    async finish() {
        this.spy("max", this.local.maxConcurrent);
    }
}
// Verifies that the PluginManager runs a plugin's actions in parallel by
// measuring the concurrency high-water mark that SlowPlugin records.
describe("Plugin concurrency", () => {
    let mgr: PluginManager;
    let logger: MockLogger;
    let options: PluginManagerStartOptions;
    let dataDir: string;
    let registered: Map<string, PluginModule>;
    let shared: Concurrent;
    let dom: AdaptMountedElement;
    let actOptions: ActOptions;
    // A root Group with 8 children so SlowPlugin has distinct elements to target.
    const orig =
        <Group>
            <Group /><Group /><Group /><Group />
            <Group /><Group /><Group /><Group />
        </Group>;

    mochaTmpdir.all("adapt-plugin-tests");

    beforeEach(async () => {
        const build = await doBuild(orig);
        dom = build.dom;
        logger = createMockLogger();
        registered = new Map<string, PluginModule>();
        shared = new Concurrent();
        dataDir = path.join(process.cwd(), "pluginData");
        const deployment = await createMockDeployment({ deployID: "deploy123"});
        options = {
            dataDir,
            deployment,
            deployOpID: await deployment.newOpID(),
            logger,
            newDom: dom,
            newMountedElements: build.mountedElements,
            prevDom: null,
            prevMountedElements: [],
        };
        actOptions = {
            taskObserver: createTaskObserver("parent", { logger }),
        };
        actOptions.taskObserver.started();
    });

    it("Should act in parallel", async () => {
        const spy = sinon.spy();
        // seriesActions=false, elemStart=1: three independent actions that
        // target distinct child elements and should run concurrently.
        registered.set("SlowPlugin", {
            name: "SlowPlugin",
            module,
            create: () => new SlowPlugin(false, spy, shared, 1),
            packageName: "slow_plugin",
            version: "1.0.0",
        });
        mgr = pluginSupport.createPluginManager(registered);

        await mgr.start(options);
        await mgr.observe();
        mgr.analyze();
        await mgr.act(actOptions);
        await mgr.finish();

        // All three actions overlapped, so the high-water mark is 3.
        should(spy.callCount).equal(1);
        should(spy.getCall(0).args[0]).eql("max");
        should(spy.getCall(0).args[1]).eql(3);
    });
});
// Absolute paths of test-plugin modules loaded via require(), remembered so
// their require-cache entries can be purged between tests.
let testPluginsLoaded: string[] = [];

/** Returns the source directory of the named test plugin. */
function testPluginSrcDir(name: string) {
    return path.join(packageDirs.root, "test_plugins", name);
}

/**
 * Symlinks the plugin's package.json from the source tree into the dist tree
 * and returns the module directory in dist.
 */
async function setupTestPlugin(name: string) {
    const srcDir = testPluginSrcDir(name);
    const modDir = path.join(packageDirs.dist, "test_plugins", name);
    const pkgJson = "package.json";
    await fs.ensureSymlink(path.join(srcDir, pkgJson), path.join(modDir, pkgJson));
    return modDir;
}

/** Loads the given test plugin module and remembers it for later cleanup. */
async function requireTestPlugin(name: string, jsFile = "index.js") {
    const modDir = await setupTestPlugin(name);
    const jsPath = path.resolve(path.join(modDir, jsFile));
    await require(jsPath);
    testPluginsLoaded.push(jsPath);
}

/** Evicts every previously loaded test plugin from the require cache. */
function cleanupTestPlugins() {
    testPluginsLoaded.forEach((p) => { delete require.cache[p]; });
    testPluginsLoaded = [];
}
/**
 * Splits the logger's captured stdout into lines, dropping the single empty
 * entry that a trailing newline produces.
 */
function outputLines(logger: MockLogger): string[] {
    const lines = logger.stdout.split("\n");
    if (last(lines) === "") lines.pop();
    return lines;
}
// Integration tests for plugin registration (via require() side effects on a
// mock Adapt context) and the PluginManager start lifecycle.
describe("Plugin register and deploy", () => {
let logger: MockLogger;
let mockContext: MockAdaptContext;
let options: PluginManagerStartOptions;
// Minimal DOM whose build output feeds PluginManager.start().
const orig = <Group />;
beforeEach(async () => {
// Ensure plugin modules loaded by earlier tests don't leak into this one.
cleanupTestPlugins();
mockContext = mockAdaptContext();
logger = createMockLogger();
const build = await doBuild(orig);
const deployment = await createMockDeployment({ deployID: "deploy123"});
options = {
dataDir: "/tmp/fakeDataDir",
deployment,
deployOpID: await deployment.newOpID(),
logger,
newDom: build.dom,
newMountedElements: build.mountedElements,
prevDom: null,
prevMountedElements: [],
};
});
afterEach(() => {
mockContext.stop();
});
after(() => {
cleanupTestPlugins();
});
it("Should register plugin", async () => {
// Loading the module registers the plugin with the mock context.
await requireTestPlugin("echo_plugin");
should(mockContext.pluginModules).size(1);
const mgr = pluginSupport.createPluginManager(mockContext.pluginModules);
await mgr.start(options);
const lines = outputLines(logger);
should(lines).have.length(1);
should(lines[0]).match(/EchoPlugin: start/);
});
it("Should error if no plugins registered", () => {
should(() => pluginSupport.createPluginConfig(mockContext.pluginModules))
.throw(/No plugins registered/);
});
it("Should throw on registering same name, different create", async () => {
await requireTestPlugin("echo_plugin");
// error.js registers the same plugin name with a different create function.
return should(requireTestPlugin("echo_plugin", "error.js"))
.be.rejectedWith(
/Attempt to register two plugins with the same name from the same package: echo \[echo_plugin@1.0.0]/);
});
it("Should register two plugins from same package", async () => {
await requireTestPlugin("echo_plugin");
await requireTestPlugin("echo_plugin", "second.js");
should(mockContext.pluginModules).size(2);
const mgr = pluginSupport.createPluginManager(mockContext.pluginModules);
await mgr.start(options);
const lines = outputLines(logger);
should(lines).have.length(2);
should(lines[0]).match(/EchoPlugin: start/);
should(lines[1]).match(/EchoPlugin: start/);
});
it("Should ignore second registration with same info", async () => {
// duplicate.js re-registers identical plugin info; registration is a no-op.
await requireTestPlugin("echo_plugin");
await requireTestPlugin("echo_plugin", "duplicate.js");
should(mockContext.pluginModules).size(1);
const mgr = pluginSupport.createPluginManager(mockContext.pluginModules);
await mgr.start(options);
const lines = outputLines(logger);
should(lines).have.length(1);
should(lines[0]).match(/EchoPlugin: start/);
});
});
/** Context methods shared by all Worker event types (fetch, scheduled, module workers). */
export interface WorkerContextMethods {
/** Prevents requests from failing due to an unhandled exception thrown by the Worker, causing it instead to “fail open”.
*
* Instead of returning an error response, the runtime will proxy the request to the origin server as though the Worker was never invoked. */
passThroughOnException(): void;
/** Extend the lifetime of the event without blocking the response from being sent.
*
* Use this method to notify the runtime to wait for tasks (e.g. logging, analytics to third-party services, streaming and caching)
* that need to run longer than the usual time it takes to send a response. */
waitUntil(promise: Promise<unknown>): void;
}
/** Properties common to scheduled (cron trigger) events, shared by the script and module worker forms. */
export interface ScheduledEventProperties {
/** The time the ScheduledEvent was scheduled to be executed in milliseconds since January 1, 1970, UTC.
*
* It can be parsed as new Date(event.scheduledTime) */
readonly scheduledTime: number;
/** The original cron string that the event was scheduled for. */
readonly cron: string;
}
/** The incoming Request as seen by a Worker, extended with Cloudflare-specific properties. */
export interface IncomingRequestCf extends Request {
/** An object containing properties about the incoming request provided by Cloudflare’s edge network. */
readonly cf: IncomingRequestCfProperties;
// undocumented — observed on incoming requests; shape and purpose unknown, confirm before relying on it
readonly fetcher: Record<string, unknown>; // ???
}
/** An object containing properties about the incoming request provided by Cloudflare’s edge network. */
export interface IncomingRequestCfProperties {
// https://developers.cloudflare.com/workers/runtime-apis/request#incomingrequestcfproperties
/** ASN of the incoming request, e.g. 395747. */
readonly asn: number;
/** The organisation which owns the ASN of the incoming request, e.g. Google Cloud. */
readonly asOrganization: string;
/** The three-letter IATA airport code of the data center that the request hit, e.g. "DFW". */
readonly colo: string;
/** Country of the incoming request.
*
* The two-letter country code in the request. This is the same value as that provided in the CF-IPCountry header, e.g. "US". */
readonly country: string | null;
/** HTTP Protocol, e.g. "HTTP/2". */
readonly httpProtocol: string;
/** The browser-requested prioritization information in the request object, e.g. "weight=192;exclusive=0;group=3;group-weight=127". */
readonly requestPriority: string | null;
/** The cipher for the connection to Cloudflare, e.g. "AEAD-AES128-GCM-SHA256". */
readonly tlsCipher: string;
/** Only set when using Cloudflare Access or API Shield. */
readonly tlsClientAuth: TlsClientAuth | null;
/** The TLS version of the connection to Cloudflare, e.g. TLSv1.3. */
readonly tlsVersion: string;
// 2021-04-13: these are now free! https://blog.cloudflare.com/location-based-personalization-using-workers/
/** City of the incoming request, e.g. "Austin". */
readonly city: string | null;
/** Continent of the incoming request, e.g. "NA". */
readonly continent: string | null;
/** Latitude of the incoming request, e.g. "30.27130". */
readonly latitude: string | null;
/** Longitude of the incoming request, e.g. "-97.74260". */
readonly longitude: string | null;
/** Postal code of the incoming request, e.g. "78701". */
readonly postalCode: string | null;
/** Metro code (DMA) of the incoming request, e.g. "635". */
readonly metroCode: string | null;
/** If known, the [ISO 3166-2](https://en.wikipedia.org/wiki/ISO_3166-2) name for the first level region associated with the IP address of the incoming request, e.g. "Texas". */
readonly region: string | null;
/** If known, the [ISO 3166-2](https://en.wikipedia.org/wiki/ISO_3166-2) code for the first level region associated with the IP address of the incoming request, e.g. "TX". */
readonly regionCode: string | null;
/** Timezone of the incoming request, e.g. "America/Chicago". */
readonly timezone: string;
// undocumented — names/examples observed in practice; semantics are not officially documented, confirm before relying on them
readonly edgeRequestKeepAliveStatus: number; // e.g. 1
readonly clientAcceptEncoding: string; // e.g. gzip, deflate, br
readonly clientTcpRtt: number; // e.g. 35
readonly weight: string; // e.g. "UNDEFINED" (free only?)
readonly tlsExportedAuthenticator: TlsExportedAuthenticator;
}
/** TLS handshake / exported-authenticator material for the connection.
*
* NOTE(review): the example value is a hex-encoded string; presumably all four fields are — confirm before parsing. */
export interface TlsExportedAuthenticator {
readonly clientFinished: string; // e.g. e138d272eedf10ff081b2614c40c22e2a2e1c272aeef6e9ba8517dd19e4908c4
readonly clientHandshake: string; // hex string, like clientFinished
readonly serverHandshake: string; // hex string, like clientFinished
readonly serverFinished: string; // hex string, like clientFinished
}
/** Client certificate details for the connection.
*
* Only populated when Cloudflare Access or API Shield (mTLS) presents a client certificate
* (see IncomingRequestCfProperties.tlsClientAuth, which is null otherwise).
*
* NOTE(review): all values are strings, including boolean-like fields such as
* certPresented/certVerified/certRevoked — confirm exact encodings before parsing. */
export interface TlsClientAuth {
// Members use semicolon separators for consistency with every other interface in this file.
readonly certIssuerDNLegacy: string;
readonly certIssuerDN: string;
readonly certIssuerDNRFC2253: string;
readonly certSubjectDNLegacy: string;
readonly certVerified: string;
readonly certNotAfter: string;
readonly certSubjectDN: string;
readonly certFingerprintSHA1: string;
readonly certNotBefore: string;
readonly certSerial: string;
readonly certPresented: string;
readonly certSubjectDNRFC2253: string;
// undocumented
readonly certFingerprintSHA256: string;
readonly certIssuerSKI: string;
readonly certRevoked: string;
readonly certSKI: string;
}
//#endregion
//#region Business and Enterprise only
/** Extra cf properties only populated on Business and Enterprise plans. */
export interface IncomingRequestCfBusinessAndEnterpriseProperties extends IncomingRequestCfProperties {
// undocumented
readonly clientTrustScore: number; // e.g. 99
readonly botManagement: BotManagement;
}
/** Bot Management signals (Business/Enterprise plans). */
export interface BotManagement {
readonly score: number; // e.g. 99 — presumably higher means more likely human; confirm against Bot Management docs
readonly verifiedBot: boolean;
readonly staticResource: boolean;
}
//#endregion
//#region Script (non-module) worker types
// https://developers.cloudflare.com/workers/runtime-apis/fetch-event
/** Event delivered to script (non-module) workers for each incoming HTTP request. */
export interface FetchEvent extends WorkerContextMethods {
/** The type of event. */
readonly type: 'fetch';
/** The incoming HTTP request triggering FetchEvent. */
readonly request: IncomingRequestCf;
/** Intercept the request and send a custom response.
*
* If no event handler calls respondWith() the runtime attempts to request the origin as if no Worker script exists.
* If no origin is setup (e.g. workers.dev sites), then the Workers script must call respondWith() for a valid response. */
respondWith(promise: Response | Promise<Response>): void;
}
// https://developers.cloudflare.com/workers/runtime-apis/scheduled-event
/** Event delivered to script (non-module) workers when a Cron Trigger fires. */
export interface ScheduledEvent extends ScheduledEventProperties {
/** The type of event. */
readonly type: 'scheduled';
/** Use this method to notify the runtime to wait for asynchronous tasks (e.g. logging, analytics to third-party services, streaming and caching).
*
* The first event.waitUntil to fail will be observed and recorded as the status in the Cron Trigger Past Events table. Otherwise, it will be reported as a Success.
*
* 15 minute wall time limit, in addition to the existing CPU time limit */
waitUntil(promise: Promise<unknown>): void;
}
//#endregion
//#region ES Module worker types
/*
export default {
fetch(request: IncomingRequestCf, env: MyWorkerEnv, ctx: ModuleWorkerContext): Promise<Response>;
scheduled(event: ModuleWorkerScheduledEvent, env: MyWorkerEnv, ctx: ModuleWorkerContext): Promise<void>;
};
*/
/** Third argument (ctx) passed to module worker fetch/scheduled handlers. */
// deno-lint-ignore no-empty-interface
export interface ModuleWorkerContext extends WorkerContextMethods {
}
/** First argument (event) passed to module worker scheduled handlers. */
// deno-lint-ignore no-empty-interface
export interface ModuleWorkerScheduledEvent extends ScheduledEventProperties {
}
//#endregion
//#region Workers KV https://developers.cloudflare.com/workers/runtime-apis/kv
/** Options accepted by KVNamespace.get() and getWithMetadata(). */
export interface KVGetOptions {
// https://developers.cloudflare.com/workers/runtime-apis/kv#cache-ttl
/** The cacheTtl parameter must be an integer that is greater than or equal to 60.
*
* It defines the length of time in seconds that a KV result is cached in the edge location that it is accessed from.
* This can be useful for reducing cold read latency on keys that are read relatively infrequently.
* It is especially useful if your data is write-once or write-rarely, but is not recommended if your data is updated often
* and you need to see updates shortly after they're written, because writes that happen from other edge locations
* won't be visible until the cached value expires.
*
* The effective Cache TTL of an already cached item can be reduced by getting it again it with a lower cacheTtl.
* For example, if you did NAMESPACE.get(key, {cacheTtl: 86400}) but later realized that caching for 24 hours was too long,
* you could NAMESPACE.get(key, {cacheTtl: 300}) or even NAMESPACE.get(key) and it would check for newer data to respect
* the provided cacheTtl, which defaults to 60. */
readonly cacheTtl?: number;
}
/** Many common uses of Workers KV involve writing keys that are only meant to be valid for a certain amount of time.
*
* Rather than requiring applications to remember to delete such data at the appropriate time, Workers KV offers the ability to create keys that automatically expire,
* either at a particular point in time or after a certain amount of time has passed since the key was last modified.
*
* Once the expiration time of an expiring key is reached, it will be deleted from the system. After its deletion, attempts to read it will behave as if the key does not exist,
* and it will not count against the namespace’s storage usage for billing purposes.
*
* Note that expiration times of less than 60 seconds in the future or expiration TTLs of less than 60 seconds are not supported at this time. */
export interface KVPutOptions {
// NOTE(review): expiration and expirationTtl express the same thing two ways; presumably only one should be set per call — confirm against the KV docs.
/** Absolute expiration time specified in a number of seconds since the UNIX epoch.
*
* For example, if you wanted a key to expire at 12:00AM UTC on April 1, 2019, you would set the key’s expiration to 1554076800. */
readonly expiration?: number;
/** Expiration TTL (time to live), using a relative number of seconds from the current time.
*
* For example, if you wanted a key to expire 10 minutes after creating it, you would set its expiration TTL to 600. */
readonly expirationTtl?: number;
/** Metadata to associate with the key-value pair.
*
* The serialized JSON representation of the metadata object must be no more than 1024 bytes in length. */
readonly metadata?: Record<string, unknown>;
}
/** Options accepted by KVNamespace.list(). */
export interface KVListOptions {
/** A prefix you can use to filter all keys. */
readonly prefix?: string;
/** The maximum number of keys returned. The default is 1000, which is the maximum.
*
* It is unlikely that you will want to change this default, but it is included for completeness. */
readonly limit?: number;
/** A string used for paginating responses. */
readonly cursor?: string;
}
/** One key entry returned by KVNamespace.list(). */
export interface KVListResultKey {
/** The name of the key. */
readonly name: string;
/** The expiration value will only be returned if the key has an expiration, and will be in the absolute value form, even if it was set in the TTL form. */
readonly expiration?: number;
/** Metadata will only be returned if the given key has non-null associated metadata. */
readonly metadata?: Record<string, unknown>;
}
/** Common shape of a KVNamespace.list() page; see the complete/incomplete refinements below. */
export interface KVListResult {
/** An array of objects describing each key.
*
* Keys are always returned in lexicographically sorted order according to their UTF-8 bytes. */
readonly keys: KVListResultKey[];
}
/** Final page of a list: list_complete is true and no cursor is provided. */
export interface KVListCompleteResult extends KVListResult {
/** No more keys to fetch. */
// deno-lint-ignore camelcase
readonly list_complete: true;
}
/** Non-final page of a list: pass cursor to the next list() call to continue. */
export interface KVListIncompleteResult extends KVListResult {
/** If list_complete is false, there are more keys to fetch. */
// deno-lint-ignore camelcase
readonly list_complete: false;
/** Used in subsequent list call. */
readonly cursor: string;
}
/** Pair returned by getWithMetadata(): the value plus its associated metadata (null if none). */
export interface KVValueAndMetadata<T> {
readonly metadata: Record<string, unknown> | null;
readonly value: T;
}
/** Binding to a Workers KV namespace: an eventually consistent, edge-cached key-value store. */
export interface KVNamespace {
// https://developers.cloudflare.com/workers/runtime-apis/kv#writing-key-value-pairs
/** Creates a new key-value pair, or updates the value for a particular key.
*
* This method returns a Promise that you should await on in order to verify a successful update.
*
* The maximum size of a value is 25MB.
*
* Due to the eventually consistent nature of Workers KV, concurrent writes from different edge locations can end up up overwriting one another.
* It’s a common pattern to write data via Wrangler or the API but read the data from within a worker, avoiding this issue by issuing all writes from the same location.
*
* Writes are immediately visible to other requests in the same edge location, but can take up to 60 seconds to be visible in other parts of the world.
*/
put(key: string, value: string | ReadableStream | ArrayBuffer, opts?: KVPutOptions): Promise<void>;
// https://developers.cloudflare.com/workers/runtime-apis/kv#reading-key-value-pairs
/** Returns a promise you can await to get the value.
*
* If the key is not found, the promise will resolve with the literal value null.
*
* Note that get may return stale values -- if a given key has recently been read in a given location,
* changes to the key made in other locations may take up to 60 seconds to be visible.
*
* The type parameter can be any of:
* - "text": (default) a string
* - "json": an object decoded from a JSON string
* - "arrayBuffer": An ArrayBuffer instance.
* - "stream": A ReadableStream.
*
* For simple values it often makes sense to use the default "text" type which provides you with your value as a string.
* For convenience a "json" type is also specified which will convert a JSON value into an object before returning it to you.
* For large values you can request a ReadableStream, and for binary values an ArrayBuffer.
*
* For large values, the choice of type can have a noticeable effect on latency and CPU usage.
* For reference, the types can be ordered from fastest to slowest as "stream", "arrayBuffer", "text", and "json". */
// opts is optional for the default "text" type: NAMESPACE.get(key) is valid
// per the KV API (see also the cacheTtl notes on KVGetOptions).
get(key: string, opts?: KVGetOptions | { type: 'text' }): Promise<string | null>;
get(key: string, opts: KVGetOptions | { type: 'json' }): Promise<Record<string, unknown> | null>;
get(key: string, opts: KVGetOptions | { type: 'arrayBuffer' }): Promise<ArrayBuffer | null>;
get(key: string, opts: KVGetOptions | { type: 'stream' }): Promise<ReadableStream | null>;
// https://developers.cloudflare.com/workers/runtime-apis/kv#metadata-1
/** Gets the metadata associated with a key-value pair alongside its value.
*
* If there’s no metadata associated with the requested key-value pair, null will be returned for metadata. */
getWithMetadata(key: string, opts?: KVGetOptions | { type: 'text' }): Promise<KVValueAndMetadata<string> | null>;
getWithMetadata(key: string, opts: KVGetOptions | { type: 'json' }): Promise<KVValueAndMetadata<Record<string, unknown>> | null>;
getWithMetadata(key: string, opts: KVGetOptions | { type: 'arrayBuffer' }): Promise<KVValueAndMetadata<ArrayBuffer> | null>;
getWithMetadata(key: string, opts: KVGetOptions | { type: 'stream' }): Promise<KVValueAndMetadata<ReadableStream> | null>;
// https://developers.cloudflare.com/workers/runtime-apis/kv#deleting-key-value-pairs
/** Removes the key and value from your namespace.
*
* As with any operations, it may take some time to see that the key has been deleted from various points at the edge.
*
* This method returns a promise that you should await on in order to verify successful deletion. */
delete(key: string): Promise<void>;
// https://developers.cloudflare.com/workers/runtime-apis/kv#listing-keys
/** List all of the keys that live in a given namespace.
*
* Changes may take up to 60 seconds to be visible when listing keys.
*/
list(opts?: KVListOptions): Promise<KVListCompleteResult | KVListIncompleteResult>;
}
//#endregion
//#region Workers Cache https://developers.cloudflare.com/workers/runtime-apis/cache https://developers.cloudflare.com/workers/learning/how-the-cache-works
/**
* The Cache API is available globally but the contents of the cache do not replicate outside of the originating data center.
*
* Any Cache API operations in the Cloudflare Workers dashboard editor, Playground previews, and any *.workers.dev deployments will have no impact.
* Only Workers deployed to custom domains have access to functional Cache operations.
*/
export interface CfGlobalCaches {
/**
* The default cache (the same cache shared with fetch requests).
*
* This is useful when needing to override content that is already cached, after receiving the response.
*
* This affects your public cache.
*
* Zone-level: it uses the same cache "namespace" as your zone (e.g. example.com)
*/
readonly default: CfCache; // caches.default
/**
* A namespaced cache (separate from the cache shared with fetch requests).
*
* This can be useful when writing new content to the cache, for example after running a more compute heavy operation such as parsing HTML or running a computation,
* to store them locally in the colo and readily access them on the following request, rather having to rerun the same operation.
*
* The public cannot access this (unless you let them through your Worker).
*
* Account-level: two different workers can access the same cache using the same name! They will be accessing the same namespace.
*/
open(cacheName: string): Promise<CfCache>;
}
/** Options accepted by CfCache.match() and delete(). */
export interface CfCacheOptions {
/** Consider the request method a GET regardless of its actual value. */
readonly ignoreMethod?: boolean;
}
/** A single cache instance (caches.default or one opened via caches.open()). */
export interface CfCache {
/** Adds to the cache a response keyed to the given request.
*
* Returns a promise that resolves to undefined once the cache stores the response.
*
* @param request Either a string or a Request object to serve as the key. If a string is passed, it is interpreted as the URL for a new Request object.
* @param response A Response object to store under the given key.
*
* @throws if the request passed is a method other than GET
* @throws if the response passed is a status of 206 Partial Content
* @throws if the response passed contains the header Vary: * (required by the Cache API specification)
* */
put(request: string | Request, response: Response): Promise<undefined>;
/** Returns a promise wrapping the response object keyed to that request.
*
* Never sends a subrequest to the origin. If no matching response is found in cache, the promise that cache.match() returns is fulfilled with undefined.
*
* @param request The string or Request object used as the lookup key. Strings are interpreted as the URL for a new Request object.
*/
match(request: string | Request, opts?: CfCacheOptions): Promise<Response | undefined>;
/** Deletes the Response object from the cache and returns a Promise for a Boolean response:
*
* true: The response was cached but is now deleted
* false: The response was not in the cache at the time of deletion.
*
* @param request The string or Request object used as the lookup key. Strings are interpreted as the URL for a new Request object.
* */
delete(request: string | Request, opts?: CfCacheOptions): Promise<boolean>;
}
//#endregion
//#region Accessing a Durable Object from a Worker
// https://developers.cloudflare.com/workers/runtime-apis/durable-objects#accessing-a-durable-object-from-a-worker
/** The Durable Object namespace is configured to use a particular class, and controls access to instances of that class. */
export interface DurableObjectNamespace {
/** Creates a new object ID randomly.
*
* This method will never return the same ID twice, and thus it is guaranteed that the object does not yet exist and has never existed at the time the method returns.
*
* https://developers.cloudflare.com/workers/runtime-apis/durable-objects#generating-ids-randomly
* */
// NOTE(review): the jurisdiction option presumably restricts where the object's data may live (here: the EU) — confirm against the jurisdiction docs.
newUniqueId(opts?: { jurisdiction: 'eu' }): DurableObjectId;
/** This method derives a unique object ID from the given name string.
*
* It will always return the same ID when given the same name as input.
*
* https://developers.cloudflare.com/workers/runtime-apis/durable-objects#deriving-ids-from-names
* */
idFromName(name: string): DurableObjectId;
/** This method parses an ID that was previously stringified.
*
* This is useful in particular with IDs created using newUniqueId(), as these IDs need to be stored somewhere, probably as as a string.
*
* A stringified object ID is a 64-digit hexadecimal number. However, not all 64-digit hex numbers are valid IDs.
*
* This method will throw if it is passed an ID that was not originally created by newUniqueId() or idFromName().
*
* It will also throw if the ID was originally created for a different namespace.
*
* https://developers.cloudflare.com/workers/runtime-apis/durable-objects#parsing-previously-created-ids-from-strings
* */
idFromString(hexStr: string): DurableObjectId;
/** This method constructs an object "stub", which is a local client that provides access to a remote Durable Object.
*
* If the remote object does not already exist, it will be created.
*
* Thus, there will always be something for the stub to talk to.
*
* This method always returns the stub immediately, before it has connected to the remote object.
*
* This allows you to begin making requests to the object right away, without waiting for a network round trip.
*
* https://developers.cloudflare.com/workers/runtime-apis/durable-objects#obtaining-an-object-stub
* */
get(id: DurableObjectId): DurableObjectStub;
}
// https://developers.cloudflare.com/workers/runtime-apis/durable-objects#obtaining-an-object-stub
/** A Durable Object stub is a client object used to send requests to a remote Durable Object.
*
* Stubs implement E-order semantics. When you make multiple calls to the same stub, it is guaranteed that the calls will be delivered
* to the remote object in the order in which you made them. This ordering guarantee often makes many distributed programming problems easier.
* However, there is a cost: due to random network disruptions or other transient issues, a stub may become disconnected from its remote object.
* Once a stub is disconnected, it is permanently broken, and all in-flight calls and future calls will fail with exceptions.
* In order to make new requests to the Durable Object, you must call OBJECT_NAMESPACE.get(id) again to get a new stub,
* and you must keep in mind that there are no ordering guarantees between requests to the new stub vs. the old one.
* If you don't care about ordering at all, you can create a new stub for every request.
*/
export interface DurableObjectStub {
/** The fetch() method of a stub has the exact same signature as the global fetch.
*
* However, instead of sending an HTTP request to the internet, the request is always sent to the Durable Object to which the stub points.
*
* Any uncaught exceptions thrown by the Durable Object's fetch() handler are propagated to the caller's fetch() promise. */
fetch(url: RequestInfo, init?: RequestInit): Promise<Response>;
}
// https://developers.cloudflare.com/workers/runtime-apis/durable-objects#generating-ids-randomly
/** Opaque identifier for a Durable Object instance; stringifies to a 64-hex-digit value (see DurableObjectNamespace.idFromString). */
export interface DurableObjectId {
toString(): string;
}
//#endregion
//#region Durable Object implementation types
// Values storable in Durable Object storage.
// deno-lint-ignore ban-types
export type DurableObjectStorageValue = number | string | object; // add more as necessary ("The value can be any type supported by the structured clone algorithm, which is true of most types.")
/** Storage operations shared by DurableObjectStorage and its transaction object. */
export interface DurableObjectStorageMethods {
/** Retrieves the value associated with the given key.
*
* The type of the returned value will be whatever was previously written for the key, or undefined if the key does not exist. */
get(key: string, opts?: DurableObjectStorageReadOptions): Promise<DurableObjectStorageValue | undefined>;
/** Retrieves the values associated with each of the provided keys.
*
* The type of each returned value in the Map will be whatever was previously written for the corresponding key.
* Any keys that do not exist will be omitted from the result Map.
*
* Supports up to 128 keys at a time. */
get(keys: readonly string[], opts?: DurableObjectStorageReadOptions): Promise<Map<string, DurableObjectStorageValue>>;
/** Stores the value and associates it with the given key.
*
* The value can be any type supported by the structured clone algorithm, which is true of most types.
*
* Keys are limited to a max size of 2048 bytes and values are limited to 32 KiB (32768 bytes).
*
* */
put(key: string, value: DurableObjectStorageValue, opts?: DurableObjectStorageWriteOptions): Promise<void>;
/** Takes an Object and stores each of its keys and values to storage.
*
* Each value can be any type supported by the structured clone algorithm, which is true of most types.
*
* Supports up to 128 key-value pairs at a time.
*
* Each key is limited to a max size of 2048 bytes and each value is limited to 32 KiB (32768 bytes). */
// NOTE(review): the bulk form accepts Record<string, unknown> while the single-key form narrows to DurableObjectStorageValue — intentional? Confirm before tightening.
put(entries: Record<string, unknown>, opts?: DurableObjectStorageWriteOptions): Promise<void>;
/** Deletes the key and associated value.
*
* Returns true if the key existed or false if it didn't. */
delete(key: string, opts?: DurableObjectStorageWriteOptions): Promise<boolean>;
/** Deletes the provided keys and their associated values.
*
* Returns a count of the number of key-value pairs deleted. */
delete(keys: readonly string[], opts?: DurableObjectStorageWriteOptions): Promise<number>;
/** Returns all keys and values associated with the current Durable Object in ascending lexicographic sorted order.
*
* The type of each returned value in the Map will be whatever was previously written for the corresponding key.
*
* Be aware of how much data may be stored in your actor before calling this version of list without options,
* because it will all be loaded into the Durable Object's memory, potentially hitting its [limit](https://developers.cloudflare.com/workers/platform/limits).
*
* If that is a concern, pass options to list as documented below. */
list(): Promise<Map<string, DurableObjectStorageValue>>;
/** Returns keys associated with the current Durable Object according to the parameters in the provided DurableObjectStorageListOptions object. */
list(options: DurableObjectStorageListOptions & DurableObjectStorageReadOptions): Promise<Map<string, DurableObjectStorageValue>>;
}
/** Top-level transactional storage API of a Durable Object (state.storage). */
export interface DurableObjectStorage extends DurableObjectStorageMethods {
// https://developers.cloudflare.com/workers/runtime-apis/durable-objects#methods
/** Runs the sequence of storage operations called on txn in a single transaction that either commits successfully or aborts.
*
* Failed transactions are retried automatically.
*
* Non-storage operations that affect external state, like calling fetch, may execute more than once if the transaction is retried.
*
* The closure can return a value, which will be propagated as the return value of the call. */
transaction<T>(closure: (txn: DurableObjectStorageTransaction) => T | PromiseLike<T>): Promise<T>;
/** Deletes all keys and associated values, effectively deallocating all storage used by the worker.
*
* Once deleteAll() has been called, no subsequent Durable Storage operations (including transactions and operations on transactions) may be executed
* until after the deleteAll() operation completes and the returned promise resolves.
*
* In the event of a failure while the deleteAll() operation is still in flight, it may be that only a subset of the data is properly deleted. */
deleteAll(): Promise<void>;
}
/** Provides access to the put(), get(), delete() and list() methods documented above to run in the current transaction context.
*
* In order to get transactional behavior within a transaction closure, you must call the methods on the txn object instead of on the top-level state.storage object.
*
* Also supports a rollback() function that ensures any changes made during the transaction will be rolled back rather than committed.
*
* After rollback() is called, any subsequent operations on the txn object will fail with an exception.
*
* rollback() takes no parameters and returns nothing to the caller. */
export interface DurableObjectStorageTransaction extends DurableObjectStorageMethods {
rollback(): void;
}
/** Options for Durable Object storage read operations (get/list). */
export interface DurableObjectStorageReadOptions {
/** Bypass the built-in concurrency gates */
readonly allowConcurrency?: boolean;
/** Bypass the built-in memory cache */
readonly noCache?: boolean;
}
/** Options for Durable Object storage write operations (put/delete). */
export interface DurableObjectStorageWriteOptions {
/** Bypass the built-in waiting for write to complete before returning a response
*
* With the new storage memory cache in place, if you don't await a `put()`, it will now implicitly await it before returning a response,
* to avoid premature confirmation of writes. But if you actually don't want to wait, and you are OK with the possibility of rare data loss
* (e.g. if the power went out before the write completed), then you should use `{ allowUnconfirmed: true }` */
readonly allowUnconfirmed?: boolean;
/** Bypass the built-in memory cache */
readonly noCache?: boolean;
}
/** Range/pagination options for DurableObjectStorage.list(). */
export interface DurableObjectStorageListOptions {
/** Key at which the list results should start, inclusive. */
readonly start?: string;
/** Key at which the list results should end, exclusive. */
readonly end?: string;
/** Restricts results to only include key-value pairs whose keys begin with the prefix. */
readonly prefix?: string;
/** If true, return results in descending lexicographic order instead of the default ascending order. */
readonly reverse?: boolean;
/** Maximum number of key-value pairs to return. */
readonly limit?: number;
}
/** Passed from the runtime to provide access to the Durable Object's storage as well as various metadata about the Object. */
export interface DurableObjectState {
/** The ID of this Durable Object.
*
* It can be converted into a hex string using its .toString() method. */
readonly id: DurableObjectId;
/** Contains methods for accessing persistent storage via the transactional storage API.
*
* See Transactional Storage API for a detailed reference. */
readonly storage: DurableObjectStorage;
/** Notifies the runtime to wait for the completion of asynchronous tasks that may complete after a response has already been sent.
*
* See [waitUntil()](https://developer.mozilla.org/en-US/docs/Web/API/ExtendableEvent/waitUntil) for a detailed reference. */
waitUntil(promise: Promise<unknown>): void;
/** Useful for delaying delivery of requests and other events while performing some critical state-affecting task.
*
* For example, this can be used to perform start-up initialization in an object’s constructor. */
blockConcurrencyWhile<T>(fn: () => Promise<T>): Promise<T>;
}
//#endregion
//#region WebSockets
/** Cloudflare-specific methods and caveats added to the server side of a WebSocket pair. */
export interface CloudflareWebSocketExtensions {
  /** Accepts the Websocket connection and begins terminating requests for the WebSocket at Cloudflare's edge.
   * This effectively enables the Workers runtime to begin responding to and handling WebSocket requests.
   *
   * https://developers.cloudflare.com/workers/runtime-apis/websockets#accept
   * */
  accept(): void;
  /**
   * Cloudflare-specific behavior:
   *
   * readyState is not implemented (and associated WebSocket.CONNECTED, CONNECTING etc), by the time you get it, it's already in the OPEN state
   *
   * send() may throw:
   * - if accept() hasn't been called
   * - if close() has already been called
   * - if onerror has fired.
   * - Otherwise, it adds the message to a send queue, which shouldn't ever throw
   * - send() doesn't care if onclose has occurred since they represent opposite directions of the stream.
   */
}
/** Cloudflare extension to ResponseInit: carries the WebSocket handed back to the
 * client (see `WebSocketPair[0]`). */
export interface CloudflareResponseInitExtensions {
  webSocket?: WebSocket & CloudflareWebSocketExtensions;
}
// non-standard class, only on CF
// Indexable by the numeric keys 0 and 1 (client and server ends respectively).
export interface WebSocketPair {
  readonly 0: WebSocket; // client, returned in the ResponseInit
  readonly 1: WebSocket & CloudflareWebSocketExtensions; // server, accept(), addEventListener(), send() and close()
}
//#endregion | the_stack |
import React from 'react';
import { act } from 'react-dom/test-utils';
import classNames from 'classnames';
import { mount } from './wrapper';
import type { CSSMotionProps } from '../src/CSSMotion';
import RefCSSMotion, { genCSSMotion } from '../src/CSSMotion';
import ReactDOM from 'react-dom';
describe('CSSMotion', () => {
const CSSMotion = genCSSMotion({
transitionSupport: true,
forwardRef: false,
});
beforeEach(() => {
jest.useFakeTimers();
});
afterEach(() => {
jest.useRealTimers();
});
describe('transition', () => {
  // Motion style hooks: collapse to 0px height / expand to 100px height.
  function onCollapse() {
    return { height: 0 };
  }
  function onExpand() {
    return { height: 100 };
  }

  // One entry per motion phase: the motion flag + start/active hooks to set,
  // the visibility sequence that triggers the phase, and the heights expected
  // at the start step and the active step of the transition.
  const actionList: {
    name: string;
    props: CSSMotionProps;
    visible: boolean[];
    oriHeight: number;
    tgtHeight: number;
  }[] = [
    {
      name: 'appear',
      props: {
        motionAppear: true,
        onAppearStart: onCollapse,
        onAppearActive: onExpand,
      },
      visible: [true],
      oriHeight: 0,
      tgtHeight: 100,
    },
    {
      name: 'enter',
      props: {
        motionEnter: true,
        onEnterStart: onCollapse,
        onEnterActive: onExpand,
      },
      visible: [false, true],
      oriHeight: 0,
      tgtHeight: 100,
    },
    {
      name: 'leave',
      props: {
        motionLeave: true,
        onLeaveStart: onExpand,
        onLeaveActive: onCollapse,
      },
      visible: [true, false],
      oriHeight: 100,
      tgtHeight: 0,
    },
  ];

  actionList.forEach(({ name, props, visible, oriHeight, tgtHeight }) => {
    // Demo disables every motion by default and re-enables only the one under
    // test through the scenario's `props` spread.
    class Demo extends React.Component {
      state = {
        visible: visible[0],
      };

      render() {
        return (
          <CSSMotion
            motionName="transition"
            motionAppear={false}
            motionEnter={false}
            motionLeave={false}
            visible={this.state.visible}
            {...props}
          >
            {({ style, className, visible: motionVisible }) => {
              // The render-prop `visible` must mirror the component state.
              expect(motionVisible).toEqual(this.state.visible);
              return (
                <div
                  style={style}
                  className={classNames('motion-box', className)}
                />
              );
            }}
          </CSSMotion>
        );
      }
    }

    it(name, () => {
      const nextVisible = visible[1];
      const wrapper = mount(<Demo />);

      function doStartTest() {
        wrapper.update();

        // Start step: phase class applied, `-active` not yet, start height set.
        const boxNode = wrapper.find('.motion-box');
        expect(boxNode.hasClass('transition')).toBeTruthy();
        expect(boxNode.hasClass(`transition-${name}`)).toBeTruthy();
        expect(boxNode.hasClass(`transition-${name}-active`)).toBeFalsy();
        expect(boxNode.props().style.height).toEqual(oriHeight);

        // Motion active
        act(() => {
          jest.runAllTimers();
          wrapper.update();
        });
        const activeBoxNode = wrapper.find('.motion-box');
        expect(activeBoxNode.hasClass('transition')).toBeTruthy();
        expect(activeBoxNode.hasClass(`transition-${name}`)).toBeTruthy();
        expect(
          activeBoxNode.hasClass(`transition-${name}-active`),
        ).toBeTruthy();
        expect(activeBoxNode.props().style.height).toEqual(tgtHeight);

        // Motion end
        wrapper.triggerMotionEvent();
        act(() => {
          jest.runAllTimers();
          wrapper.update();
        });
        if (nextVisible === false) {
          // Leave removes the node from the DOM entirely.
          expect(wrapper.find('.motion-box')).toHaveLength(0);
        } else if (nextVisible !== undefined) {
          // Enter keeps the node but strips all motion classes and styles.
          const finalBoxNode = wrapper.find('.motion-box');
          expect(finalBoxNode.hasClass('transition')).toBeFalsy();
          expect(finalBoxNode.hasClass(`transition-${name}`)).toBeFalsy();
          expect(
            finalBoxNode.hasClass(`transition-${name}-active`),
          ).toBeFalsy();
          expect(finalBoxNode.props().style).toBeFalsy();
        }
      }

      // Delay for the visible finished
      if (nextVisible !== undefined) {
        wrapper.setState({ visible: nextVisible });
        doStartTest();
      } else {
        doStartTest();
      }
    });
  });

  it('stop transition if config motion to false', () => {
    const wrapper = mount(
      <CSSMotion motionName="transition" visible>
        {({ style, className }) => (
          <div
            style={style}
            className={classNames('motion-box', className)}
          />
        )}
      </CSSMotion>,
    );
    wrapper.update();
    let boxNode = wrapper.find('.motion-box');
    expect(boxNode.hasClass('transition')).toBeTruthy();
    expect(boxNode.hasClass('transition-appear')).toBeTruthy();
    expect(boxNode.hasClass('transition-appear-active')).toBeFalsy();

    // Disabling the motion mid-flight should drop every motion class.
    act(() => {
      wrapper.setProps({ motionAppear: false });
      jest.runAllTimers();
      wrapper.update();
    });
    boxNode = wrapper.find('.motion-box');
    expect(boxNode.hasClass('transition')).toBeFalsy();
    expect(boxNode.hasClass('transition-appear')).toBeFalsy();
    expect(boxNode.hasClass('transition-appear-active')).toBeFalsy();
  });

  it('quick switch should have correct status', async () => {
    const wrapper = mount(
      <CSSMotion motionName="transition">
        {({ style, className }) => (
          <div
            style={style}
            className={classNames('motion-box', className)}
          />
        )}
      </CSSMotion>,
    );

    // show -> hide: ends in the leave-active state.
    wrapper.setProps({ visible: true });
    act(() => {
      jest.runAllTimers();
    });
    wrapper.setProps({ visible: false });
    act(() => {
      jest.runAllTimers();
      wrapper.update();
    });
    let boxNode = wrapper.find('.motion-box');
    expect(boxNode.hasClass('transition')).toBeTruthy();
    expect(boxNode.hasClass('transition-leave')).toBeTruthy();
    expect(boxNode.hasClass('transition-leave-active')).toBeTruthy();

    // Flip visible true->false within a microtask: still resolves to leave.
    wrapper.setProps({ visible: true });
    await act(() => {
      return Promise.resolve().then(() => {
        wrapper.setProps({ visible: false });
      });
    });
    act(() => {
      jest.runAllTimers();
      wrapper.update();
    });
    boxNode = wrapper.find('.motion-box');
    expect(boxNode.hasClass('transition')).toBeTruthy();
    expect(boxNode.hasClass('transition-leave')).toBeTruthy();
    expect(boxNode.hasClass('transition-leave-active')).toBeTruthy();

    wrapper.unmount();
  });

  describe('deadline should work', () => {
    // Shared scenario: with motionDeadline set, onAppearEnd must fire once
    // timers elapse even when no transitionend event ever arrives.
    function test(name: string, Component: React.ComponentType<any>) {
      it(name, () => {
        const onAppearEnd = jest.fn();
        mount(
          <CSSMotion
            motionName="transition"
            motionDeadline={1000}
            onAppearEnd={onAppearEnd}
            visible
          >
            {({ style, className }, ref) => (
              <Component
                ref={ref}
                style={style}
                className={classNames('motion-box', className)}
              />
            )}
          </CSSMotion>,
        );
        expect(onAppearEnd).not.toHaveBeenCalled();
        act(() => {
          jest.runAllTimers();
        });
        expect(onAppearEnd).toHaveBeenCalled();
      });
    }

    test(
      'without ref',
      React.forwardRef(props => <div {...props} />),
    );
    test(
      'FC with ref',
      React.forwardRef((props, ref) => <div {...props} ref={ref} />),
    );
    test(
      'FC but not dom ref',
      React.forwardRef((props, ref) => {
        React.useImperativeHandle(ref, () => ({}));
        return <div {...props} />;
      }),
    );
  });

  it('not crash when no children', () => {
    const wrapper = mount(<CSSMotion motionName="transition" visible />);
    expect(wrapper.render()).toMatchSnapshot();
  });
});
describe('animation', () => {
  // Same phase table as the transition suite, but animation motions have no
  // start/active style hooks — only the class names are asserted.
  const actionList = [
    {
      name: 'appear',
      props: { motionAppear: true },
      visible: [true],
    },
    {
      name: 'enter',
      props: { motionEnter: true },
      visible: [false, true],
    },
    {
      name: 'leave',
      props: { motionLeave: true },
      visible: [true, false],
    },
  ];

  actionList.forEach(({ name, visible, props }) => {
    // Demo disables every motion by default and re-enables only the one under
    // test through the scenario's `props` spread.
    class Demo extends React.Component {
      state = {
        visible: visible[0],
      };

      render() {
        return (
          <CSSMotion
            motionName="animation"
            motionAppear={false}
            motionEnter={false}
            motionLeave={false}
            visible={this.state.visible}
            {...props}
          >
            {({ style, className }) => (
              <div
                style={style}
                className={classNames('motion-box', className)}
              />
            )}
          </CSSMotion>
        );
      }
    }

    it(name, () => {
      const wrapper = mount(<Demo />);
      wrapper.update();
      const nextVisible = visible[1];

      function doStartTest() {
        // Motion active
        act(() => {
          jest.runAllTimers();
          wrapper.update();
        });
        const activeBoxNode = wrapper.find('.motion-box');
        expect(activeBoxNode.hasClass('animation')).toBeTruthy();
        expect(activeBoxNode.hasClass(`animation-${name}`)).toBeTruthy();
        expect(
          activeBoxNode.hasClass(`animation-${name}-active`),
        ).toBeTruthy();
      }

      // Delay for the visible finished
      if (nextVisible !== undefined) {
        wrapper.setState({ visible: nextVisible });
        doStartTest();
      } else {
        doStartTest();
      }
    });
  });
});
it('not block motion when motion set delay', () => {
  // Mount without any motion config, then enable the leave motion at the same
  // time as hiding: the leave-active class must still be reached.
  const wrapper = mount(
    <CSSMotion visible>
      {({ style, className }) => (
        <div style={style} className={classNames('motion-box', className)} />
      )}
    </CSSMotion>,
  );
  wrapper.setProps({
    motionName: 'animation',
    motionLeave: true,
    visible: false,
  });
  act(() => {
    jest.runAllTimers();
    wrapper.update();
  });
  const activeBoxNode = wrapper.find('.motion-box');
  expect(activeBoxNode.hasClass(`animation-leave-active`)).toBeTruthy();
});
describe('immediately', () => {
  it('motionLeaveImmediately', async () => {
    // Mounting with visible={false} + motionLeaveImmediately runs the leave
    // motion right away instead of waiting for a visible flip.
    const wrapper = mount(
      <CSSMotion
        motionName="transition"
        motionLeaveImmediately
        visible={false}
      >
        {({ style, className }) => (
          <div
            style={style}
            className={classNames('motion-box', className)}
          />
        )}
      </CSSMotion>,
    );
    wrapper.update();
    // Start step: leave class applied, -active not yet.
    const boxNode = wrapper.find('.motion-box');
    expect(boxNode.hasClass('transition')).toBeTruthy();
    expect(boxNode.hasClass('transition-leave')).toBeTruthy();
    expect(boxNode.hasClass('transition-leave-active')).toBeFalsy();

    // Motion active
    await act(async () => {
      jest.runAllTimers();
      await Promise.resolve();
      wrapper.update();
    });
    const activeBoxNode = wrapper.find('.motion-box');
    expect(activeBoxNode.hasClass('transition')).toBeTruthy();
    expect(activeBoxNode.hasClass('transition-leave')).toBeTruthy();
    expect(activeBoxNode.hasClass('transition-leave-active')).toBeTruthy();
  });
});
it('no transition', () => {
  // With transitionSupport disabled no motion class should ever be applied.
  const NoCSSTransition = genCSSMotion({
    transitionSupport: false,
    forwardRef: false,
  });

  const wrapper = mount(
    <NoCSSTransition motionName="transition">
      {({ style, className }) => (
        <div style={style} className={classNames('motion-box', className)} />
      )}
    </NoCSSTransition>,
  );
  const boxNode = wrapper.find('.motion-box');
  expect(boxNode.hasClass('transition')).toBeFalsy();
  expect(boxNode.hasClass('transition-appear')).toBeFalsy();
  expect(boxNode.hasClass('transition-appear-active')).toBeFalsy();
});
it('forwardRef', () => {
  // RefCSSMotion (forwardRef build) must pass the outer ref through to the
  // rendered DOM node.
  const domRef = React.createRef();
  mount(
    <RefCSSMotion motionName="transition" ref={domRef}>
      {({ style, className }, ref) => (
        <div
          ref={ref}
          style={style}
          className={classNames('motion-box', className)}
        />
      )}
    </RefCSSMotion>,
  );
  expect(domRef.current instanceof HTMLElement).toBeTruthy();
});
it("onMotionEnd shouldn't be fired by inner element", () => {
  const onLeaveEnd = jest.fn();

  const wrapper = mount(
    <CSSMotion
      visible
      motionName="bamboo"
      onLeaveEnd={onLeaveEnd}
      removeOnLeave={false}
    >
      {(_, ref) => (
        <div className="outer-block" ref={ref}>
          <div className="inner-block" />
        </div>
      )}
    </CSSMotion>,
  );

  // Re-arm the leave motion: show, settle, then hide again.
  function resetLeave() {
    act(() => {
      wrapper.setProps({ visible: true });
      jest.runAllTimers();
      wrapper.update();
      wrapper.setProps({ visible: false });
      jest.runAllTimers();
      wrapper.update();
    });
  }

  // Default target and the ref'd outer element both end the motion…
  resetLeave();
  wrapper.triggerMotionEvent();
  expect(onLeaveEnd).toHaveBeenCalledTimes(1);

  resetLeave();
  wrapper.triggerMotionEvent(wrapper.find('.outer-block'));
  expect(onLeaveEnd).toHaveBeenCalledTimes(2);

  // …but an event bubbling from a child element must be ignored (count stays 2).
  resetLeave();
  wrapper.triggerMotionEvent(wrapper.find('.inner-block'));
  expect(onLeaveEnd).toHaveBeenCalledTimes(2);
});
it('switch dom should work', () => {
  // Swapping the rendered element type (div -> p) while leaving must not lose
  // the motion: the deadline still fires onLeaveEnd.
  const Demo = ({
    Component,
    ...props
  }: Partial<CSSMotionProps> & { Component: any }) => {
    return (
      <CSSMotion {...props} motionName="bamboo">
        {({ style, className }) => (
          <Component
            style={style}
            className={classNames('motion-box', className)}
          />
        )}
      </CSSMotion>
    );
  };

  const onLeaveEnd = jest.fn();
  const wrapper = mount(
    <Demo
      visible
      onLeaveEnd={onLeaveEnd}
      motionDeadline={233}
      Component="div"
    />,
  );
  act(() => {
    jest.runAllTimers();
    wrapper.update();
  });

  // Switch tag and hide in the same update.
  wrapper.setProps({ Component: 'p', visible: false });
  act(() => {
    jest.runAllTimers();
    wrapper.update();
  });
  expect(onLeaveEnd).toHaveBeenCalled();
});
it('prepare should block motion start', async () => {
  // onAppearPrepare returns a promise we control; the motion must stay in the
  // `-prepare` phase until that promise resolves.
  let lockResolve: Function;
  const onAppearPrepare = jest.fn(
    () =>
      new Promise(resolve => {
        lockResolve = resolve;
      }),
  );

  const wrapper = mount(
    <CSSMotion visible motionName="bamboo" onAppearPrepare={onAppearPrepare}>
      {({ style, className }) => (
        <div style={style} className={classNames('motion-box', className)} />
      )}
    </CSSMotion>,
  );
  act(() => {
    jest.runAllTimers();
    wrapper.update();
  });

  // Locked
  expect(
    wrapper.find('.motion-box').hasClass('bamboo-appear-prepare'),
  ).toBeTruthy();

  // Release
  await act(async () => {
    lockResolve();
    await Promise.resolve();
    jest.runAllTimers();
    wrapper.update();
  });
  expect(
    wrapper.find('.motion-box').hasClass('bamboo-appear-prepare'),
  ).toBeFalsy();
});
it('forceRender', () => {
  // forceRender keeps an invisible node mounted with display:none; removing
  // the flag while hidden unmounts it.
  const wrapper = mount(
    <CSSMotion forceRender motionName="bamboo" visible={false}>
      {({ style, className }) => (
        <div style={style} className={classNames('motion-box', className)} />
      )}
    </CSSMotion>,
  );
  expect(wrapper.find('.motion-box').props().style).toEqual({
    display: 'none',
  });

  // Reset should hide
  wrapper.setProps({ forceRender: false });
  expect(wrapper.find('.motion-box')).toHaveLength(0);
});
it('render null on first when removeOnLeave is false', () => {
  // Initially hidden: nothing rendered even though removeOnLeave is false.
  const wrapper = mount(
    <CSSMotion
      motionName="bamboo"
      removeOnLeave={false}
      leavedClassName="removed"
      visible={false}
    >
      {({ style, className }) => (
        <div style={style} className={classNames('motion-box', className)} />
      )}
    </CSSMotion>,
  );
  expect(wrapper.find('.motion-box')).toHaveLength(0);

  // Visible
  wrapper.setProps({ visible: true });
  act(() => {
    jest.runAllTimers();
    wrapper.update();
  });
  expect(wrapper.find('.motion-box')).toHaveLength(1);

  // Hide again
  wrapper.setProps({ visible: false });
  act(() => {
    jest.runAllTimers();
    // Dispatch a real transitionend so the leave motion finishes.
    const transitionEndEvent = new Event('transitionend');
    (
      wrapper.find('.motion-box').instance() as any as HTMLElement
    ).dispatchEvent(transitionEndEvent);
    jest.runAllTimers();
    wrapper.update();
  });
  // Node is kept in the DOM and tagged with the leavedClassName.
  expect(wrapper.find('.motion-box')).toHaveLength(1);
  expect(wrapper.find('.motion-box').hasClass('removed')).toBeTruthy();
});
describe('strict mode', () => {
  // Spy (without replacing the implementation) to observe whether CSSMotion
  // falls back to findDOMNode when it has no DOM ref.
  beforeEach(() => {
    jest.spyOn(ReactDOM, 'findDOMNode');
  });

  afterEach(() => {
    jest.resetAllMocks();
  });

  it('calls findDOMNode when no refs are passed', () => {
    const wrapper = mount(
      <CSSMotion motionName="transition" visible>
        {() => <div />}
      </CSSMotion>,
    );
    act(() => {
      jest.runAllTimers();
      wrapper.update();
    });
    expect(ReactDOM.findDOMNode).toHaveBeenCalled();
  });

  it('does not call findDOMNode when ref is passed internally', () => {
    const wrapper = mount(
      <CSSMotion motionName="transition" visible>
        {(props, ref) => <div ref={ref} />}
      </CSSMotion>,
    );
    act(() => {
      jest.runAllTimers();
      wrapper.update();
    });
    expect(ReactDOM.findDOMNode).not.toHaveBeenCalled();
  });

  it('calls findDOMNode when refs are forwarded but not assigned', () => {
    // Outer ref exists but the child never attaches the internal ref.
    const domRef = React.createRef();
    const wrapper = mount(
      <CSSMotion motionName="transition" visible ref={domRef}>
        {() => <div />}
      </CSSMotion>,
    );
    act(() => {
      jest.runAllTimers();
      wrapper.update();
    });
    expect(ReactDOM.findDOMNode).toHaveBeenCalled();
  });

  it('does not call findDOMNode when refs are forwarded and assigned', () => {
    const domRef = React.createRef();
    const wrapper = mount(
      <CSSMotion motionName="transition" visible ref={domRef}>
        {(props, ref) => <div ref={ref} />}
      </CSSMotion>,
    );
    act(() => {
      jest.runAllTimers();
      wrapper.update();
    });
    expect(ReactDOM.findDOMNode).not.toHaveBeenCalled();
  });
});
}); | the_stack |
* @module OrbitGT
*/
//package orbitgt.pointcloud.model;
// Width-documenting aliases mirroring the primitive types of the original Java
// source (see the `//package orbitgt...` header); at runtime they are all
// plain JavaScript numbers — no range checking is implied.
type int8 = number;
type int16 = number;
type int32 = number;
type float32 = number;
type float64 = number;
import { ABuffer } from "../../system/buffer/ABuffer";
import { LittleEndian } from "../../system/buffer/LittleEndian";
import { InStream } from "../../system/io/InStream";
import { OutStream } from "../../system/io/OutStream";
import { ALong } from "../../system/runtime/ALong";
import { ASystem } from "../../system/runtime/ASystem";
import { Numbers } from "../../system/runtime/Numbers";
import { AttributeTypes } from "./AttributeTypes";
/**
* Class AttributeValue holds a single (typed) attribute value.
*
* @version 1.0 August 2013
*/
/** @internal */
export class AttributeValue {
/** The 'false' value */
public static readonly FALSE: AttributeValue = AttributeValue.createBoolean(false);
/** The 'true' value */
public static readonly TRUE: AttributeValue = AttributeValue.createBoolean(true);
/** The type */
private _type: int32;
/** The value for boolean/int1/int2/int4 types */
private _valueI4: int32;
/** The value for int8 types */
private _valueI8: ALong;
/** The value for float4 types */
private _valueF4: float32;
/** The value for float8 types */
private _valueF8: float64;
/**
* Create a new (empty) value.
*/
public constructor() {
this._type = 0;
this._valueI4 = 0;
this._valueI8 = ALong.ZERO;
this._valueF4 = 0.0;
this._valueF8 = 0.0;
}
/**
* Create a new value.
* @param value the value.
*/
public static createBoolean(value: boolean): AttributeValue {
let avalue: AttributeValue = new AttributeValue();
avalue.setBoolean(value);
return avalue;
}
/**
* Create a new value.
* @param value the value.
*/
public static createInt1(value: int32): AttributeValue {
let avalue: AttributeValue = new AttributeValue();
avalue.setInt1(value);
return avalue;
}
/**
* Create a new value.
* @param value the value.
*/
public static createInt2(value: int32): AttributeValue {
let avalue: AttributeValue = new AttributeValue();
avalue.setInt2(value);
return avalue;
}
/**
* Create a new value.
* @param value the value.
*/
public static createInt4(value: int32): AttributeValue {
let avalue: AttributeValue = new AttributeValue();
avalue.setInt4(value);
return avalue;
}
/**
* Create a new value.
* @param value the value.
*/
public static createInt8(value: ALong): AttributeValue {
let avalue: AttributeValue = new AttributeValue();
avalue.setInt8(value);
return avalue;
}
/**
* Create a new value.
* @param value the value.
*/
public static createFloat4(value: float32): AttributeValue {
let avalue: AttributeValue = new AttributeValue();
avalue.setFloat4(value);
return avalue;
}
/**
* Create a new value.
* @param value the value.
*/
public static createFloat8(value: float64): AttributeValue {
let avalue: AttributeValue = new AttributeValue();
avalue.setFloat8(value);
return avalue;
}
/**
* Create a new value.
* @param value the value.
*/
public static createColor(value: int32): AttributeValue {
let avalue: AttributeValue = new AttributeValue();
avalue.setColor(value);
return avalue;
}
/**
* Get the type.
* @return the type.
*/
public getType(): int32 {
return this._type;
}
/**
* Clear the value.
*/
public clear(): void {
this._type = 0;
}
/**
* Is this an empty value?
* @return true for empty.
*/
public isEmpty(): boolean {
return (this._type == 0);
}
/**
* Get the value.
* @return the value.
*/
public getBoolean(): boolean {
return (this._valueI4 != 0);
}
/**
* Get the value.
* @return the value.
*/
public getBooleanAsInt(): int32 {
return this._valueI4;
}
/**
* Set the value.
* @param value the new value.
*/
public setBoolean(value: boolean): void {
this._type = AttributeTypes.TYPE_BOOLEAN;
if (value) this._valueI4 = 1;
else this._valueI4 = 0;
}
/**
* Set the value.
* @param value the new value.
*/
public setBooleanFromInt(value: int32): void {
this._type = AttributeTypes.TYPE_BOOLEAN;
if (value == 0) this._valueI4 = 0;
else this._valueI4 = 1;
}
/**
* Get the value.
* @return the value.
*/
public getInt1(): int32 {
return this._valueI4;
}
/**
* Set the value.
* @param value the new value.
*/
public setInt1(value: int32): void {
this._type = AttributeTypes.TYPE_INT1;
this._valueI4 = value;
}
/**
* Get the value.
* @return the value.
*/
public getInt2(): int32 {
return this._valueI4;
}
/**
* Set the value.
* @param value the new value.
*/
public setInt2(value: int32): void {
this._type = AttributeTypes.TYPE_INT2;
this._valueI4 = value;
}
/**
* Get the value.
* @return the value.
*/
public getInt4(): int32 {
return this._valueI4;
}
/**
* Set the value.
* @param value the new value.
*/
public setInt4(value: int32): void {
this._type = AttributeTypes.TYPE_INT4;
this._valueI4 = value;
}
/**
* Get the value.
* @return the value.
*/
public getInt8(): ALong {
return this._valueI8;
}
/**
* Set the value.
* @param value the new value.
*/
public setInt8(value: ALong): void {
this._type = AttributeTypes.TYPE_INT8;
this._valueI8 = value;
}
/**
* Get the value.
* @return the value.
*/
public getFloat4(): float32 {
return this._valueF4;
}
/**
* Set the value.
* @param value the new value.
*/
public setFloat4(value: float32): void {
this._type = AttributeTypes.TYPE_FLOAT4;
this._valueF4 = value;
}
/**
* Get the value.
* @return the value.
*/
public getFloat8(): float64 {
return this._valueF8;
}
/**
* Set the value.
* @param value the new value.
*/
public setFloat8(value: float64): void {
this._type = AttributeTypes.TYPE_FLOAT8;
this._valueF8 = value;
}
/**
* Get the value.
* @return the value.
*/
public getColor(): int32 {
return this._valueI4;
}
/**
* Set the value.
* @param value the new value.
*/
public setColor(value: int32): void {
this._type = AttributeTypes.TYPE_COLOR;
this._valueI4 = value;
}
/**
* Check if the value equals another value.
* @param other the other value.
* @return true if same.
*/
public same(other: AttributeValue): boolean {
/* Check the type */
if (other._type != this._type) return false;
/* Check the value */
if (this._type == AttributeTypes.TYPE_BOOLEAN) return (other._valueI4 == this._valueI4);
if (this._type == AttributeTypes.TYPE_INT1) return (other._valueI4 == this._valueI4);
if (this._type == AttributeTypes.TYPE_INT2) return (other._valueI4 == this._valueI4);
if (this._type == AttributeTypes.TYPE_INT4) return (other._valueI4 == this._valueI4);
if (this._type == AttributeTypes.TYPE_INT8) return (other._valueI8.same(this._valueI8));
if (this._type == AttributeTypes.TYPE_FLOAT4) return (other._valueF4 == this._valueF4);
if (this._type == AttributeTypes.TYPE_FLOAT8) return (other._valueF8 == this._valueF8);
if (this._type == AttributeTypes.TYPE_COLOR) return (other._valueI4 == this._valueI4);
/* Empty value */
return true;
}
/**
* Copy to another value.
* @param other the other value to copy to.
*/
public copyTo(other: AttributeValue): void {
/* Copy the type */
other._type = this._type;
/* Check the value */
if (this._type == AttributeTypes.TYPE_BOOLEAN) { other._valueI4 = this._valueI4; return; }
if (this._type == AttributeTypes.TYPE_INT1) { other._valueI4 = this._valueI4; return; }
if (this._type == AttributeTypes.TYPE_INT2) { other._valueI4 = this._valueI4; return; }
if (this._type == AttributeTypes.TYPE_INT4) { other._valueI4 = this._valueI4; return; }
if (this._type == AttributeTypes.TYPE_INT8) { other._valueI8 = this._valueI8; return; }
if (this._type == AttributeTypes.TYPE_FLOAT4) { other._valueF4 = this._valueF4; return; }
if (this._type == AttributeTypes.TYPE_FLOAT8) { other._valueF8 = this._valueF8; return; }
if (this._type == AttributeTypes.TYPE_COLOR) { other._valueI4 = this._valueI4; return; }
/* Empty value */
}
/**
* Copy the value.
* @return the copied value.
*/
public copy(): AttributeValue {
let copy: AttributeValue = new AttributeValue();
this.copyTo(copy);
return copy;
}
/**
* Copy a list of values.
* @return the copied values.
*/
public static copyList(list: Array<AttributeValue>): Array<AttributeValue> {
if (list == null) return null;
let list2: Array<AttributeValue> = new Array<AttributeValue>(list.length);
for (let i: number = 0; i < list.length; i++) list2[i] = list[i].copy();
return list2;
}
/**
* Get the value as a string.
* @return the string.
*/
public asString(): string {
if (this._type == AttributeTypes.TYPE_BOOLEAN) return "" + this.getBoolean();
else if (this._type == AttributeTypes.TYPE_INT1) return "" + this.getInt1();
else if (this._type == AttributeTypes.TYPE_INT2) return "" + this.getInt2();
else if (this._type == AttributeTypes.TYPE_INT4) return "" + this.getInt4();
else if (this._type == AttributeTypes.TYPE_INT8) return "" + this.getInt8().toString();
else if (this._type == AttributeTypes.TYPE_FLOAT4) return "" + this.getFloat4();
else if (this._type == AttributeTypes.TYPE_FLOAT8) return "" + this.getFloat8();
else if (this._type == AttributeTypes.TYPE_COLOR) return Numbers.rgbToString(this.getColor());
return "";
}
/**
* The standard toString method.
* @see Object#toString
*/
public toString(): string {
return "[AttributeValue:type=" + AttributeTypes.getTypeName(this._type) + ",value=" + this.asString() + "]";
}
/**
* Create a default attribute value.
* @param type the type of value.
* @return a default value.
*/
public static createDefault(type: int32): AttributeValue {
if (type == AttributeTypes.TYPE_BOOLEAN) return AttributeValue.createBoolean(false);
if (type == AttributeTypes.TYPE_INT1) return AttributeValue.createInt1(0);
if (type == AttributeTypes.TYPE_INT2) return AttributeValue.createInt2(0);
if (type == AttributeTypes.TYPE_INT4) return AttributeValue.createInt4(0);
if (type == AttributeTypes.TYPE_INT8) return AttributeValue.createInt8(ALong.ZERO);
if (type == AttributeTypes.TYPE_FLOAT4) return AttributeValue.createFloat4(0.0);
if (type == AttributeTypes.TYPE_FLOAT8) return AttributeValue.createFloat8(0.0);
if (type == AttributeTypes.TYPE_COLOR) return AttributeValue.createColor(0);
ASystem.assertNot(true, "Cannot create attribute value of type " + type);
return null;
}
/**
* Read an attribute value.
* @param buffer the buffer to read from.
* @param bufferOffset the buffer offset to read from.
* @param attributeType the type of the attribute.
* @param the value to read into.
*/
public static readFromBufferTo(buffer: ABuffer, bufferOffset: int32, attributeType: int32, value: AttributeValue): void {
if (attributeType == AttributeTypes.TYPE_BOOLEAN) value.setBoolean(LittleEndian.readBufferByte(buffer, bufferOffset) != 0);
else if (attributeType == AttributeTypes.TYPE_INT1) value.setInt1(LittleEndian.readBufferByte(buffer, bufferOffset));
else if (attributeType == AttributeTypes.TYPE_INT2) value.setInt2(LittleEndian.readBufferShort(buffer, bufferOffset));
else if (attributeType == AttributeTypes.TYPE_INT4) value.setInt4(LittleEndian.readBufferInt(buffer, bufferOffset));
else if (attributeType == AttributeTypes.TYPE_INT8) value.setInt8(LittleEndian.readBufferLong(buffer, bufferOffset));
else if (attributeType == AttributeTypes.TYPE_FLOAT4) value.setFloat4(LittleEndian.readBufferFloat(buffer, bufferOffset));
else if (attributeType == AttributeTypes.TYPE_FLOAT8) value.setFloat8(LittleEndian.readBufferDouble(buffer, bufferOffset));
else if (attributeType == AttributeTypes.TYPE_COLOR) value.setColor(LittleEndian.readBufferInt3(buffer, bufferOffset));
else ASystem.assertNot(true, "Cannot read attribute value type " + attributeType);
}
/**
* Read an attribute value.
* @param input the input stream to read from.
* @param attributeType the type of the attribute.
* @param the value to read into.
*/
public static readFromStreamTo(stream: InStream, attributeType: int32, value: AttributeValue): void {
if (attributeType == AttributeTypes.TYPE_BOOLEAN) value.setBoolean(LittleEndian.readStreamByte(stream) != 0);
else if (attributeType == AttributeTypes.TYPE_INT1) value.setInt1(LittleEndian.readStreamByte(stream));
else if (attributeType == AttributeTypes.TYPE_INT2) value.setInt2(LittleEndian.readStreamShort(stream));
else if (attributeType == AttributeTypes.TYPE_INT4) value.setInt4(LittleEndian.readStreamInt(stream));
else if (attributeType == AttributeTypes.TYPE_INT8) value.setInt8(LittleEndian.readStreamLong(stream));
else if (attributeType == AttributeTypes.TYPE_FLOAT4) value.setFloat4(LittleEndian.readStreamFloat(stream));
else if (attributeType == AttributeTypes.TYPE_FLOAT8) value.setFloat8(LittleEndian.readStreamDouble(stream));
else if (attributeType == AttributeTypes.TYPE_COLOR) value.setColor(LittleEndian.readStreamInt3(stream));
else ASystem.assertNot(true, "Cannot read attribute value type " + attributeType);
}
/**
* Read an attribute value.
* @param input the input stream to read from.
* @param attributeType the type of the attribute.
* @return the value.
*/
public static readFromStream(stream: InStream, attributeType: int32): AttributeValue {
let value: AttributeValue = new AttributeValue();
AttributeValue.readFromStreamTo(stream, attributeType, value);
return value;
}
/**
* Write an attribute value.
* @param output the output stream to write to.
* @param attributeType the type of the attribute.
* @param value the value of the attribute.
*/
public static writeToStream(stream: OutStream, attributeType: int32, value: AttributeValue): void {
if (attributeType == AttributeTypes.TYPE_BOOLEAN) LittleEndian.writeStreamByte(stream, value.getBooleanAsInt());
else if (attributeType == AttributeTypes.TYPE_INT1) LittleEndian.writeStreamByte(stream, value.getInt1());
else if (attributeType == AttributeTypes.TYPE_INT2) LittleEndian.writeStreamShort(stream, value.getInt2());
else if (attributeType == AttributeTypes.TYPE_INT4) LittleEndian.writeStreamInt(stream, value.getInt4());
else if (attributeType == AttributeTypes.TYPE_INT8) LittleEndian.writeStreamLong(stream, value.getInt8());
else if (attributeType == AttributeTypes.TYPE_FLOAT4) LittleEndian.writeStreamFloat(stream, value.getFloat4());
else if (attributeType == AttributeTypes.TYPE_FLOAT8) LittleEndian.writeStreamDouble(stream, value.getFloat8());
else if (attributeType == AttributeTypes.TYPE_COLOR) LittleEndian.writeStreamInt3(stream, value.getColor());
else ASystem.assertNot(true, "Cannot write attribute value type " + attributeType);
}
} | the_stack |
import * as THREE from 'three';
import React, { useEffect, useState, useRef } from 'react';
import SampleType from '../../game/data/sampleType';
import {
GetInventoryMapping,
GetInventoryRows,
GetInventoryColumns,
GetItemResourceIndex,
MapItem,
CanUseItem,
} from '../../game/data/inventory';
import { getText } from '../../resources';
import '../styles/inventory.scss';
import {
createOverlayScene,
createOverlayClock,
createOverlayCanvas,
createOverlayRenderer,
loadSceneInventoryModel,
} from './overlay';
import { ControlsState } from '../../game/ControlsState';
import { getParams } from '../../params';
import { heroUseItem } from '../../game/loop/hero';
// NOTE(review): the meaning of these resource constants is not visible in this
// section — presumably indices/offsets into the game's resource tables used by
// the inventory overlay; confirm against the resources module.
const InventoryObjectsIndex = 4;
const InventoryTextOffset = 100;
// True when the configured game is LBA1 (from the runtime params).
const isLBA1 = getParams().game === 'lba1';
/**
 * Full-screen inventory overlay.
 *
 * Renders a grid of item slots, draws a 3D model of each owned item into a
 * shared overlay canvas (one scissored viewport per slot), and handles
 * keyboard/gamepad navigation plus item usage.
 *
 * Props:
 * - game: game facade providing state, audio manager, etc.
 * - closeInventory: callback invoked to dismiss the overlay.
 */
const Inventory = ({ game, closeInventory }: any) => {
    const [selectedSlot, setSelectedSlot] = useState(game.getState().hero.inventorySlot);
    const [clock, setClock] = useState(null);
    const [canvas, setCanvas] = useState(null);
    const [renderer, setRenderer] = useState(null);
    const [invScenes, setInvScenes] = useState(null);
    const [models, setModels] = useState(null);
    const [invText, setInvText] = useState(null);
    const inventoryRef = useRef(null);

    // One ref per grid cell so renderLoop can read each item's screen rect.
    // NOTE(review): calling useRef inside a loop violates the rules of hooks
    // in general; it is only safe here because the grid dimensions never
    // change between renders, so the hook count stays constant.
    const itemNodes = {};
    for (let i = 0; i < GetInventoryRows(); i += 1) {
        for (let j = 0; j < GetInventoryColumns(); j += 1) {
            const slot = i * GetInventoryColumns() + j;
            itemNodes[slot] = useRef(null);
        }
    }

    // Map a slot to its state-dependent item id (e.g. blowgun vs blowtron).
    const resolveItem = (slot: number) => {
        const rawItemId = GetInventoryMapping()[slot];
        const inventoryFlags = game.getState().flags.inventory;
        const state = inventoryFlags[rawItemId];
        return MapItem(rawItemId, state);
    };

    // Map a slot to the 3D model resource index of its current item.
    const resolveItemResource = (slot: number) => {
        return GetItemResourceIndex(resolveItem(slot));
    };

    // One-time setup: clock, shared canvas and one overlay scene per slot.
    useEffect(() => {
        setClock(createOverlayClock());
        setCanvas(createOverlayCanvas('inventory-canvas'));
        setModels([]);
        const scenes = {};
        for (let i = 0; i < GetInventoryRows(); i += 1) {
            for (let j = 0; j < GetInventoryColumns(); j += 1) {
                const slot = i * GetInventoryColumns() + j;
                scenes[slot] = createOverlayScene(3);
            }
        }
        setInvScenes(scenes);
    }, []);

    // Attach the canvas to the DOM and lazily create the renderer.
    useEffect(() => {
        if (canvas) {
            if (renderer === null) {
                setRenderer(createOverlayRenderer(canvas, 'inventory'));
            }
            inventoryRef.current.appendChild(canvas);
        }
    }, [canvas]);

    // Dispose the renderer when it is replaced or the component unmounts.
    useEffect(() => {
        return () => {
            if (renderer) {
                renderer.dispose();
            }
        };
    }, [renderer]);

    // Load the model for a given slot/resource pair and remember which
    // resource it was loaded for, so state changes can trigger a reload.
    const loadModelForResource = async (slot: number, resource: number) => {
        models[slot] = {
            model: await loadSceneInventoryModel(invScenes[slot], resource),
            resourceId: resource,
        };
        setModels(models);
    };

    const loadModel = async (slot) => {
        // Models depend on the item state (e.g. blowgun vs blowtron).
        const resourceId = resolveItemResource(slot);
        await loadModelForResource(slot, resourceId);
    };

    // Once the clock exists, start it and load a model for every owned item.
    useEffect(() => {
        if (clock) {
            clock.start();
            const questFlags = game.getState().flags.quest;
            for (let i = 0; i < GetInventoryRows(); i += 1) {
                for (let j = 0; j < GetInventoryColumns(); j += 1) {
                    const slot = i * GetInventoryColumns() + j;
                    if (questFlags[GetInventoryMapping()[slot]]) {
                        loadModel(slot);
                    }
                }
            }
        }
        return () => {
            if (clock) {
                clock.stop();
            }
        };
    }, [clock]);

    /**
     * Shared input handler for keyboard and gamepad. Moves the selection
     * (wrapping at grid edges), uses the selected item on action and closes
     * the inventory on escape/shift.
     */
    const listener = (key: string | number, controlsState: ControlsState) => {
        let action = false;
        let newSlot = -1;
        if (key === 'ArrowLeft' || controlsState?.left === 1) {
            if (selectedSlot % GetInventoryColumns() === 0) {
                newSlot = selectedSlot + GetInventoryColumns() - 1;
            } else {
                newSlot = selectedSlot - 1;
            }
            action = true;
        }
        if (key === 'ArrowRight' || controlsState?.right === 1) {
            if ((selectedSlot + 1) % GetInventoryColumns() === 0) {
                newSlot = selectedSlot - GetInventoryColumns() + 1;
            } else {
                newSlot = selectedSlot + 1;
            }
            action = true;
        }
        if (key === 'ArrowUp' || controlsState?.up === 1) {
            if (selectedSlot < GetInventoryColumns()) {
                newSlot = selectedSlot + GetInventoryColumns() * (GetInventoryRows() - 1);
            } else {
                newSlot = selectedSlot - GetInventoryColumns();
            }
            action = true;
        }
        if (key === 'ArrowDown' || controlsState?.down === 1) {
            if (selectedSlot >=
                GetInventoryColumns() * GetInventoryRows() - GetInventoryColumns()) {
                newSlot = selectedSlot - GetInventoryColumns() * (GetInventoryRows() - 1);
            } else {
                newSlot = selectedSlot + GetInventoryColumns();
            }
            action = true;
        }
        if (key === 'Enter' || controlsState?.action === 1) {
            // Use the raw (state-independent) item ID here as it may trigger script actions.
            const slot = game.getState().hero.inventorySlot;
            const rawItemId = GetInventoryMapping()[slot];
            const itemId = resolveItem(slot);
            if (game.getState().flags.quest[rawItemId] >= 1 && CanUseItem(itemId)) {
                heroUseItem(game, rawItemId, itemId);
                closeInventory();
            } else {
                game.getAudioManager().playSample(SampleType.ERROR);
            }
        }
        if (key === 'Escape' || controlsState?.shift === 1) {
            closeInventory();
        }
        if (action) {
            setSelectedSlot(newSlot);
            game.getState().hero.inventorySlot = newSlot;
        }
    };

    const keyboardListener = (event) => {
        listener(event.code, null);
        // Bug fix: previously relied on the deprecated implicit global
        // `window.event` inside listener(); consume the event that was
        // actually delivered instead.
        event.preventDefault();
        event.stopPropagation();
    };

    const gamepadListener = (event) => {
        listener(null, event.detail as ControlsState);
        // Same fix as above: the global `window.event` is not set for
        // synthetically dispatched events, so act on the real one.
        event.preventDefault();
        event.stopPropagation();
    };

    // Re-registered on every render (no dependency array) so the handlers
    // always close over the latest selectedSlot.
    useEffect(() => {
        window.addEventListener('keydown', keyboardListener);
        window.addEventListener('lbagamepadchanged', gamepadListener);
        return () => {
            // Bug fix: the cleanup used to call addEventListener for the
            // gamepad event instead of removeEventListener, leaking one
            // listener per render.
            window.removeEventListener('lbagamepadchanged', gamepadListener);
            window.removeEventListener('keydown', keyboardListener);
        };
    });

    /**
     * Renders one inventory item's model into the portion of the shared
     * canvas that overlaps its grid cell, using a scissored viewport.
     */
    const renderLoop = (time, slot, selected, item) => {
        const model = models[slot];
        const m = model?.model;
        if (!item || !item.current || !m) {
            return;
        }
        // If the item state has changed, request a reload and skip rendering.
        const resourceId = resolveItemResource(slot);
        if (model.resourceId !== resourceId) {
            models[slot] = null;
            loadModelForResource(slot, resourceId);
            return;
        }
        const s = invScenes[slot];
        const canvasClip = canvas.getBoundingClientRect();
        const { left, bottom, width, height } = item.current.getBoundingClientRect();
        // Set canvas size once with same aspect ratio as the inventory item box
        if (canvas.width === 0) {
            canvas.width = canvas.style.width = width * 8;
            canvas.height = canvas.style.height = height * 8;
            renderer.resize(canvas.width, canvas.height);
        }
        const itemLeft = left - canvasClip.left;
        const itemBottom = canvasClip.bottom - bottom;
        renderer.stats.begin();
        renderer.setViewport(itemLeft, itemBottom, width, height);
        renderer.setScissor(itemLeft, itemBottom, width, height);
        renderer.setScissorTest(true);
        s.camera.update(m, selected, { x: 0, y: 0 }, 2.5, time);
        // Shift the camera target down by half the model height.
        // NOTE(review): THREE is not among this file's visible imports —
        // confirm it is available as a global or imported elsewhere.
        const h = m.boundingBox.max.y - m.boundingBox.min.y;
        s.camera.controlNode.position.add(new THREE.Vector3(0, h * -0.5, 0));
        renderer.render(s);
        renderer.stats.end();
    };

    // Drive the per-frame render of all owned items while the overlay is up.
    useEffect(() => {
        if (renderer) {
            renderer.threeRenderer.setAnimationLoop(() => {
                const time = {
                    delta: Math.min(clock.getDelta(), 0.05),
                    elapsed: clock.getElapsedTime(),
                };
                const questFlags = game.getState().flags.quest;
                for (let i = 0; i < GetInventoryRows(); i += 1) {
                    for (let j = 0; j < GetInventoryColumns(); j += 1) {
                        const slot = i * GetInventoryColumns() + j;
                        if (questFlags[GetInventoryMapping()[slot]]) {
                            renderLoop(time, slot, selectedSlot === slot, itemNodes[slot]);
                        }
                    }
                }
            });
        }
        return () => {
            if (renderer) {
                renderer.threeRenderer.setAnimationLoop(null);
            }
        };
    }, [renderer, selectedSlot]);

    // Refresh the description text whenever the selection changes.
    useEffect(() => {
        // Text depends on item state (e.g. blowgun vs blowtron).
        const questFlags = game.getState().flags.quest;
        const rawItemId = GetInventoryMapping()[selectedSlot];
        const resourceId = resolveItemResource(selectedSlot);
        if (questFlags[rawItemId]) {
            getText(InventoryObjectsIndex).then((res) => {
                setInvText(res[InventoryTextOffset + resourceId].value);
            });
        } else {
            setInvText('');
        }
    }, [selectedSlot]);

    // Build the grid of slot nodes; owned items get the inInventory class,
    // and quest-flag values above 1 are shown as a quantity badge.
    const inventorySlots = [];
    for (let i = 0; i < GetInventoryRows(); i += 1) {
        for (let j = 0; j < GetInventoryColumns(); j += 1) {
            const slot = i * GetInventoryColumns() + j;
            const value = game.getState().flags.quest[GetInventoryMapping()[slot]];
            const inInventory = value >= 1;
            const itemId = resolveItem(slot);
            const equipped = itemId === game.getState().hero.equippedItemId;
            inventorySlots.push(
                <div
                    ref={itemNodes[slot]}
                    key={slot}
                    className={`
                        inventoryItem ${selectedSlot === slot ? 'selected' : ''}
                        ${inInventory === true ? 'inInventory' : ''}
                        ${equipped === true ? 'equipped' : ''}
                    `}
                >
                    {value > 1 && (
                        <div className="inventoryValue">
                            {value}
                        </div>
                    )}
                </div>
            );
        }
    }

    return (
        <div className={`
            inventory
            ${isLBA1 === true ? 'inventoryLba1' : ''}
        `}>
            <div className={`
                inventoryItems
                ${isLBA1 === true ? 'inventoryItemsLba1' : ''}
            `}
                ref={inventoryRef}
            >
                {inventorySlots}
            </div>
            <div className={`
                inventoryText
                ${isLBA1 === true ? 'inventoryTextLba1' : ''}
            `}>
                <p>{invText}</p>
            </div>
        </div>
    );
};

export default Inventory;
import {
AddonModAssignAssign,
AddonModAssignSubmission,
AddonModAssignPlugin,
AddonModAssign,
} from '@addons/mod/assign/services/assign';
import { AddonModAssignHelper } from '@addons/mod/assign/services/assign-helper';
import { AddonModAssignOffline, AddonModAssignSubmissionsDBRecordFormatted } from '@addons/mod/assign/services/assign-offline';
import { AddonModAssignSubmissionHandler } from '@addons/mod/assign/services/submission-delegate';
import { Injectable, Type } from '@angular/core';
import { CoreError } from '@classes/errors/error';
import { CoreFileHelper } from '@services/file-helper';
import { CoreTextUtils } from '@services/utils/text';
import { CoreUtils } from '@services/utils/utils';
import { CoreWSFile } from '@services/ws';
import { makeSingleton, Translate } from '@singletons';
import { AddonModAssignSubmissionOnlineTextComponent } from '../component/onlinetext';
/**
 * Handler for online text submission plugin.
 */
@Injectable({ providedIn: 'root' })
export class AddonModAssignSubmissionOnlineTextHandlerService implements AddonModAssignSubmissionHandler {

    name = 'AddonModAssignSubmissionOnlineTextHandler';
    type = 'onlinetext';

    /**
     * Whether the plugin can be edited in offline for existing submissions. In general, this should return false if the
     * plugin uses Moodle filters. The reason is that the app only prefetches filtered data, and the user should edit
     * unfiltered data.
     *
     * @return Boolean or promise resolved with boolean: whether it can be edited in offline.
     */
    canEditOffline(): boolean {
        // This plugin uses Moodle filters, it cannot be edited in offline.
        return false;
    }

    /**
     * Check if a plugin has no data.
     *
     * @param assign The assignment.
     * @param plugin The plugin object.
     * @return Whether the plugin is empty.
     */
    isEmpty(assign: AddonModAssignAssign, plugin: AddonModAssignPlugin): boolean {
        const text = AddonModAssign.getSubmissionPluginText(plugin, true);

        // If the text is empty, we can ignore files because they won't be visible anyways.
        return text.trim().length === 0;
    }

    /**
     * This function will be called when the user wants to create a new submission based on the previous one.
     * It should add to pluginData the data to send to server based in the data in plugin (previous attempt).
     *
     * @param assign The assignment.
     * @param plugin The plugin object.
     * @param pluginData Object where to store the data to send.
     * @param userId User ID. If not defined, site's current user.
     * @param siteId Site ID. If not defined, current site.
     * @return If the function is async, it should return a Promise resolved when done.
     */
    async copySubmissionData(
        assign: AddonModAssignAssign,
        plugin: AddonModAssignPlugin,
        pluginData: AddonModAssignSubmissionOnlineTextPluginData,
        userId?: number,
        siteId?: string,
    ): Promise<void> {
        const text = AddonModAssign.getSubmissionPluginText(plugin, true);
        const files = AddonModAssign.getSubmissionPluginAttachments(plugin);
        let itemId = 0;

        if (files.length) {
            // Re-upload the files.
            itemId = await AddonModAssignHelper.uploadFiles(assign.id, files, siteId);
        }

        pluginData.onlinetext_editor = {
            text: text,
            format: 1,
            itemid: itemId,
        };
    }

    /**
     * Return the Component to use to display the plugin data, either in read or in edit mode.
     * It's recommended to return the class of the component, but you can also return an instance of the component.
     *
     * @return The component (or promise resolved with component) to use, undefined if not found.
     */
    getComponent(): Type<unknown> {
        return AddonModAssignSubmissionOnlineTextComponent;
    }

    /**
     * Get files used by this plugin.
     * The files returned by this function will be prefetched when the user prefetches the assign.
     *
     * @param assign The assignment.
     * @param submission The submission.
     * @param plugin The plugin object.
     * @return The files (or promise resolved with the files).
     */
    getPluginFiles(
        assign: AddonModAssignAssign,
        submission: AddonModAssignSubmission,
        plugin: AddonModAssignPlugin,
    ): CoreWSFile[] {
        return AddonModAssign.getSubmissionPluginAttachments(plugin);
    }

    /**
     * Get the size of data (in bytes) this plugin will send to copy a previous submission.
     *
     * @param assign The assignment.
     * @param plugin The plugin object.
     * @return The size (or promise resolved with size).
     */
    async getSizeForCopy(assign: AddonModAssignAssign, plugin: AddonModAssignPlugin): Promise<number> {
        const text = AddonModAssign.getSubmissionPluginText(plugin, true);
        const files = AddonModAssign.getSubmissionPluginAttachments(plugin);

        const filesSize = await CoreFileHelper.getTotalFilesSize(files);

        return text.length + filesSize;
    }

    /**
     * Get the size of data (in bytes) this plugin will send to add or edit a submission.
     *
     * @param assign The assignment.
     * @param submission The submission.
     * @param plugin The plugin object.
     * @return The size (or promise resolved with size).
     */
    getSizeForEdit(
        assign: AddonModAssignAssign,
        submission: AddonModAssignSubmission,
        plugin: AddonModAssignPlugin,
    ): number {
        const text = AddonModAssign.getSubmissionPluginText(plugin, true);

        return text.length;
    }

    /**
     * Get the text to submit, restoring pluginfile URLs from the plugin's file area.
     *
     * @param plugin The plugin object.
     * @param inputData Data entered by the user for the submission.
     * @return Text to submit.
     */
    protected getTextToSubmit(plugin: AddonModAssignPlugin, inputData: AddonModAssignSubmissionOnlineTextData): string {
        const text = inputData.onlinetext_editor_text;
        const files = plugin.fileareas && plugin.fileareas[0] && plugin.fileareas[0].files || [];

        // `files` already falls back to an empty array above, so pass it directly.
        return CoreTextUtils.restorePluginfileUrls(text, files);
    }

    /**
     * Check if the submission data has changed for this plugin.
     *
     * @param assign The assignment.
     * @param submission The submission.
     * @param plugin The plugin object.
     * @param inputData Data entered by the user for the submission.
     * @return Boolean (or promise resolved with boolean): whether the data has changed.
     */
    async hasDataChanged(
        assign: AddonModAssignAssign,
        submission: AddonModAssignSubmission,
        plugin: AddonModAssignPlugin,
        inputData: AddonModAssignSubmissionOnlineTextData,
    ): Promise<boolean> {
        // Get the original text from plugin or offline.
        const offlineData =
            await CoreUtils.ignoreErrors(AddonModAssignOffline.getSubmission(assign.id, submission.userid));

        let initialText = '';
        if (offlineData && offlineData.plugindata && offlineData.plugindata.onlinetext_editor) {
            initialText = (<AddonModAssignSubmissionOnlineTextPluginData>offlineData.plugindata).onlinetext_editor.text;
        } else {
            // No offline data found, get text from plugin.
            initialText = plugin.editorfields && plugin.editorfields[0] ? plugin.editorfields[0].text : '';
        }

        // Check if text has changed. Both sides are strings, so use strict inequality.
        return initialText !== this.getTextToSubmit(plugin, inputData);
    }

    /**
     * Whether or not the handler is enabled on a site level.
     *
     * @return True or promise resolved with true if enabled.
     */
    async isEnabled(): Promise<boolean> {
        return true;
    }

    /**
     * Whether or not the handler is enabled for edit on a site level.
     *
     * @return Whether or not the handler is enabled for edit on a site level.
     */
    isEnabledForEdit(): boolean {
        return true;
    }

    /**
     * Prepare and add to pluginData the data to send to the server based on the input data.
     * Enforces the plugin's word limit if it is enabled.
     *
     * @param assign The assignment.
     * @param submission The submission.
     * @param plugin The plugin object.
     * @param inputData Data entered by the user for the submission.
     * @param pluginData Object where to store the data to send.
     * @return If the function is async, it should return a Promise resolved when done.
     * @throws CoreError when the word limit is enabled and exceeded.
     */
    prepareSubmissionData(
        assign: AddonModAssignAssign,
        submission: AddonModAssignSubmission,
        plugin: AddonModAssignPlugin,
        inputData: AddonModAssignSubmissionOnlineTextData,
        pluginData: AddonModAssignSubmissionOnlineTextPluginData,
    ): void | Promise<void> {
        let text = this.getTextToSubmit(plugin, inputData);

        // Check word limit.
        const configs = AddonModAssignHelper.getPluginConfig(assign, 'assignsubmission', plugin.type);
        if (parseInt(configs.wordlimitenabled, 10)) {
            const words = CoreTextUtils.countWords(text);
            const wordlimit = parseInt(configs.wordlimit, 10);
            if (words > wordlimit) {
                const params = { $a: { count: words, limit: wordlimit } };
                const message = Translate.instant('addon.mod_assign_submission_onlinetext.wordlimitexceeded', params);

                throw new CoreError(message);
            }
        }

        // Add some HTML to the text if needed.
        text = CoreTextUtils.formatHtmlLines(text);

        pluginData.onlinetext_editor = {
            text: text,
            format: 1,
            itemid: 0, // Can't add new files yet, so we use a fake itemid.
        };
    }

    /**
     * Prepare and add to pluginData the data to send to the server based on the offline data stored.
     * This will be used when performing a synchronization.
     *
     * @param assign The assignment.
     * @param submission The submission.
     * @param plugin The plugin object.
     * @param offlineData Offline data stored.
     * @param pluginData Object where to store the data to send.
     * @return If the function is async, it should return a Promise resolved when done.
     */
    prepareSyncData(
        assign: AddonModAssignAssign,
        submission: AddonModAssignSubmission,
        plugin: AddonModAssignPlugin,
        offlineData: AddonModAssignSubmissionsDBRecordFormatted,
        pluginData: AddonModAssignSubmissionOnlineTextPluginData,
    ): void | Promise<void> {
        const offlinePluginData = <AddonModAssignSubmissionOnlineTextPluginData>(offlineData && offlineData.plugindata);
        const textData = offlinePluginData.onlinetext_editor;
        if (textData) {
            // Has some data to sync.
            pluginData.onlinetext_editor = textData;
        }
    }

}
// App-wide singleton accessor for the handler service, resolved lazily
// through the injector by makeSingleton.
export const AddonModAssignSubmissionOnlineTextHandler = makeSingleton(AddonModAssignSubmissionOnlineTextHandlerService);

// Data entered by the user in the online text editor form.
export type AddonModAssignSubmissionOnlineTextData = {
// The text for this submission.
onlinetext_editor_text: string; // eslint-disable-line @typescript-eslint/naming-convention
};

// Shape of the plugin data sent to the server for an online text submission.
export type AddonModAssignSubmissionOnlineTextPluginData = {
// Editor structure.
onlinetext_editor: { // eslint-disable-line @typescript-eslint/naming-convention
text: string; // The text for this submission.
format: number; // The format for this submission.
itemid: number; // The draft area id for files attached to the submission.
};
};
import * as assert from 'assert';
import { URI } from 'vs/base/common/uri';
import { IEditorService } from 'vs/workbench/services/editor/common/editorService';
import { EditorPart } from 'vs/workbench/browser/parts/editor/editorPart';
import { IEditorGroupsService } from 'vs/workbench/services/editor/common/editorGroupsService';
import { EditorService } from 'vs/workbench/services/editor/browser/editorService';
import { IUntitledTextResourceEditorInput } from 'vs/workbench/common/editor';
import { IWorkingCopyBackupService } from 'vs/workbench/services/workingCopy/common/workingCopyBackup';
import { toResource } from 'vs/base/test/common/utils';
import { IFilesConfigurationService } from 'vs/workbench/services/filesConfiguration/common/filesConfigurationService';
import { IWorkingCopyService } from 'vs/workbench/services/workingCopy/common/workingCopyService';
import { IWorkingCopyBackup } from 'vs/workbench/services/workingCopy/common/workingCopy';
import { ILogService } from 'vs/platform/log/common/log';
import { ILifecycleService, LifecyclePhase } from 'vs/workbench/services/lifecycle/common/lifecycle';
import { WorkingCopyBackupTracker } from 'vs/workbench/services/workingCopy/common/workingCopyBackupTracker';
import { IInstantiationService } from 'vs/platform/instantiation/common/instantiation';
import { UntitledTextEditorInput } from 'vs/workbench/services/untitled/common/untitledTextEditorInput';
import { createEditorPart, InMemoryTestWorkingCopyBackupService, registerTestResourceEditor, TestServiceAccessor, toTypedWorkingCopyId, toUntypedWorkingCopyId, workbenchInstantiationService } from 'vs/workbench/test/browser/workbenchTestServices';
import { TestWorkingCopy } from 'vs/workbench/test/common/workbenchTestServices';
import { CancellationToken } from 'vs/base/common/cancellation';
import { timeout } from 'vs/base/common/async';
import { BrowserWorkingCopyBackupTracker } from 'vs/workbench/services/workingCopy/browser/workingCopyBackupTracker';
import { DisposableStore, dispose, IDisposable } from 'vs/base/common/lifecycle';
import { IWorkingCopyEditorHandler, IWorkingCopyEditorService } from 'vs/workbench/services/workingCopy/common/workingCopyEditorService';
import { bufferToReadable, VSBuffer } from 'vs/base/common/buffer';
import { isWindows } from 'vs/base/common/platform';
import { Schemas } from 'vs/base/common/network';
import { IWorkspaceTrustRequestService } from 'vs/platform/workspace/common/workspaceTrust';
import { TestWorkspaceTrustRequestService } from 'vs/workbench/services/workspaces/test/common/testWorkspaceTrustService';
suite('WorkingCopyBackupTracker (browser)', function () {
let accessor: TestServiceAccessor;
let disposables = new DisposableStore();
setup(() => {
disposables.add(registerTestResourceEditor());
});
teardown(() => {
disposables.clear();
});
class TestWorkingCopyBackupTracker extends BrowserWorkingCopyBackupTracker {
constructor(
@IWorkingCopyBackupService workingCopyBackupService: IWorkingCopyBackupService,
@IFilesConfigurationService filesConfigurationService: IFilesConfigurationService,
@IWorkingCopyService workingCopyService: IWorkingCopyService,
@ILifecycleService lifecycleService: ILifecycleService,
@ILogService logService: ILogService,
@IWorkingCopyEditorService workingCopyEditorService: IWorkingCopyEditorService,
@IEditorService editorService: IEditorService
) {
super(workingCopyBackupService, filesConfigurationService, workingCopyService, lifecycleService, logService, workingCopyEditorService, editorService);
}
protected override getBackupScheduleDelay(): number {
return 10; // Reduce timeout for tests
}
getUnrestoredBackups() {
return this.unrestoredBackups;
}
override async restoreBackups(handler: IWorkingCopyEditorHandler): Promise<void> {
return super.restoreBackups(handler);
}
}
class TestUntitledTextEditorInput extends UntitledTextEditorInput {
resolved = false;
override resolve() {
this.resolved = true;
return super.resolve();
}
}
async function createTracker(): Promise<{ accessor: TestServiceAccessor, part: EditorPart, tracker: WorkingCopyBackupTracker, workingCopyBackupService: InMemoryTestWorkingCopyBackupService, instantiationService: IInstantiationService, cleanup: () => void }> {
const disposables = new DisposableStore();
const workingCopyBackupService = new InMemoryTestWorkingCopyBackupService();
const instantiationService = workbenchInstantiationService();
instantiationService.stub(IWorkingCopyBackupService, workingCopyBackupService);
const part = await createEditorPart(instantiationService, disposables);
disposables.add(registerTestResourceEditor());
instantiationService.stub(IEditorGroupsService, part);
instantiationService.stub(IWorkspaceTrustRequestService, new TestWorkspaceTrustRequestService(false));
const editorService: EditorService = instantiationService.createInstance(EditorService);
instantiationService.stub(IEditorService, editorService);
accessor = instantiationService.createInstance(TestServiceAccessor);
const tracker = disposables.add(instantiationService.createInstance(TestWorkingCopyBackupTracker));
return { accessor, part, tracker, workingCopyBackupService: workingCopyBackupService, instantiationService, cleanup: () => disposables.dispose() };
}
async function untitledBackupTest(untitled: IUntitledTextResourceEditorInput = {}): Promise<void> {
const { accessor, cleanup, workingCopyBackupService } = await createTracker();
const untitledTextEditor = (await accessor.editorService.openEditor(untitled))?.input as UntitledTextEditorInput;
const untitledTextModel = await untitledTextEditor.resolve();
if (!untitled?.contents) {
untitledTextModel.textEditorModel?.setValue('Super Good');
}
await workingCopyBackupService.joinBackupResource();
assert.strictEqual(workingCopyBackupService.hasBackupSync(untitledTextModel), true);
untitledTextModel.dispose();
await workingCopyBackupService.joinDiscardBackup();
assert.strictEqual(workingCopyBackupService.hasBackupSync(untitledTextModel), false);
cleanup();
}
test('Track backups (untitled)', function () {
return untitledBackupTest();
});
test('Track backups (untitled with initial contents)', function () {
return untitledBackupTest({ contents: 'Foo Bar' });
});
test('Track backups (custom)', async function () {
const { accessor, cleanup, workingCopyBackupService } = await createTracker();
class TestBackupWorkingCopy extends TestWorkingCopy {
backupDelay = 0;
constructor(resource: URI) {
super(resource);
accessor.workingCopyService.registerWorkingCopy(this);
}
override async backup(token: CancellationToken): Promise<IWorkingCopyBackup> {
await timeout(this.backupDelay);
return {};
}
}
const resource = toResource.call(this, '/path/custom.txt');
const customWorkingCopy = new TestBackupWorkingCopy(resource);
// Normal
customWorkingCopy.setDirty(true);
await workingCopyBackupService.joinBackupResource();
assert.strictEqual(workingCopyBackupService.hasBackupSync(customWorkingCopy), true);
customWorkingCopy.setDirty(false);
customWorkingCopy.setDirty(true);
await workingCopyBackupService.joinBackupResource();
assert.strictEqual(workingCopyBackupService.hasBackupSync(customWorkingCopy), true);
customWorkingCopy.setDirty(false);
await workingCopyBackupService.joinDiscardBackup();
assert.strictEqual(workingCopyBackupService.hasBackupSync(customWorkingCopy), false);
// Cancellation
customWorkingCopy.setDirty(true);
await timeout(0);
customWorkingCopy.setDirty(false);
await workingCopyBackupService.joinDiscardBackup();
assert.strictEqual(workingCopyBackupService.hasBackupSync(customWorkingCopy), false);
customWorkingCopy.dispose();
cleanup();
});
async function restoreBackupsInit(): Promise<[TestWorkingCopyBackupTracker, TestServiceAccessor, IDisposable]> {
const fooFile = URI.file(isWindows ? 'c:\\Foo' : '/Foo');
const barFile = URI.file(isWindows ? 'c:\\Bar' : '/Bar');
const untitledFile1 = URI.from({ scheme: Schemas.untitled, path: 'Untitled-1' });
const untitledFile2 = URI.from({ scheme: Schemas.untitled, path: 'Untitled-2' });
const disposables = new DisposableStore();
const workingCopyBackupService = new InMemoryTestWorkingCopyBackupService();
const instantiationService = workbenchInstantiationService();
instantiationService.stub(IWorkingCopyBackupService, workingCopyBackupService);
const part = await createEditorPart(instantiationService, disposables);
instantiationService.stub(IEditorGroupsService, part);
instantiationService.stub(IWorkspaceTrustRequestService, new TestWorkspaceTrustRequestService(false));
const editorService: EditorService = instantiationService.createInstance(EditorService);
instantiationService.stub(IEditorService, editorService);
accessor = instantiationService.createInstance(TestServiceAccessor);
// Backup 2 normal files and 2 untitled files
const untitledFile1WorkingCopyId = toUntypedWorkingCopyId(untitledFile1);
const untitledFile2WorkingCopyId = toTypedWorkingCopyId(untitledFile2);
await workingCopyBackupService.backup(untitledFile1WorkingCopyId, bufferToReadable(VSBuffer.fromString('untitled-1')));
await workingCopyBackupService.backup(untitledFile2WorkingCopyId, bufferToReadable(VSBuffer.fromString('untitled-2')));
const fooFileWorkingCopyId = toUntypedWorkingCopyId(fooFile);
const barFileWorkingCopyId = toTypedWorkingCopyId(barFile);
await workingCopyBackupService.backup(fooFileWorkingCopyId, bufferToReadable(VSBuffer.fromString('fooFile')));
await workingCopyBackupService.backup(barFileWorkingCopyId, bufferToReadable(VSBuffer.fromString('barFile')));
const tracker = disposables.add(instantiationService.createInstance(TestWorkingCopyBackupTracker));
accessor.lifecycleService.phase = LifecyclePhase.Restored;
return [tracker, accessor, disposables];
}
test('Restore backups (basics, some handled)', async function () {
const [tracker, accessor, disposables] = await restoreBackupsInit();
assert.strictEqual(tracker.getUnrestoredBackups().size, 0);
let handlesCounter = 0;
let isOpenCounter = 0;
let createEditorCounter = 0;
await tracker.restoreBackups({
handles: workingCopy => {
handlesCounter++;
return workingCopy.typeId === 'testBackupTypeId';
},
isOpen: (workingCopy, editor) => {
isOpenCounter++;
return false;
},
createEditor: workingCopy => {
createEditorCounter++;
return accessor.instantiationService.createInstance(TestUntitledTextEditorInput, accessor.untitledTextEditorService.create({ initialValue: 'foo' }));
}
});
assert.strictEqual(handlesCounter, 4);
assert.strictEqual(isOpenCounter, 0);
assert.strictEqual(createEditorCounter, 2);
assert.strictEqual(accessor.editorService.count, 2);
assert.ok(accessor.editorService.editors.every(editor => editor.isDirty()));
assert.strictEqual(tracker.getUnrestoredBackups().size, 2);
for (const editor of accessor.editorService.editors) {
assert.ok(editor instanceof TestUntitledTextEditorInput);
assert.strictEqual(editor.resolved, true);
}
dispose(disposables);
});
test('Restore backups (basics, none handled)', async function () {
const [tracker, accessor, disposables] = await restoreBackupsInit();
await tracker.restoreBackups({
handles: workingCopy => false,
isOpen: (workingCopy, editor) => { throw new Error('unexpected'); },
createEditor: workingCopy => { throw new Error('unexpected'); }
});
assert.strictEqual(accessor.editorService.count, 0);
assert.strictEqual(tracker.getUnrestoredBackups().size, 4);
dispose(disposables);
});
test('Restore backups (basics, error case)', async function () {
const [tracker, , disposables] = await restoreBackupsInit();
try {
await tracker.restoreBackups({
handles: workingCopy => true,
isOpen: (workingCopy, editor) => { throw new Error('unexpected'); },
createEditor: workingCopy => { throw new Error('unexpected'); }
});
} catch (error) {
// ignore
}
assert.strictEqual(tracker.getUnrestoredBackups().size, 4);
dispose(disposables);
});
test('Restore backups (multiple handlers)', async function () {
const [tracker, accessor, disposables] = await restoreBackupsInit();
const firstHandler = tracker.restoreBackups({
handles: workingCopy => {
return workingCopy.typeId === 'testBackupTypeId';
},
isOpen: (workingCopy, editor) => {
return false;
},
createEditor: workingCopy => {
return accessor.instantiationService.createInstance(TestUntitledTextEditorInput, accessor.untitledTextEditorService.create({ initialValue: 'foo' }));
}
});
const secondHandler = tracker.restoreBackups({
handles: workingCopy => {
return workingCopy.typeId.length === 0;
},
isOpen: (workingCopy, editor) => {
return false;
},
createEditor: workingCopy => {
return accessor.instantiationService.createInstance(TestUntitledTextEditorInput, accessor.untitledTextEditorService.create({ initialValue: 'foo' }));
}
});
await Promise.all([firstHandler, secondHandler]);
assert.strictEqual(accessor.editorService.count, 4);
assert.ok(accessor.editorService.editors.every(editor => editor.isDirty()));
assert.strictEqual(tracker.getUnrestoredBackups().size, 0);
for (const editor of accessor.editorService.editors) {
assert.ok(editor instanceof TestUntitledTextEditorInput);
assert.strictEqual(editor.resolved, true);
}
dispose(disposables);
});
test('Restore backups (editors already opened)', async function () {
const [tracker, accessor, disposables] = await restoreBackupsInit();
assert.strictEqual(tracker.getUnrestoredBackups().size, 0);
let handlesCounter = 0;
let isOpenCounter = 0;
const editor1 = accessor.instantiationService.createInstance(TestUntitledTextEditorInput, accessor.untitledTextEditorService.create({ initialValue: 'foo' }));
const editor2 = accessor.instantiationService.createInstance(TestUntitledTextEditorInput, accessor.untitledTextEditorService.create({ initialValue: 'foo' }));
await accessor.editorService.openEditors([{ editor: editor1 }, { editor: editor2 }]);
editor1.resolved = false;
editor2.resolved = false;
await tracker.restoreBackups({
handles: workingCopy => {
handlesCounter++;
return workingCopy.typeId === 'testBackupTypeId';
},
isOpen: (workingCopy, editor) => {
isOpenCounter++;
return true;
},
createEditor: workingCopy => { throw new Error('unexpected'); }
});
assert.strictEqual(handlesCounter, 4);
assert.strictEqual(isOpenCounter, 4);
assert.strictEqual(accessor.editorService.count, 2);
assert.strictEqual(tracker.getUnrestoredBackups().size, 2);
for (const editor of accessor.editorService.editors) {
assert.ok(editor instanceof TestUntitledTextEditorInput);
assert.strictEqual(editor.resolved, true);
}
dispose(disposables);
});
}); | the_stack |
import * as msRest from "@azure/ms-rest-js";
import * as Mappers from "../artifacts/mappers";
import Context, { IHandlerParameters } from "../Context";
import IRequest from "../IRequest";
import IResponse from "../IResponse";
import ILogger from "./ILogger";
import { parseXML, stringifyXML } from "./xml";
/**
 * Path under which a deserialized parameter value is stored in the handler
 * parameters object: a single key, a key path, or a nested mapping of
 * property names to further paths.
 */
export declare type ParameterPath =
| string
| string[]
| {
[propertyName: string]: ParameterPath;
};
/**
 * Deserializes an incoming HTTP request into typed handler parameters using
 * the generated @azure/ms-rest-js OperationSpec for the operation.
 *
 * Query parameters, header parameters and the request body are each run
 * through the spec's serializer. serialize() is additionally invoked on the
 * deserialized values purely to trigger constraint validation, because
 * deserialize() itself does not validate.
 *
 * @param context Request context (contextID is used for log correlation).
 * @param req Incoming request abstraction.
 * @param spec Generated operation specification describing the parameters.
 * @param logger Logger for debug output.
 * @returns Map of parameter paths to deserialized values.
 */
export async function deserialize(
context: Context,
req: IRequest,
spec: msRest.OperationSpec,
logger: ILogger
): Promise<IHandlerParameters> {
const parameters: IHandlerParameters = {};
// Deserialize query parameters
for (const queryParameter of spec.queryParameters || []) {
if (!queryParameter.mapper.serializedName) {
throw new TypeError(
`QueryParameter mapper doesn't include valid "serializedName"`
);
}
const queryKey = queryParameter.mapper.serializedName;
let queryValueOriginal: string | string[] | undefined = req.getQuery(
queryKey
);
// Collection-valued query parameters arrive as one delimited string;
// split them on the declared collection format before deserializing.
if (
queryValueOriginal !== undefined &&
queryParameter.collectionFormat !== undefined &&
queryParameter.mapper.type.name === "Sequence"
) {
queryValueOriginal = `${queryValueOriginal}`.split(
queryParameter.collectionFormat
);
}
const queryValue = spec.serializer.deserialize(
queryParameter.mapper,
queryValueOriginal,
queryKey
);
// TODO: Currently validation is only in serialize method,
// remove when adding validateConstraints to deserialize()
// TODO: Make serialize return ServerError according to different validations?
spec.serializer.serialize(queryParameter.mapper, queryValue);
setParametersValue(parameters, queryParameter.parameterPath, queryValue);
}
// Deserialize header parameters
for (const headerParameter of spec.headerParameters || []) {
if (!headerParameter.mapper.serializedName) {
throw new TypeError(
`HeaderParameter mapper doesn't include valid "serializedName"`
);
}
// Headers with a collection prefix (e.g. "x-ms-meta-") are gathered into
// a dictionary keyed by the suffix after the prefix.
const headerCollectionPrefix:
| string
| undefined = (headerParameter.mapper as msRest.DictionaryMapper)
.headerCollectionPrefix;
if (headerCollectionPrefix) {
const dictionary: any = {};
const headers = req.getHeaders();
for (const headerKey of Object.keys(headers)) {
if (
headerKey
.toLowerCase()
.startsWith(headerCollectionPrefix.toLocaleLowerCase())
) {
// TODO: Validate collection type by serializer
dictionary[
headerKey.substring(headerCollectionPrefix.length)
] = spec.serializer.serialize(
(headerParameter.mapper as msRest.DictionaryMapper).type.value,
headers[headerKey],
headerKey
);
}
}
setParametersValue(parameters, headerParameter.parameterPath, dictionary);
} else {
const headerKey = headerParameter.mapper.serializedName;
const headerValueOriginal = req.getHeader(headerKey);
const headerValue = spec.serializer.deserialize(
headerParameter.mapper,
headerValueOriginal,
headerKey
);
// TODO: Currently validation is only in serialize method,
// remove when adding validateConstraints to deserialize()
spec.serializer.serialize(headerParameter.mapper, headerValue);
setParametersValue(
parameters,
headerParameter.parameterPath,
headerValue
);
}
}
// Deserialize body
const bodyParameter = spec.requestBody;
if (bodyParameter && bodyParameter.mapper.type.name === "Stream") {
// Stream bodies are passed through untouched.
setParametersValue(parameters, "body", req.getBodyStream());
} else if (bodyParameter) {
// Non-stream bodies are read fully into a string, then parsed as JSON or
// XML depending on the content type (or the spec's isXML flag).
const jsonContentTypes = ["application/json", "text/json"];
const xmlContentTypes = ["application/xml", "application/atom+xml"];
const contentType = req.getHeader("content-type") || "";
const contentComponents = !contentType
? []
: contentType.split(";").map(component => component.toLowerCase());
const isRequestWithJSON = contentComponents.some(
component => jsonContentTypes.indexOf(component) !== -1
); // TODO
const isRequestWithXML =
spec.isXML ||
contentComponents.some(
component => xmlContentTypes.indexOf(component) !== -1
);
// const isRequestWithStream = false;
const body = await readRequestIntoText(req);
logger.debug(
`deserialize(): Raw request body string is (removed all empty characters) ${body.replace(
/\s/g,
""
)}`,
context.contextID
);
req.setBody(body);
let parsedBody: object = {};
if (isRequestWithJSON) {
// read body
parsedBody = JSON.parse(body);
} else if (isRequestWithXML) {
parsedBody = (await parseXML(body)) || {};
}
let valueToDeserialize: any = parsedBody;
// For XML sequences the items live under the mapper's xmlElementName;
// unwrap that level before handing the value to the serializer.
if (
spec.isXML &&
bodyParameter.mapper.type.name === msRest.MapperType.Sequence
) {
valueToDeserialize =
typeof valueToDeserialize === "object"
? valueToDeserialize[bodyParameter.mapper.xmlElementName!]
: [];
}
parsedBody = spec.serializer.deserialize(
bodyParameter.mapper,
valueToDeserialize,
bodyParameter.mapper.serializedName!
);
// Validation purpose only, because only serialize supports validation
// TODO: Inject convenience layer error into deserialize; Drop @azure/ms-rest-js, move logic into generated code
spec.serializer.serialize(bodyParameter.mapper, parsedBody);
setParametersValue(parameters, bodyParameter.parameterPath, parsedBody);
setParametersValue(parameters, "body", req.getBody());
}
return parameters;
}
/**
 * Reads the request body stream to completion and returns it as a UTF-8
 * string.
 *
 * Chunks are collected as Buffers and concatenated once at the end. The
 * previous implementation pushed Buffer chunks into a string[] and joined
 * them, which stringified every chunk separately and corrupted multi-byte
 * UTF-8 characters that happened to span a chunk boundary.
 *
 * @param req Incoming request whose body stream is consumed.
 * @returns The complete request body decoded as UTF-8.
 */
async function readRequestIntoText(req: IRequest): Promise<string> {
  return new Promise<string>((resolve, reject) => {
    const segments: Buffer[] = [];
    const bodyStream = req.getBodyStream();
    bodyStream.on("data", buffer => {
      // Normalize to Buffer in case the stream emits strings.
      segments.push(Buffer.isBuffer(buffer) ? buffer : Buffer.from(buffer));
    });
    bodyStream.on("error", reject);
    bodyStream.on("end", () => {
      // Decode once over the whole payload so multi-byte characters that
      // cross chunk boundaries are reassembled correctly.
      resolve(Buffer.concat(segments).toString("utf8"));
    });
  });
}
/**
 * Stores a deserialized value in the parameters map at the given path.
 *
 * A string path assigns a top-level key; a string[] path walks (and creates,
 * where missing) intermediate objects and assigns the final segment. Any
 * other path shape is rejected.
 *
 * @throws TypeError when parameterPath is neither a string nor a string[].
 */
function setParametersValue(
  parameters: IHandlerParameters,
  parameterPath: ParameterPath,
  parameterValue: any
) {
  if (typeof parameterPath === "string") {
    parameters[parameterPath] = parameterValue;
    return;
  }
  if (!Array.isArray(parameterPath)) {
    throw new TypeError(`parameterPath is not string or string[]`);
  }
  // Walk every segment except the last, creating intermediate objects on
  // demand, then assign the value under the final segment.
  const leaf = parameterPath.slice(0, -1).reduce((node, segment) => {
    if (!node[segment]) {
      node[segment] = {};
    }
    return node;
  }, parameters);
  leaf[parameterPath[parameterPath.length - 1]] = parameterValue;
}
/**
 * Serializes a handler's response object onto the HTTP response using the
 * operation's OperationSpec: status code, headers (including prefix-based
 * header collections), then an XML, JSON or stream body.
 *
 * @param context Request context (contextID is used for log correlation).
 * @param res Outgoing response abstraction.
 * @param spec Generated operation specification.
 * @param handlerResponse Plain object produced by the handler.
 * @param logger Logger for debug/info output.
 * @throws TypeError when the handler's status code has no response spec.
 */
export async function serialize(
context: Context,
res: IResponse,
spec: msRest.OperationSpec,
handlerResponse: any,
logger: ILogger
): Promise<void> {
const statusCodeInResponse: number = handlerResponse.statusCode;
res.setStatusCode(statusCodeInResponse);
const responseSpec = spec.responses[statusCodeInResponse];
if (!responseSpec) {
throw new TypeError(
`Request specification doesn't include provided response status code`
);
}
// Serialize headers
const headerSerializer = new msRest.Serializer(Mappers);
const headersMapper = responseSpec.headersMapper;
if (headersMapper && headersMapper.type.name === "Composite") {
const mappersForAllHeaders = headersMapper.type.modelProperties || {};
// Handle headerMapper one by one
for (const key in mappersForAllHeaders) {
if (mappersForAllHeaders.hasOwnProperty(key)) {
const headerMapper = mappersForAllHeaders[key];
const headerName = headerMapper.serializedName;
const headerValueOriginal = handlerResponse[key];
const headerValueSerialized = headerSerializer.serialize(
headerMapper,
headerValueOriginal
);
// Handle collection of headers starting with same prefix, such as x-ms-meta prefix
const headerCollectionPrefix = (headerMapper as msRest.DictionaryMapper)
.headerCollectionPrefix;
if (
headerCollectionPrefix !== undefined &&
headerValueOriginal !== undefined
) {
// Emit one header per dictionary entry, prefixing each key.
for (const collectionHeaderPartialName in headerValueSerialized) {
if (
headerValueSerialized.hasOwnProperty(collectionHeaderPartialName)
) {
const collectionHeaderValueSerialized =
headerValueSerialized[collectionHeaderPartialName];
const collectionHeaderName = `${headerCollectionPrefix}${collectionHeaderPartialName}`;
if (
collectionHeaderName &&
collectionHeaderValueSerialized !== undefined
) {
res.setHeader(
collectionHeaderName,
collectionHeaderValueSerialized
);
}
}
}
} else {
if (headerName && headerValueSerialized !== undefined) {
res.setHeader(headerName, headerValueSerialized);
}
}
}
}
}
// Serialize XML bodies
if (
spec.isXML &&
responseSpec.bodyMapper &&
responseSpec.bodyMapper.type.name !== "Stream"
) {
let body = spec.serializer.serialize(
responseSpec.bodyMapper!,
handlerResponse
);
// When the root element is a sequence type, wrap it with its XML element
// name here, because serialize() doesn't do that.
if (responseSpec.bodyMapper!.type.name === "Sequence") {
const sequenceElementName = responseSpec.bodyMapper!.xmlElementName;
if (sequenceElementName !== undefined) {
const newBody = {} as any;
newBody[sequenceElementName] = body;
body = newBody;
}
}
const xmlBody = stringifyXML(body, {
rootName:
responseSpec.bodyMapper!.xmlName ||
responseSpec.bodyMapper!.serializedName
});
res.setContentType(`application/xml`);
// TODO: Should send response in a serializer?
res.getBodyStream().write(xmlBody);
logger.debug(
`Serializer: Raw response body string is ${xmlBody}`,
context.contextID
);
logger.info(`Serializer: Start returning stream body.`, context.contextID);
}
// Serialize JSON bodies
if (
!spec.isXML &&
responseSpec.bodyMapper &&
responseSpec.bodyMapper.type.name !== "Stream"
) {
let body = spec.serializer.serialize(
responseSpec.bodyMapper!,
handlerResponse
);
// NOTE(review): this JSON branch mirrors the XML sequence-wrapping above,
// including its use of xmlElementName — presumably intentional; confirm.
if (responseSpec.bodyMapper!.type.name === "Sequence") {
const sequenceElementName = responseSpec.bodyMapper!.xmlElementName;
if (sequenceElementName !== undefined) {
const newBody = {} as any;
newBody[sequenceElementName] = body;
body = newBody;
}
}
if (!res.getHeader("content-type")) {
res.setContentType("application/json");
}
const jsonBody = JSON.stringify(body);
// TODO: Should send response in a serializer?
res.getBodyStream().write(jsonBody);
logger.debug(
`Serializer: Raw response body string is ${jsonBody}`,
context.contextID
);
logger.info(`Serializer: Start returning stream body.`, context.contextID);
}
// Serialize stream body
// TODO: Move to end middleware for end tracking
if (
handlerResponse.body &&
responseSpec.bodyMapper &&
responseSpec.bodyMapper.type.name === "Stream"
) {
logger.info(`Serializer: Start returning stream body.`, context.contextID);
// Pipe the handler's stream into the response; resolve on end/close and
// reject on an error from either side of the pipe.
await new Promise<void>((resolve, reject) => {
(handlerResponse.body as NodeJS.ReadableStream)
.on("error", reject)
.on("end", resolve)
.pipe(res.getBodyStream())
.on("error", reject)
.on("close", resolve);
});
// const totalTimeInMS = context.startTime
// ? new Date().getTime() - context.startTime.getTime()
// : undefined;
// logger.info(
// tslint:disable-next-line:max-line-length
// `Serializer: End response. TotalTimeInMS=${totalTimeInMS} StatusCode=${res.getStatusCode()} StatusMessage=${res.getStatusMessage()} Headers=${JSON.stringify(
// res.getHeaders()
// )}`,
// context.contextID
// );
}
}
import type { Dictionary, EntityMetadata, EntityProperty, NamingStrategy } from '@mikro-orm/core';
import { Cascade, DecimalType, EntitySchema, ReferenceType, t, Utils } from '@mikro-orm/core';
import type { SchemaHelper } from './SchemaHelper';
import type { Column, ForeignKey, Index } from '../typings';
import type { AbstractSqlPlatform } from '../AbstractSqlPlatform';
/**
* @internal
*/
/**
 * In-memory description of one database table: its columns, indexes and
 * foreign keys. Populated either from introspecting an existing schema
 * (init) or from entity metadata (addColumnFromProperty), and able to emit
 * entity metadata back (getEntityDeclaration) for schema generation.
 */
export class DatabaseTable {
// Column name -> column definition.
private columns: Dictionary<Column> = {};
private indexes: Index[] = [];
// Constraint name -> foreign key definition.
private foreignKeys: Dictionary<ForeignKey> = {};
public comment?: string;
constructor(private readonly platform: AbstractSqlPlatform,
readonly name: string,
readonly schema?: string) { }
getColumns(): Column[] {
return Object.values(this.columns);
}
getColumn(name: string): Column | undefined {
return this.columns[name];
}
getIndexes(): Index[] {
return this.indexes;
}
/**
 * Populates the table from introspected schema data: raw columns, indexes,
 * primary key column names, foreign keys and enum value lists per column.
 */
init(cols: Column[], indexes: Index[], pks: string[], fks: Dictionary<ForeignKey>, enums: Dictionary<string[]>): void {
this.indexes = indexes;
this.foreignKeys = fks;
this.columns = cols.reduce((o, v) => {
const index = indexes.filter(i => i.columnNames[0] === v.name);
v.primary = v.primary || pks.includes(v.name);
v.unique = index.some(i => i.unique && !i.primary);
// Columns with a known enum value list are mapped as enum type.
const type = v.name in enums ? 'enum' : v.type;
v.mappedType = this.platform.getMappedType(type);
// Sequence-backed defaults (e.g. postgres serial) are not real defaults.
v.default = v.default?.toString().startsWith('nextval(') ? null : v.default;
v.enumItems = enums[v.name] || [];
o[v.name] = v;
return o;
}, {} as Dictionary<Column>);
}
addColumn(column: Column) {
this.columns[column.name] = column;
}
/**
 * Adds the column(s) backing an entity property, plus any foreign key and
 * index/unique entries the property implies.
 */
addColumnFromProperty(prop: EntityProperty, meta: EntityMetadata) {
prop.fieldNames.forEach((field, idx) => {
const type = prop.enum ? 'enum' : prop.columnTypes[idx];
const mappedType = this.platform.getMappedType(type);
if (mappedType instanceof DecimalType) {
// Extract precision/scale from a column type like `decimal(10,2)`.
const match = prop.columnTypes[idx].match(/\w+\((\d+), ?(\d+)\)/);
/* istanbul ignore else */
if (match) {
prop.precision = +match[1];
prop.scale = +match[2];
prop.length = undefined;
}
}
// Only a single scalar numeric PK is treated as auto-incrementable.
const primary = !meta.compositePK && !!prop.primary && prop.reference === ReferenceType.SCALAR && this.platform.isNumericColumn(mappedType);
this.columns[field] = {
name: prop.fieldNames[idx],
type: prop.columnTypes[idx],
mappedType,
unsigned: prop.unsigned && this.platform.isNumericColumn(mappedType),
autoincrement: prop.autoincrement ?? primary,
primary,
nullable: !!prop.nullable,
length: prop.length,
precision: prop.precision,
scale: prop.scale,
default: prop.defaultRaw,
enumItems: prop.items as string[],
comment: prop.comment,
};
this.columns[field].unsigned ||= this.columns[field].autoincrement;
// Note: overwrites the raw default assigned above with its normalized form.
const defaultValue = this.platform.getSchemaHelper()!.normalizeDefaultValue(prop.defaultRaw!, prop.length);
this.columns[field].default = defaultValue as string;
});
if ([ReferenceType.MANY_TO_ONE, ReferenceType.ONE_TO_ONE].includes(prop.reference)) {
const constraintName = this.getIndexName(true, prop.fieldNames, 'foreign');
this.foreignKeys[constraintName] = {
constraintName,
columnNames: prop.fieldNames,
localTableName: this.getShortestName(),
referencedColumnNames: prop.referencedColumnNames,
referencedTableName: prop.targetMeta!.schema ? `${prop.targetMeta!.schema}.${prop.referencedTableName}` : prop.referencedTableName,
};
const cascade = prop.cascade.includes(Cascade.REMOVE) || prop.cascade.includes(Cascade.ALL);
if (prop.onDelete || cascade || prop.nullable) {
this.foreignKeys[constraintName].deleteRule = prop.onDelete || (cascade ? 'cascade' : 'set null');
}
if (prop.onUpdateIntegrity || prop.cascade.includes(Cascade.PERSIST) || prop.cascade.includes(Cascade.ALL)) {
this.foreignKeys[constraintName].updateRule = prop.onUpdateIntegrity || 'cascade';
}
}
if (prop.index) {
this.indexes.push({
columnNames: prop.fieldNames,
composite: prop.fieldNames.length > 1,
keyName: this.getIndexName(prop.index, prop.fieldNames, 'index'),
primary: false,
unique: false,
});
}
// A single scalar PK already has a unique constraint; skip redundant index.
if (prop.unique && !(prop.primary && !meta.compositePK)) {
this.indexes.push({
columnNames: prop.fieldNames,
composite: prop.fieldNames.length > 1,
keyName: this.getIndexName(prop.unique, prop.fieldNames, 'unique'),
primary: false,
unique: true,
});
}
}
// Uses a user-supplied name when given as a string, otherwise asks the
// platform to derive a conventional index/constraint name.
private getIndexName(value: boolean | string, columnNames: string[], type: 'unique' | 'index' | 'primary' | 'foreign'): string {
if (Utils.isString(value)) {
return value;
}
return this.platform.getIndexName(this.name, columnNames, type);
}
/**
 * Reverse-engineers entity metadata from this table, mapping columns to
 * properties and composite indexes/uniques to schema-level declarations.
 */
getEntityDeclaration(namingStrategy: NamingStrategy, schemaHelper: SchemaHelper): EntityMetadata {
const name = namingStrategy.getClassName(this.name, '_');
const schema = new EntitySchema({ name, collection: this.name });
const compositeFkIndexes: Dictionary<{ keyName: string }> = {};
const compositeFkUniques: Dictionary<{ keyName: string }> = {};
for (const index of this.indexes.filter(index => index.columnNames.length > 1)) {
const properties = index.columnNames.map(col => this.getPropertyName(namingStrategy, this.getColumn(col)!));
const ret = { name: index.keyName, properties: Utils.unique(properties) };
// Multi-column index collapsing to one property (composite FK): record
// it so the property itself carries the index/unique name.
if (ret.properties.length === 1) {
const map = index.unique ? compositeFkUniques : compositeFkIndexes;
map[ret.properties[0]] = { keyName: index.keyName };
continue;
}
if (index.primary) {
//
} else if (index.unique) {
schema.addUnique(ret);
} else {
schema.addIndex(ret);
}
}
for (const column of this.getColumns()) {
const prop = this.getPropertyDeclaration(column, namingStrategy, schemaHelper, compositeFkIndexes, compositeFkUniques);
schema.addProperty(prop.name, prop.type, prop);
}
const meta = schema.init().meta;
// A non-composite primary M:1 relation is effectively a 1:1.
meta.relations
.filter(prop => prop.primary && prop.reference === ReferenceType.MANY_TO_ONE && !meta.compositePK)
.forEach(prop => prop.reference = ReferenceType.ONE_TO_ONE);
return meta;
}
/**
 * The shortest name is stripped of the default namespace. All other namespaced elements are returned as full-qualified names.
 */
getShortestName(defaultNamespaceName?: string): string {
if (!this.schema || this.schema === defaultNamespaceName || this.name.startsWith(this.schema + '.')) {
return this.name;
}
return `${this.schema}.${this.name}`;
}
getForeignKeys() {
return this.foreignKeys;
}
hasColumn(columnName: string) {
return columnName in this.columns;
}
getIndex(indexName: string) {
return this.indexes.find(i => i.keyName === indexName);
}
hasIndex(indexName: string) {
return !!this.getIndex(indexName);
}
getPrimaryKey() {
return this.indexes.find(i => i.primary);
}
hasPrimaryKey() {
return !!this.getPrimaryKey();
}
// Builds the property options for one column, wiring in FK target, index
// and unique names collected earlier.
private getPropertyDeclaration(column: Column, namingStrategy: NamingStrategy, schemaHelper: SchemaHelper, compositeFkIndexes: Dictionary<{ keyName: string }>, compositeFkUniques: Dictionary<{ keyName: string }>) {
const fk = Object.values(this.foreignKeys).find(fk => fk.columnNames.includes(column.name));
const prop = this.getPropertyName(namingStrategy, column);
const index = compositeFkIndexes[prop] || this.indexes.find(idx => idx.columnNames[0] === column.name && !idx.composite && !idx.unique && !idx.primary);
const unique = compositeFkUniques[prop] || this.indexes.find(idx => idx.columnNames[0] === column.name && !idx.composite && idx.unique && !idx.primary);
const reference = this.getReferenceType(fk, unique);
const type = this.getPropertyType(namingStrategy, column, fk);
const fkOptions: Partial<EntityProperty> = {};
if (fk) {
fkOptions.fieldNames = fk.columnNames;
fkOptions.referencedTableName = fk.referencedTableName;
fkOptions.referencedColumnNames = fk.referencedColumnNames;
/* istanbul ignore next */
fkOptions.onUpdateIntegrity = fk.updateRule?.toLowerCase();
/* istanbul ignore next */
fkOptions.onDelete = fk.deleteRule?.toLowerCase();
}
return {
name: prop,
type,
reference,
columnType: column.type,
default: this.getPropertyDefaultValue(schemaHelper, column, type),
defaultRaw: this.getPropertyDefaultValue(schemaHelper, column, type, true),
nullable: column.nullable,
primary: column.primary,
fieldName: column.name,
length: column.length,
index: index ? index.keyName : undefined,
unique: unique ? unique.keyName : undefined,
...fkOptions,
};
}
// FK + unique constraint => 1:1; FK alone => M:1; otherwise scalar.
private getReferenceType(fk?: ForeignKey, unique?: { keyName: string }): ReferenceType {
if (fk && unique) {
return ReferenceType.ONE_TO_ONE;
}
if (fk) {
return ReferenceType.MANY_TO_ONE;
}
return ReferenceType.SCALAR;
}
// Derives the property name from the column, stripping a trailing
// `_<referencedColumn>` suffix for FK columns (e.g. `author_id` -> `author`).
private getPropertyName(namingStrategy: NamingStrategy, column: Column): string {
const fk = Object.values(this.foreignKeys).find(fk => fk.columnNames.includes(column.name));
let field = column.name;
if (fk) {
const idx = fk.columnNames.indexOf(column.name);
field = field.replace(new RegExp(`_${fk.referencedColumnNames[idx]}$`), '');
}
return namingStrategy.columnNameToProperty(field);
}
// FK columns are typed as the referenced entity class; scalars use the
// mapped type's comparison type.
private getPropertyType(namingStrategy: NamingStrategy, column: Column, fk?: ForeignKey): string {
if (fk) {
return namingStrategy.getClassName(fk.referencedTableName, '_');
}
return column.mappedType?.compareAsType() ?? 'unknown';
}
// Normalizes a column default into the property default (or raw default
// when `raw` is set); coerces booleans and numbers for scalar properties.
private getPropertyDefaultValue(schemaHelper: SchemaHelper, column: Column, propType: string, raw = false): any {
const empty = raw ? 'null' : undefined;
if (!column.default) {
return empty;
}
const val = schemaHelper.normalizeDefaultValue(column.default, column.length);
if (column.nullable && val === 'null') {
return empty;
}
if (propType === 'boolean' && !raw) {
return !['0', 'false', 'f', 'n', 'no', 'off'].includes('' + column.default);
}
if (propType === 'number') {
return +column.default;
}
return '' + val;
}
/**
 * Registers an index/unique/primary entry derived from entity-level index
 * definitions; property names are resolved to their column names.
 */
addIndex(meta: EntityMetadata, index: { properties: string | string[]; name?: string; type?: string; expression?: string; options?: Dictionary }, type: 'index' | 'unique' | 'primary') {
const properties = Utils.flatten(Utils.asArray(index.properties).map(prop => meta.properties[prop].fieldNames));
if (properties.length === 0) {
return;
}
const name = this.getIndexName(index.name!, properties, type);
this.indexes.push({
keyName: name,
columnNames: properties,
composite: properties.length > 1,
primary: type === 'primary',
unique: type !== 'index',
type: index.type,
expression: index.expression,
});
}
isInDefaultNamespace(defaultNamespaceName: string) {
return this.schema === defaultNamespaceName || this.schema == null;
}
/**
 * JSON-safe snapshot: drops the platform reference and replaces each
 * column's mapped type instance with its registered type name.
 */
toJSON(): Dictionary {
const { platform, columns, ...rest } = this;
const columnsMapped = Object.keys(columns).reduce((o, col) => {
const { mappedType, ...restCol } = columns[col];
o[col] = restCol;
o[col].mappedType = Object.keys(t).find(k => t[k] === mappedType.constructor);
return o;
}, {} as Dictionary);
return { columns: columnsMapped, ...rest };
}
}
import {DataModels, IManagementApiClient, Messages} from '@process-engine/management_api_contracts';
import {IIdentity} from '@essential-projects/iam_contracts';
import {Subscription} from '@essential-projects/event_aggregator_contracts';
import {NotFoundError} from '@essential-projects/errors_ts';
import {Correlation} from '@process-engine/management_api_contracts/dist/data_models/correlation';
import {IDashboardRepository} from '../contracts/IDashboardRepository';
import {TaskList, TaskListEntry, TaskSource, TaskType} from '../contracts/index';
import {applyPagination} from '../../../../services/pagination-module/pagination.module';
export class DashboardRepository implements IDashboardRepository {
protected managementApiClient: IManagementApiClient;
/**
 * @param managementApiClient Client used for every call to the
 *                            ProcessEngine management API.
 */
constructor(managementApiClient: IManagementApiClient) {
this.managementApiClient = managementApiClient;
}
/**
 * Fetches every active cronjob and applies pagination client-side.
 * NOTE(review): the cast through `as any` suggests the client returns a
 * plain array rather than a CronjobList here — confirm against the client.
 */
public async getAllActiveCronjobs(
identity: IIdentity,
offset: number = 0,
limit: number = 0,
): Promise<DataModels.Cronjobs.CronjobList> {
const cronjobs: Array<DataModels.Cronjobs.CronjobConfiguration> = (await this.managementApiClient.getAllActiveCronjobs(
identity,
)) as any;
return {
cronjobs: applyPagination(cronjobs, offset, limit),
totalCount: cronjobs.length,
};
}
/**
 * Collects the process instances of every active correlation into one flat,
 * client-side paginated list. Each instance is tagged with the id of the
 * correlation it belongs to before being returned.
 */
public async getAllActiveProcessInstances(
  identity: IIdentity,
  offset: number = 0,
  limit: number = 0,
): Promise<DataModels.Correlations.ProcessInstanceList> {
  const activeCorrelations: Array<DataModels.Correlations.Correlation> = (await this.getActiveCorrelations(identity))
    .correlations;
  // Flatten correlation -> instances while stamping the correlation id.
  const processInstances: Array<DataModels.Correlations.ProcessInstance> = activeCorrelations.flatMap(
    (correlation) => {
      return correlation.processInstances.map((processInstance) => {
        processInstance.correlationId = correlation.id;
        return processInstance;
      });
    },
  );
  return {processInstances: applyPagination(processInstances, offset, limit), totalCount: processInstances.length};
}
/**
 * Fetches all deployed process models and wraps them in a list shape whose
 * totalCount equals the number of models returned.
 */
public async getProcessModels(identity: IIdentity): Promise<DataModels.ProcessModels.ProcessModelList> {
  const {processModels} = await this.managementApiClient.getProcessModels(identity);
  return {processModels: processModels, totalCount: processModels.length};
}
/**
 * Fetches active correlations with server-side offset/limit.
 * NOTE(review): the cast through `as any` suggests the client returns a
 * plain array rather than a CorrelationList here — confirm against the client.
 */
public async getActiveCorrelations(
identity: IIdentity,
offset: number = 0,
limit: number = 0,
): Promise<DataModels.Correlations.CorrelationList> {
const result = (await this.managementApiClient.getActiveCorrelations(identity, offset, limit)) as any;
return {correlations: result, totalCount: result.length};
}
/** Retrieves the suspended ManualTasks of a process model as a list with total count. */
public async getManualTasksForProcessModel(
  identity: IIdentity,
  processModelId: string,
): Promise<DataModels.ManualTasks.ManualTaskList> {
  const {manualTasks} = await this.managementApiClient.getManualTasksForProcessModel(identity, processModelId);
  return {manualTasks: manualTasks, totalCount: manualTasks.length};
}
/** Retrieves the suspended EmptyActivities of a process model as a list with total count. */
public async getEmptyActivitiesForProcessModel(
  identity: IIdentity,
  processModelId: string,
): Promise<DataModels.EmptyActivities.EmptyActivityList> {
  const {emptyActivities} = await this.managementApiClient.getEmptyActivitiesForProcessModel(identity, processModelId);
  return {emptyActivities: emptyActivities, totalCount: emptyActivities.length};
}
/** Retrieves the suspended UserTasks of a process model as a list with total count. */
public async getUserTasksForProcessModel(
  identity: IIdentity,
  processModelId: string,
): Promise<DataModels.UserTasks.UserTaskList> {
  const {userTasks} = await this.managementApiClient.getUserTasksForProcessModel(identity, processModelId);
  return {userTasks: userTasks, totalCount: userTasks.length};
}
/** Retrieves the suspended ManualTasks of a correlation as a list with total count. */
public async getManualTasksForCorrelation(
  identity: IIdentity,
  correlationId: string,
): Promise<DataModels.ManualTasks.ManualTaskList> {
  const {manualTasks} = await this.managementApiClient.getManualTasksForCorrelation(identity, correlationId);
  return {manualTasks: manualTasks, totalCount: manualTasks.length};
}
/** Retrieves the suspended EmptyActivities of a correlation as a list with total count. */
public async getEmptyActivitiesForCorrelation(
  identity: IIdentity,
  correlationId: string,
): Promise<DataModels.EmptyActivities.EmptyActivityList> {
  const {emptyActivities} = await this.managementApiClient.getEmptyActivitiesForCorrelation(identity, correlationId);
  return {emptyActivities: emptyActivities, totalCount: emptyActivities.length};
}
/** Retrieves the suspended UserTasks of a correlation as a list with total count. */
public async getUserTasksForCorrelation(
  identity: IIdentity,
  correlationId: string,
): Promise<DataModels.UserTasks.UserTaskList> {
  const {userTasks} = await this.managementApiClient.getUserTasksForCorrelation(identity, correlationId);
  return {userTasks: userTasks, totalCount: userTasks.length};
}
/**
 * Retrieves the suspended ManualTasks of a single process instance.
 *
 * The parameter was previously named `correlationId` although it is passed
 * to the management API as the process instance id; renamed for clarity
 * (positional call sites are unaffected).
 */
public async getManualTasksForProcessInstance(
  identity: IIdentity,
  processInstanceId: string,
): Promise<DataModels.ManualTasks.ManualTaskList> {
  const result = await this.managementApiClient.getManualTasksForProcessInstance(identity, processInstanceId);
  return {manualTasks: result.manualTasks, totalCount: result.manualTasks.length};
}
/**
 * Retrieves the suspended EmptyActivities of a single process instance.
 *
 * The parameter was previously named `correlationId` although it is passed
 * to the management API as the process instance id; renamed for clarity
 * (positional call sites are unaffected).
 */
public async getEmptyActivitiesForProcessInstance(
  identity: IIdentity,
  processInstanceId: string,
): Promise<DataModels.EmptyActivities.EmptyActivityList> {
  const result = await this.managementApiClient.getEmptyActivitiesForProcessInstance(identity, processInstanceId);
  return {emptyActivities: result.emptyActivities, totalCount: result.emptyActivities.length};
}
/**
 * Retrieves the suspended UserTasks of a single process instance.
 *
 * The parameter was previously named `correlationId` although it is passed
 * to the management API as the process instance id; renamed for clarity
 * (positional call sites are unaffected).
 */
public async getUserTasksForProcessInstance(
  identity: IIdentity,
  processInstanceId: string,
): Promise<DataModels.UserTasks.UserTaskList> {
  const result = await this.managementApiClient.getUserTasksForProcessInstance(identity, processInstanceId);
  return {userTasks: result.userTasks, totalCount: result.userTasks.length};
}
/** Terminates the given process instance via the management API. */
public terminateProcessInstance(identity: IIdentity, processInstanceId: string): Promise<void> {
return this.managementApiClient.terminateProcessInstance(identity, processInstanceId);
}
/** Subscribes to process-ended events; the callback is invoked without arguments. */
public onProcessEnded(identity: IIdentity, callback: Function): Promise<Subscription> {
  const handler = (message: Messages.BpmnEvents.EndEventReachedMessage): void => {
    callback();
  };
  return this.managementApiClient.onProcessEnded(identity, handler);
}
/** Subscribes to process-started events; forwards each message to the callback. */
public onProcessStarted(identity: IIdentity, callback: Function): Promise<Subscription> {
  const handler = (processStarted: Messages.SystemEvents.ProcessStartedMessage): void => {
    callback(processStarted);
  };
  return this.managementApiClient.onProcessStarted(identity, handler);
}
/** Subscribes to process-error events; forwards each message to the callback. */
public onProcessError(identity: IIdentity, callback: Function): Promise<Subscription> {
  const handler = (processErrorMessage: Messages.SystemEvents.ProcessErrorMessage): void => {
    callback(processErrorMessage);
  };
  return this.managementApiClient.onProcessError(identity, handler);
}
/** Subscribes to process-terminated events; forwards each message to the callback. */
public onProcessTerminated(identity: IIdentity, callback: Function): Promise<Subscription> {
  const handler = (processErrorMessage: Messages.SystemEvents.ProcessErrorMessage): void => {
    callback(processErrorMessage);
  };
  return this.managementApiClient.onProcessTerminated(identity, handler);
}
/** Subscribes to EmptyActivity-finished events; forwards each message to the callback. */
public onEmptyActivityFinished(identity: IIdentity, callback: Function): Promise<Subscription> {
  const handler = (emptyActivityFinishedMessage: Messages.SystemEvents.EmptyActivityFinishedMessage): void => {
    callback(emptyActivityFinishedMessage);
  };
  return this.managementApiClient.onEmptyActivityFinished(identity, handler);
}
/** Subscribes to EmptyActivity-waiting events; forwards each message to the callback. */
public onEmptyActivityWaiting(identity: IIdentity, callback: Function): Promise<Subscription> {
  const handler = (emptyActivityReachedMessage: Messages.SystemEvents.EmptyActivityReachedMessage): void => {
    callback(emptyActivityReachedMessage);
  };
  return this.managementApiClient.onEmptyActivityWaiting(identity, handler);
}
/** Subscribes to ManualTask-finished events; forwards each message to the callback. */
public onManualTaskFinished(identity: IIdentity, callback: Function): Promise<Subscription> {
  const handler = (manualTaskFinishedMessage: Messages.SystemEvents.ManualTaskFinishedMessage): void => {
    callback(manualTaskFinishedMessage);
  };
  return this.managementApiClient.onManualTaskFinished(identity, handler);
}
/** Subscribes to ManualTask-waiting events; forwards each message to the callback. */
public onManualTaskWaiting(identity: IIdentity, callback: Function): Promise<Subscription> {
  const handler = (manualTaskReachedMessage: Messages.SystemEvents.ManualTaskReachedMessage): void => {
    callback(manualTaskReachedMessage);
  };
  return this.managementApiClient.onManualTaskWaiting(identity, handler);
}
/** Subscribes to UserTask-finished events; forwards each message to the callback. */
public onUserTaskFinished(identity: IIdentity, callback: Function): Promise<Subscription> {
  const handler = (userTaskFinishedMessage: Messages.SystemEvents.UserTaskFinishedMessage): void => {
    callback(userTaskFinishedMessage);
  };
  return this.managementApiClient.onUserTaskFinished(identity, handler);
}
/** Subscribes to UserTask-waiting events; forwards each message to the callback. */
public onUserTaskWaiting(identity: IIdentity, callback: Function): Promise<Subscription> {
  const handler = (userTaskReachedMessage: Messages.SystemEvents.UserTaskReachedMessage): void => {
    callback(userTaskReachedMessage);
  };
  return this.managementApiClient.onUserTaskWaiting(identity, handler);
}
/** Subscribes to cronjob-created events; forwards each message to the callback. */
public onCronjobCreated(identity: IIdentity, callback: Function): Promise<Subscription> {
  const handler = (message: Messages.SystemEvents.CronjobCreatedMessage): void => {
    callback(message);
  };
  return this.managementApiClient.onCronjobCreated(identity, handler);
}
/** Subscribes to cronjob-executed events; forwards each message to the callback. */
public onCronjobExecuted(identity: IIdentity, callback: Function): Promise<Subscription> {
  const handler = (message: Messages.SystemEvents.CronjobExecutedMessage): void => {
    callback(message);
  };
  return this.managementApiClient.onCronjobExecuted(identity, handler);
}
/** Subscribes to cronjob-removed events; forwards each message to the callback. */
public onCronjobRemoved(identity: IIdentity, callback: Function): Promise<Subscription> {
  const handler = (message: Messages.SystemEvents.CronjobRemovedMessage): void => {
    callback(message);
  };
  return this.managementApiClient.onCronjobRemoved(identity, handler);
}
/** Subscribes to cronjob-stopped events; forwards each message to the callback. */
public onCronjobStopped(identity: IIdentity, callback: Function): Promise<Subscription> {
  const handler = (message: Messages.SystemEvents.CronjobStoppedMessage): void => {
    callback(message);
  };
  return this.managementApiClient.onCronjobStopped(identity, handler);
}
/** Subscribes to cronjob-updated events; forwards each message to the callback. */
public onCronjobUpdated(identity: IIdentity, callback: Function): Promise<Subscription> {
  const handler = (message: Messages.SystemEvents.CronjobUpdatedMessage): void => {
    callback(message);
  };
  return this.managementApiClient.onCronjobUpdated(identity, handler);
}
/** Marks a suspended ManualTask instance as finished. */
public finishManualTask(
identity: IIdentity,
processInstanceId: string,
correlationId: string,
manualTaskInstanceId: string,
): Promise<void> {
return this.managementApiClient.finishManualTask(identity, processInstanceId, correlationId, manualTaskInstanceId);
}
/** Finishes a suspended UserTask instance with the given form result. */
public finishUserTask(
identity: IIdentity,
processInstanceId: string,
correlationId: string,
userTaskInstanceId: string,
userTaskResult: DataModels.UserTasks.UserTaskResult,
): Promise<void> {
return this.managementApiClient.finishUserTask(
identity,
processInstanceId,
correlationId,
userTaskInstanceId,
userTaskResult,
);
}
/** Marks a suspended EmptyActivity instance as finished. */
public finishEmptyActivity(
identity: IIdentity,
processInstanceId: string,
correlationId: string,
emptyActivityInstanceId: string,
): Promise<void> {
return this.managementApiClient.finishEmptyActivity(
identity,
processInstanceId,
correlationId,
emptyActivityInstanceId,
);
}
/** Removes a previously registered event subscription. */
public removeSubscription(identity: IIdentity, subscription: Subscription): Promise<void> {
return this.managementApiClient.removeSubscription(identity, subscription);
}
/**
 * Collects all suspended UserTasks, ManualTasks and EmptyActivities across
 * every deployed process model into one client-side paginated TaskList.
 *
 * Fixes the type annotations of the two aggregate locals: they previously
 * claimed `Array<TaskListEntry>` although they actually hold an array of
 * promises and an array of arrays, respectively. Runtime behavior is
 * unchanged.
 */
public async getAllSuspendedTasks(identity: IIdentity, offset: number = 0, limit: number = 0): Promise<TaskList> {
  const allProcessModels: DataModels.ProcessModels.ProcessModelList = await this.getProcessModels(identity);
  // TODO (ph): This will create 1 + n http requests, where n is the number of process models in the processengine.
  const promisesForAllUserTasks: Array<Promise<Array<TaskListEntry>>> = allProcessModels.processModels.map(
    async (processModel: DataModels.ProcessModels.ProcessModel): Promise<Array<TaskListEntry>> => {
      const userTaskList: DataModels.UserTasks.UserTaskList = await this.getUserTasksForProcessModel(
        identity,
        processModel.id,
      );
      return this.mapTasksToTaskListEntry(userTaskList.userTasks, TaskType.UserTask);
    },
  );
  const promisesForAllManualTasks: Array<Promise<Array<TaskListEntry>>> = allProcessModels.processModels.map(
    async (processModel: DataModels.ProcessModels.ProcessModel): Promise<Array<TaskListEntry>> => {
      const manualTaskList: DataModels.ManualTasks.ManualTaskList = await this.getManualTasksForProcessModel(
        identity,
        processModel.id,
      );
      return this.mapTasksToTaskListEntry(manualTaskList.manualTasks, TaskType.ManualTask);
    },
  );
  const promisesForAllEmptyActivities: Array<Promise<Array<TaskListEntry>>> = allProcessModels.processModels.map(
    async (processModel: DataModels.ProcessModels.ProcessModel): Promise<Array<TaskListEntry>> => {
      const emptyActivityList: DataModels.EmptyActivities.EmptyActivityList = await this.getEmptyActivitiesForProcessModel(
        identity,
        processModel.id,
      );
      return this.mapTasksToTaskListEntry(emptyActivityList.emptyActivities, TaskType.EmptyActivity);
    },
  );
  // Concatenate the pending requests for UserTasks, ManualTasks and EmptyActivities.
  const promisesForAllTasksForAllProcessModels: Array<Promise<Array<TaskListEntry>>> = [].concat(
    promisesForAllUserTasks,
    promisesForAllManualTasks,
    promisesForAllEmptyActivities,
  );
  // Await all promises; each resolves to the tasks of one process model.
  const allTasksForAllProcessModels: Array<Array<TaskListEntry>> = await Promise.all(
    promisesForAllTasksForAllProcessModels,
  );
  // Flatten all results.
  const allTasks: Array<TaskListEntry> = [].concat(...allTasksForAllProcessModels);
  const taskList: TaskList = {
    taskListEntries: applyPagination(allTasks, offset, limit),
    totalCount: allTasks.length,
  };
  return taskList;
}
/**
 * Gets all suspended UserTasks, ManualTasks and EmptyActivities for the given
 * ProcessInstance, paginated with offset/limit.
 */
public async getSuspendedTasksForProcessInstance(
  identity: IIdentity,
  processInstanceId: string,
  offset: number = 0,
  limit: number = 0,
): Promise<TaskList> {
  // The three lookups are independent of each other, so run them in parallel
  // instead of awaiting them sequentially.
  const [userTaskList, manualTaskList, emptyActivityList] = await Promise.all([
    this.getUserTasksForProcessInstance(identity, processInstanceId),
    this.getManualTasksForProcessInstance(identity, processInstanceId),
    this.getEmptyActivitiesForProcessInstance(identity, processInstanceId),
  ]);
  const userTasks: Array<TaskListEntry> = this.mapTasksToTaskListEntry(userTaskList.userTasks, TaskType.UserTask);
  const manualTasks: Array<TaskListEntry> = this.mapTasksToTaskListEntry(
    manualTaskList.manualTasks,
    TaskType.ManualTask,
  );
  const emptyActivities: Array<TaskListEntry> = this.mapTasksToTaskListEntry(
    emptyActivityList.emptyActivities,
    TaskType.EmptyActivity,
  );
  const taskListEntries: Array<TaskListEntry> = [].concat(userTasks, manualTasks, emptyActivities);
  const taskList: TaskList = {
    taskListEntries: applyPagination(taskListEntries, offset, limit),
    totalCount: taskListEntries.length,
  };
  return taskList;
}
/**
 * Gets all suspended UserTasks, ManualTasks and EmptyActivities for the given
 * correlation, paginated with offset/limit.
 *
 * @throws {NotFoundError} If no active correlation with the given id exists.
 */
public async getSuspendedTasksForCorrelation(
  identity: IIdentity,
  correlationId: string,
  offset: number = 0,
  limit: number = 0,
): Promise<TaskList> {
  const runningCorrelations: DataModels.Correlations.CorrelationList = await this.getActiveCorrelations(identity);
  const correlation: DataModels.Correlations.Correlation = runningCorrelations.correlations.find(
    (otherCorrelation: DataModels.Correlations.Correlation) => {
      return otherCorrelation.id === correlationId;
    },
  );
  const correlationWasNotFound: boolean = correlation === undefined;
  if (correlationWasNotFound) {
    throw new NotFoundError(`No correlation found with id ${correlationId}.`);
  }
  // The three lookups are independent of each other, so run them in parallel
  // instead of awaiting them sequentially.
  const [userTaskList, manualTaskList, emptyActivityList] = await Promise.all([
    this.getUserTasksForCorrelation(identity, correlationId),
    this.getManualTasksForCorrelation(identity, correlationId),
    this.getEmptyActivitiesForCorrelation(identity, correlationId),
  ]);
  const userTasks: Array<TaskListEntry> = this.mapTasksToTaskListEntry(userTaskList.userTasks, TaskType.UserTask);
  const manualTasks: Array<TaskListEntry> = this.mapTasksToTaskListEntry(
    manualTaskList.manualTasks,
    TaskType.ManualTask,
  );
  const emptyActivities: Array<TaskListEntry> = this.mapTasksToTaskListEntry(
    emptyActivityList.emptyActivities,
    TaskType.EmptyActivity,
  );
  const taskListEntries: Array<TaskListEntry> = [].concat(userTasks, manualTasks, emptyActivities);
  const taskList: TaskList = {
    taskListEntries: applyPagination(taskListEntries, offset, limit),
    totalCount: taskListEntries.length,
  };
  return taskList;
}
/**
 * Gets all suspended UserTasks, ManualTasks and EmptyActivities for the given
 * ProcessModel, paginated with offset/limit.
 */
public async getSuspendedTasksForProcessModel(
  identity: IIdentity,
  processModelId: string,
  offset: number = 0,
  limit: number = 0,
): Promise<TaskList> {
  // The three lookups are independent of each other, so run them in parallel
  // instead of awaiting them sequentially.
  const [userTaskList, manualTaskList, emptyActivityList] = await Promise.all([
    this.getUserTasksForProcessModel(identity, processModelId),
    this.getManualTasksForProcessModel(identity, processModelId),
    this.getEmptyActivitiesForProcessModel(identity, processModelId),
  ]);
  const userTasks: Array<TaskListEntry> = this.mapTasksToTaskListEntry(userTaskList.userTasks, TaskType.UserTask);
  const manualTasks: Array<TaskListEntry> = this.mapTasksToTaskListEntry(
    manualTaskList.manualTasks,
    TaskType.ManualTask,
  );
  const emptyActivities: Array<TaskListEntry> = this.mapTasksToTaskListEntry(
    emptyActivityList.emptyActivities,
    TaskType.EmptyActivity,
  );
  const taskListEntries: Array<TaskListEntry> = [].concat(userTasks, manualTasks, emptyActivities);
  const taskList: TaskList = {
    taskListEntries: applyPagination(taskListEntries, offset, limit),
    totalCount: taskListEntries.length,
  };
  return taskList;
}
/**
 * Retrieves a single correlation by its id via the Management API client.
 */
public getCorrelationById(identity: IIdentity, correlationId: string): Promise<Correlation> {
  return this.managementApiClient.getCorrelationById(
    identity,
    correlationId,
  );
}
/**
 * Converts raw task sources (UserTasks, ManualTasks, EmptyActivities) into the
 * unified TaskListEntry shape used by the suspended-task endpoints.
 */
private mapTasksToTaskListEntry(tasks: Array<TaskSource>, targetType: TaskType): Array<TaskListEntry> {
  // NOTE: Can't use instanceof or typeof, because the tasks were received as a plain JSON that does not have any type infos.
  // TODO: Add type mapping to the Management API Client.
  return tasks.map((task: TaskSource): TaskListEntry => ({
    correlationId: task.correlationId,
    id: task.id,
    flowNodeInstanceId: task.flowNodeInstanceId,
    processInstanceId: task.processInstanceId,
    processModelId: task.processModelId,
    name: task.name,
    taskType: targetType,
  }));
}
} | the_stack |
import { hasSortKey, Metadata } from '../decorator/metadata/metadata'
import { metadataForModel } from '../decorator/metadata/metadata-for-model.function'
import { hasType, Key, PropertyMetadata } from '../decorator/metadata/property-metadata.model'
import { createOptModelLogger } from '../logger/logger'
import { ModelConstructor } from '../model/model-constructor'
import { MapperForType } from './for-type/base.mapper'
import { BooleanMapper } from './for-type/boolean.mapper'
import { CollectionMapper } from './for-type/collection.mapper'
import { NullMapper } from './for-type/null.mapper'
import { NumberMapper } from './for-type/number.mapper'
import { ObjectMapper } from './for-type/object.mapper'
import { StringMapper } from './for-type/string.mapper'
import { AttributeValueType } from './type/attribute-value-type.type'
import { Attribute, Attributes } from './type/attribute.type'
import { Binary } from './type/binary.type'
import { NullType } from './type/null.type'
import { UndefinedType } from './type/undefined.type'
import { getPropertyPath, typeOf, typeOfFromDb } from './util'
/**
 * Cache of mapper instances keyed by attribute value type; lazily populated by
 * `forType` so each mapper is resolved at most once.
 * @hidden
 */
const mapperForType: Map<AttributeValueType, MapperForType<any, any>> = new Map()
/**
 * Module logger for the mapper; an optional model constructor is passed along
 * as logging context.
 * @hidden
 */
const logger = createOptModelLogger('dynamo.mapper.mapper')
/**
 * Maps an item to its DynamoDB attributes, using the model's metadata when a
 * model constructor is given (default value providers, transient flags, custom
 * db names), and plain type-based mapping otherwise.
 */
export function toDb<T>(item: T, modelConstructor?: ModelConstructor<T>): Attributes<T> {
  logger.verbose('map toDb', modelConstructor, { item })
  const result = <Attributes<T>>{}
  if (modelConstructor) {
    const metadata: Metadata<T> = metadataForModel(modelConstructor)
    // fill in default values for properties that define a default value provider
    if (metadata) {
      for (const propMeta of metadata.getPropertiesWithDefaultValueProvider()) {
        const existingValue = Reflect.get(<any>item, propMeta.name)
        if (existingValue === undefined || existingValue === null) {
          // tslint:disable-next-line:no-non-null-assertion
          Reflect.set(<any>item, propMeta.name, propMeta.defaultValueProvider!())
        }
      }
    }
  }
  const ownPropertyNames = <Array<keyof T>>Object.getOwnPropertyNames(item) || []
  for (const propertyKey of ownPropertyNames) {
    // 1) read the property value (honouring get accessors)
    const propertyValue = getPropertyValue(item, propertyKey)
    if (propertyValue === undefined || propertyValue === null) {
      // nothing to map for null-ish values
      continue
    }
    // 2) look up property metadata when a model constructor is available
    const propertyMetadata = modelConstructor
      ? metadataForModel(modelConstructor).forProperty(propertyKey)
      : undefined
    let attributeValue: Attribute | undefined | null
    if (propertyMetadata) {
      if (propertyMetadata.transient) {
        // 3a_1) transient properties are never persisted
        continue
      }
      // 3a_2) map using the property's metadata
      attributeValue = toDbOne(propertyValue, getPropertyPath<T>(modelConstructor, propertyKey), propertyMetadata)
    } else {
      // 3b) no metadata found, map purely by runtime type
      attributeValue = toDbOne(propertyValue, getPropertyPath<T>(modelConstructor, propertyKey))
    }
    // undefined (transient) and null (empty) results are dropped
    if (attributeValue !== undefined && attributeValue !== null) {
      ;(<any>result)[propertyMetadata ? propertyMetadata.nameDb : propertyKey] = attributeValue
    }
  }
  return result
}
/**
 * maps a js value to its dynamoDB attribute
 * @param propertyValue The value which should be mapped
 * @param propertyMetadata Some optional metadata
 */
export function toDbOne(propertyValue: any, propertyMetadata?: PropertyMetadata<any>): Attribute | null
/**
 * maps a js value to its dynamoDB attribute.
 * You can provide the property path to have a more verbose output
 *
 * @param propertyValue The value which should be mapped
 * @param propertyPath The property path is only used for logging purposes
 * @param propertyMetadata Some optional metadata
 */
export function toDbOne(
  propertyValue: any,
  propertyPath: string,
  propertyMetadata?: PropertyMetadata<any>,
): Attribute | null
export function toDbOne(
  propertyValue: any,
  propertyPathOrMetadata?: string | PropertyMetadata<any>,
  propertyMetadata?: PropertyMetadata<any>,
): Attribute | null {
  logger.verbose('map toDbOne', null, { propertyValue, propertyPathOrMetadata, propertyMetadata })
  // disambiguate the overloaded second argument: string → path, object → metadata
  let propertyPath: string | null = null
  if (typeof propertyPathOrMetadata === 'string') {
    propertyPath = propertyPathOrMetadata || null
  } else if (propertyPathOrMetadata) {
    propertyMetadata = propertyPathOrMetadata
  }
  const explicitType: AttributeValueType | null = hasType(propertyMetadata) ? propertyMetadata.typeInfo.type : null
  const type: AttributeValueType = explicitType || typeOf(propertyValue, propertyPath)
  // a custom mapper on the metadata takes precedence over the type-based default
  const mapper = propertyMetadata && propertyMetadata.mapper ? propertyMetadata.mapper() : forType(type)
  const attrValue: Attribute | null = explicitType
    ? mapper.toDb(propertyValue, propertyMetadata)
    : mapper.toDb(propertyValue)
  // basic validation: key attributes must map to a scalar DynamoDB type (S, N or B)
  if (propertyMetadata && propertyMetadata.key) {
    if (attrValue === null) {
      throw new Error(`${propertyMetadata.name.toString()} is null but is a ${propertyMetadata.key.type} key`)
    }
    if (!('S' in attrValue) && !('N' in attrValue) && !('B' in attrValue)) {
      throw new Error(
        `\
DynamoDb only allows string, number or binary type for RANGE and HASH key. \
Make sure to define a custom mapper for '${propertyMetadata.name.toString()}' which returns a string, number or binary value for partition key, \
type ${type} cannot be used as partition key, value = ${JSON.stringify(propertyValue)}`,
      )
    }
  }
  return attrValue
}
/**
 * Type guard narrowing property metadata to one that defines a HASH or RANGE key.
 * @hidden
 */
function testForKey<T>(p: PropertyMetadata<T>): p is PropertyMetadata<T> & { key: Key } {
  return Boolean(p.key)
}
/**
 * returns the function for the given ModelConstructor to create the AttributeMap with HASH (and RANGE) Key of a given item.
 * @param modelConstructor
 */
export function createToKeyFn<T>(modelConstructor: ModelConstructor<T>): (item: Partial<T>) => Attributes<T> {
  const metadata = metadataForModel(modelConstructor)
  const properties = metadata.modelOptions.properties
  if (!properties.length) {
    throw new Error('no properties defined on metadata')
  }
  const keyProperties = properties.filter(testForKey)
  return (item: Partial<T>) => {
    logger.verbose('create key', null, { item, propertyMeta: keyProperties })
    const key = <Attributes<T>>{}
    for (const propMeta of keyProperties) {
      // every key property must have a value on the item
      if (item[propMeta.name] === null || item[propMeta.name] === undefined) {
        throw new Error(`there is no value for property ${propMeta.name.toString()} but is ${propMeta.key.type} key`)
      }
      const propertyValue = getPropertyValue(item, propMeta.name)
      key[propMeta.nameDb] = <Attribute>toDbOne(propertyValue, propMeta)
    }
    return key
  }
}
/**
 * creates toKeyFn and applies item to it.
 * @see {@link createToKeyFn}
 */
export function toKey<T>(item: T, modelConstructor: ModelConstructor<T>): Attributes<T> {
  const keyFn = createToKeyFn(modelConstructor)
  return keyFn(item)
}
/**
 * Builds the key attribute map (partition key plus optional sort key) for the
 * given model metadata and key values.
 * @hidden
 */
export function createKeyAttributes<T>(
  metadata: Metadata<T>,
  partitionKey: any,
  sortKey?: any,
): Attributes<Partial<T>> {
  const partitionKeyMetadata = metadata.forProperty(metadata.getPartitionKey())
  if (!partitionKeyMetadata) {
    throw new Error('metadata for partition key must be defined')
  }
  const keyAttributeMap = <Attributes<T>>{
    [partitionKeyMetadata.nameDb]: toDbOne(partitionKey, partitionKeyMetadata),
  }
  if (hasSortKey(metadata)) {
    // a model with a sort key requires a sort key value
    if (sortKey === null || sortKey === undefined) {
      throw new Error(`please provide the sort key for attribute ${metadata.getSortKey()}`)
    }
    const sortKeyMetadata = metadata.forProperty(metadata.getSortKey())
    if (!sortKeyMetadata) {
      throw new Error('metadata for sort key must be defined')
    }
    keyAttributeMap[sortKeyMetadata.nameDb] = <Attribute>toDbOne(sortKey, sortKeyMetadata)
  }
  return keyAttributeMap
}
/**
 * Parses a DynamoDB attribute map back into a js item, using the model's
 * metadata (custom mappers, transient flags, db names) when a model
 * constructor is given.
 */
export function fromDb<T>(attributeMap: Attributes<T>, modelConstructor?: ModelConstructor<T>): T {
  logger.verbose('parse fromDb', modelConstructor, { attributeMap })
  const model: T = <T>{}
  for (const attributeName of Object.getOwnPropertyNames(attributeMap)) {
    // 1) read the raw attribute value
    const attributeValue = (<any>attributeMap)[attributeName]
    // 2) look up property metadata when a model constructor is available
    const propertyMetadata = modelConstructor
      ? metadataForModel(modelConstructor).forProperty(attributeName)
      : undefined
    let modelValue: T | undefined
    if (propertyMetadata) {
      if (propertyMetadata.transient) {
        // transient properties are never read back
        continue
      }
      // 3a) parse using either a custom mapper or the type-based default
      modelValue = propertyMetadata.mapper
        ? propertyMetadata.mapper().fromDb(attributeValue, propertyMetadata)
        : fromDbOne(attributeValue, propertyMetadata)
    } else {
      // 3b) no metadata found, parse purely by the attribute's db type
      modelValue = fromDbOne(attributeValue)
    }
    if (modelValue !== null && modelValue !== undefined) {
      Reflect.set(<any>model, propertyMetadata ? propertyMetadata.name : attributeName, modelValue)
    }
  }
  return model
}
/**
 * Parses a single attribute to a js value; an explicit type on the property
 * metadata wins over the type derived from the attribute itself.
 */
export function fromDbOne<T>(attributeValue: Attribute, propertyMetadata?: PropertyMetadata<any, any>): T {
  logger.verbose('parse fromDbOne', null, { attributeValue, propertyMetadata })
  const explicitType: AttributeValueType | null = hasType(propertyMetadata) ? propertyMetadata.typeInfo.type : null
  if (explicitType) {
    return forType(explicitType).fromDb(attributeValue, propertyMetadata)
  }
  return forType(typeOfFromDb(attributeValue)).fromDb(attributeValue)
}
/**
 * Resolves (and caches) the mapper responsible for the given attribute value type.
 * Falls back to the ObjectMapper for unknown / complex types.
 * @hidden
 * @throws Error for Map and Binary types, which have no mapper implementation yet.
 */
export function forType(type: AttributeValueType): MapperForType<any, Attribute> {
  let mapper = mapperForType.get(type)
  if (!mapper) {
    switch (type) {
      case String:
        mapper = StringMapper
        break
      case Number:
        mapper = NumberMapper
        break
      case Boolean:
        mapper = BooleanMapper
        break
      case Map:
        // Maps support complex types as keys, we only support String & Number as Keys, otherwise a .toString() method should be implemented,
        // so we know how to save a key
        // mapperForType = new MapMapper()
        throw new Error('Map is not supported to be mapped for now')
      // Array and Set share the same collection mapper
      case Array:
      case Set:
        mapper = CollectionMapper
        break
      case NullType:
        mapper = NullMapper
        break
      case Binary:
        // The applications must encode binary values in base64-encoded format before sending them to DynamoDB.
        // Upon receipt of these values,
        // DynamoDB decodes the data into an unsigned byte array and uses that as the length of the binary attribute.
        throw new Error('no mapper for binary type implemented yet')
      case UndefinedType:
        mapper = ObjectMapper
        break
      case Object:
      default:
        // return ObjectMapper as default to support nested @Model decorated classes (nested complex classes)
        // just note that the property still needs @Property decoration to get the metadata of the complex type
        mapper = ObjectMapper
    }
    mapperForType.set(type, mapper)
  }
  return mapper
}
/**
* @hidden
*/
export function getPropertyValue(item: any, propertyKey: PropertyKey): any {
const propertyDescriptor = Object.getOwnPropertyDescriptor(item, propertyKey)
// use get accessor if available otherwise use value property of descriptor
if (propertyDescriptor) {
if (propertyDescriptor.get) {
return propertyDescriptor.get()
} else {
return propertyDescriptor.value
}
} else {
throw new Error(
`there is no property descriptor for item ${JSON.stringify(item)} and property key ${<string>propertyKey}`,
)
}
} | the_stack |
import Layout from "../../../components/Layout";
import Link from "next/link";
import Header from "../../../components/Header";
import {
TrashIcon,
PencilAltIcon,
DocumentAddIcon,
} from "@heroicons/react/outline";
import { EmptyState } from "../../../components/EmptyState";
import UserGroup, { UserGroups } from "../../../models/userGroup";
import User, { Users } from "../../../models/user";
import Project, { Projects } from "../../../models/project";
import { GetServerSideProps } from "next";
import { DestructiveModal } from "../../../components/DestructiveModal";
import { useState } from "react";
import { useRouter } from "next/router";
/**
 * Server-side props for the user-group users page, produced by
 * `getServerSideProps`.
 */
interface Props {
  // all projects, passed through to the Layout navigation
  projects: Project[];
  // the user group whose members are listed
  userGroup: UserGroup;
  // the members of the group
  users: User[];
}
/**
 * Page listing the users of a user group, with actions to add, edit and delete
 * users. Renders a "Not Found" message when no group was loaded and an empty
 * state when the group has no users.
 */
export default function EditPage(props: Props) {
  const projects = props.projects;
  const userGroup = props.userGroup;
  const users = props.users;
  // Deletion is a two-step flow: the trash icon stores the target id and opens
  // the confirmation modal; confirming performs the removal.
  const [deleteModalOpen, setDeleteModalOpen] = useState(false);
  const [userIdToDelete, setUserIdToDelete] = useState<string | null>(null);
  const router = useRouter();
  // Opens the confirmation modal for the clicked user.
  const confirmDeleteUser = (event: React.MouseEvent, userId: string) => {
    event.preventDefault();
    setUserIdToDelete(userId);
    setDeleteModalOpen(true);
  };
  // Removes the selected user, then reloads so the table reflects the change.
  const deleteUser = async () => {
    if (userIdToDelete) {
      await Users.remove(userIdToDelete, userGroup.id);
    }
    setUserIdToDelete(null);
    setDeleteModalOpen(false);
    router.reload();
  };
  // Closes the modal without deleting.
  const cancelDeleteUser = async () => {
    setUserIdToDelete(null);
    setDeleteModalOpen(false);
  };
  if (!userGroup) {
    return (
      <Layout
        title="Edit User Group"
        section="usergroups"
        projects={projects}
        selectedProjectId={undefined}
      >
        <main className="px-8 py-12">
          <p>Not Found</p>
        </main>
      </Layout>
    );
  } else if (users.length == 0) {
    return (
      <Layout
        title="Edit User Group"
        section="usergroups"
        projects={projects}
        selectedProjectId={undefined}
      >
        <main className="px-8 py-12">
          <Link
            href={"/usergroups/" + userGroup.id + "/users/create"}
            passHref={true}
          >
            <button type="button" className="block w-full">
              <EmptyState
                title="No users"
                message="Add your first user."
                buttonTitle="New User"
              />
            </button>
          </Link>
        </main>
      </Layout>
    );
  } else {
    return (
      <Layout
        title="Edit User Group"
        section="usergroups"
        projects={projects}
        selectedProjectId={undefined}
      >
        <Header title={userGroup.name + " Users "} />
        <main className="px-8 py-12">
          <div className="float-right align-middle">
            <Link
              href={"/usergroups/" + userGroup.id + "/users/create"}
              passHref={true}
            >
              <button
                type="button"
                className="inline-flex items-center px-3 py-1 border border-transparent rounded-md shadow-sm text-sm font-medium text-white bg-indigo-600 hover:bg-indigo-700 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-indigo-500"
              >
                <DocumentAddIcon
                  className="-ml-1 mr-1 h-5 w-5"
                  aria-hidden="true"
                />
                Add User
              </button>
            </Link>
          </div>
          <div className="flex flex-col clear-both">
            <div className="-my-2 mt-1 overflow-x-auto sm:-mx-6 lg:-mx-8">
              <div className="py-2 align-middle inline-block min-w-full sm:px-6 lg:px-8">
                <div className="shadow overflow-hidden border-b border-gray-200 sm:rounded-lg">
                  <table className="min-w-full divide-y divide-gray-200">
                    <thead className="bg-gray-50">
                      <tr>
                        <th
                          scope="col"
                          className="px-6 py-3 text-left text-xs font-medium text-gray-500 uppercase tracking-wider"
                        >
                          Address
                        </th>
                        <th
                          scope="col"
                          className="px-6 py-3 text-left text-xs font-medium text-gray-500 uppercase tracking-wider"
                        >
                          Name
                        </th>
                        <th
                          scope="col"
                          className="px-6 py-3 text-left text-xs font-medium text-gray-500 uppercase tracking-wider"
                        >
                          E-mail
                        </th>
                        <th
                          scope="col"
                          className="px-6 py-3 text-left text-xs font-medium text-gray-500 uppercase tracking-wider"
                        >
                          Share
                        </th>
                        <th
                          scope="col"
                          className="px-6 py-3 text-left text-xs font-medium text-gray-500 uppercase tracking-wider"
                        ></th>
                      </tr>
                    </thead>
                    <tbody className="bg-white divide-y divide-gray-200">
                      {/* NOTE(review): each row is a <Link> wrapping a <tr>, which
                          produces non-standard markup and nests a second <Link>
                          (edit icon) inside the row link — presumably relying on
                          legacy next/link behavior; verify against the Next.js
                          version in use. */}
                      {users.map((user) => {
                        return (
                          <Link
                            key={user.id}
                            href={
                              "/usergroups/" +
                              userGroup.id +
                              "/users/" +
                              user.id +
                              "/edit"
                            }
                            passHref={true}
                          >
                            <tr className="hover:bg-gray-100 cursor-pointer">
                              <td className="px-6 py-4 whitespace-nowrap">
                                <div className="text-xs text-gray-900 font-mono">
                                  {user.address}
                                </div>
                              </td>
                              <td className="px-6 py-4 whitespace-nowrap">
                                <div className="text-sm text-gray-900">
                                  {user.name}
                                </div>
                              </td>
                              <td className="px-6 py-4 whitespace-nowrap">
                                <div className="text-sm text-gray-900">
                                  {user.email}
                                </div>
                              </td>
                              <td className="px-6 py-4 whitespace-nowrap">
                                <div className="text-sm text-gray-900">
                                  {user.share}
                                </div>
                              </td>
                              <td align="right">
                                <Link
                                  href={
                                    "/usergroups/" +
                                    userGroup.id +
                                    "/users/" +
                                    user.id +
                                    "/edit"
                                  }
                                  passHref={true}
                                >
                                  <a
                                    href="#"
                                    className="text-indigo-600 hover:text-indigo-900 inline-block mr-2"
                                  >
                                    <PencilAltIcon
                                      className="h-5 w-5 text-gray-400"
                                      aria-hidden="true"
                                    />
                                  </a>
                                </Link>
                                <a
                                  href="#"
                                  onClick={(e) => confirmDeleteUser(e, user.id)}
                                  className="text-indigo-600 hover:text-indigo-900 inline-block mr-2"
                                >
                                  <TrashIcon
                                    className="h-5 w-5 text-gray-400"
                                    aria-hidden="true"
                                  />
                                </a>
                              </td>
                            </tr>
                          </Link>
                        );
                      })}
                    </tbody>
                  </table>
                </div>
              </div>
            </div>
          </div>
          {/* <div className="w-1/2 float-right border-2 border-gray-100 rounded-md mt-4 p-3">
        <p>Add Existing User</p>
        <div className="mt-1 rounded-md shadow-sm">
          <div className="relative flex-grow focus-within:z-10">
            <div className="absolute inset-y-0 left-0 pl-3 flex items-center pointer-events-none">
              <SearchIcon className="h-5 w-5 text-gray-400" aria-hidden="true" />
            </div>
            <input
              type="text"
              name="search-users"
              id="search-users"
              className="focus:ring-indigo-500 focus:border-indigo-500 rounded pl-10 sm:block text-xs border-gray-300"
              placeholder="John Doe, AaGhxCk3PCghrS7je44gdtWrKZGQiJRHmuJtZAv9xy6P"
            />
          </div>
        </div>
      </div>
      <div className="clear-both"></div> */}
          <DestructiveModal
            title="Delete User"
            message={
              "Are you sure you want to delete the user ‘" +
              (users.find((user) => user.id == userIdToDelete)?.name ??
                "Unknown") +
              "’? This action cannot be undone."
            }
            deleteAction={() => {
              deleteUser();
            }}
            cancelAction={() => {
              cancelDeleteUser();
            }}
            show={deleteModalOpen}
          />
        </main>
      </Layout>
    );
  }
}
export const getServerSideProps: GetServerSideProps = async (context) => {
try {
const userGroupId = context.query.groupId?.toString();
if (userGroupId) {
const projects = await Projects.all();
const userGroup = await UserGroups.withId(userGroupId);
const users = await Users.all(userGroupId);
return {
props: {
projects,
userGroup,
users,
},
};
}
} catch (error) {
console.log("Error: ", error);
}
return {
props: {},
};
}; | the_stack |
import 'reflect-metadata'
import { join } from 'path'
import { MarkOptional } from 'ts-essentials'
import { Filesystem } from '@poppinss/dev-utils'
import { LucidModel } from '@ioc:Adonis/Lucid/Orm'
import { RedisManagerContract } from '@ioc:Adonis/Addons/Redis'
import { HttpContextContract } from '@ioc:Adonis/Core/HttpContext'
import { ApplicationContract } from '@ioc:Adonis/Core/Application'
import { DatabaseContract, QueryClientContract } from '@ioc:Adonis/Lucid/Database'
import { OATGuard } from '../src/Guards/Oat'
import { SessionGuard } from '../src/Guards/Session'
import { BasicAuthGuard } from '../src/Guards/BasicAuth'
import { LucidProvider } from '../src/UserProviders/Lucid'
import { DatabaseProvider } from '../src/UserProviders/Database'
import { TokenRedisProvider } from '../src/TokenProviders/Redis'
import { TokenDatabaseProvider } from '../src/TokenProviders/Database'
import {
LucidProviderModel,
LucidProviderConfig,
LucidProviderContract,
TokenProviderContract,
DatabaseProviderConfig,
DatabaseProviderContract,
DatabaseTokenProviderConfig,
} from '@ioc:Adonis/Addons/Auth'
import { Application } from '@adonisjs/core/build/standalone'
// Shared in-memory-backed test filesystem rooted at ./__app; all generated
// config files and sqlite databases live under this path.
export const fs = new Filesystem(join(__dirname, '__app'))
/**
 * Creates and boots a fresh AdonisJS application inside the test filesystem.
 *
 * Writes the minimal config files (app, hash, session, database, auth, redis)
 * required by the registered providers, then sets up, registers and boots the
 * application.
 *
 * FIX: the generated `config/app.ts` previously contained
 * `export const appKey = '…',` — the trailing comma made the emitted file a
 * syntax error. The comma has been removed.
 *
 * @param additionalProviders Extra providers to register besides the defaults.
 * @param environment The application environment, defaults to 'test'.
 */
export async function setupApplication(
  additionalProviders?: string[],
  environment: 'web' | 'repl' | 'test' = 'test'
) {
  await fs.add('.env', '')
  await fs.add(
    'config/app.ts',
    `
    export const appKey = 'averylong32charsrandomsecretkey'
    export const http = {
      cookie: {},
      trustProxy: () => true,
    }
    `
  )
  await fs.add(
    'config/hash.ts',
    `
    const hashConfig = {
      default: 'bcrypt' as const,
      list: {
        bcrypt: {
          driver: 'bcrypt',
          rounds: 10,
        },
      },
    }
    export default hashConfig
    `
  )
  await fs.add(
    'config/session.ts',
    `
    const sessionConfig = {
      driver: 'cookie',
      cookieName: 'adonis-session',
      clearWithBrowser: false,
      age: '2h',
      cookie: {
        path: '/',
      },
    }
    export default sessionConfig
    `
  )
  await fs.add(
    'config/database.ts',
    `const databaseConfig = {
      connection: 'primary',
      connections: {
        primary: {
          client: 'sqlite3',
          connection: {
            filename: '${join(fs.basePath, 'primary.sqlite3')}',
          },
        },
        secondary: {
          client: 'sqlite3',
          connection: {
            filename: '${join(fs.basePath, 'secondary.sqlite3')}',
          },
        },
      },
    }
    export default databaseConfig`
  )
  await fs.add(
    'config/auth.ts',
    `const authConfig = {
      guard: 'web',
      guards: {
        web: {
        },
      }
    }
    export default authConfig`
  )
  await fs.add(
    'config/redis.ts',
    `const redisConfig = {
      connection: 'local',
      connections: {
        local: {},
        localDb1: {
          db: '2'
        }
      }
    }
    export default redisConfig`
  )
  const app = new Application(fs.basePath, environment, {
    aliases: {
      App: './app',
    },
    providers: [
      '@adonisjs/core',
      '@adonisjs/repl',
      '@adonisjs/session',
      '@adonisjs/lucid',
      '@adonisjs/redis',
    ].concat(additionalProviders || []),
  })
  await app.setup()
  await app.registerProviders()
  await app.bootProviders()
  return app
}
/**
 * Creates the `users` table on the given connection.
 */
async function createUsersTable(client: QueryClientContract) {
  await client.schema.createTable('users', (t) => {
    t.increments('id').notNullable().primary()
    t.string('username').notNullable().unique()
    t.string('email').notNullable().unique()
    t.string('password')
    t.string('remember_me_token').nullable()
    t.boolean('is_active').notNullable().defaultTo(1)
    t.string('country').notNullable().defaultTo('IN')
  })
}
/**
 * Creates the `api_tokens` table on the given connection.
 */
async function createTokensTable(client: QueryClientContract) {
  await client.schema.createTable('api_tokens', (t) => {
    t.increments('id').notNullable().primary()
    t.integer('user_id').notNullable().unsigned()
    t.string('name').notNullable()
    t.string('type').notNullable()
    t.string('token').notNullable()
    t.timestamp('expires_at', { useTz: true }).nullable()
    t.string('ip_address').nullable()
    t.string('device_name').nullable()
    t.timestamps(true)
  })
}
/**
 * Returns the default config for the lucid provider, taking the model from the
 * given (partial) config.
 */
export function getLucidProviderConfig<User extends LucidProviderModel>(
  config: MarkOptional<LucidProviderConfig<User>, 'driver' | 'uids' | 'identifierKey' | 'user'>
) {
  const defaults: LucidProviderConfig<User> = {
    driver: 'lucid' as const,
    identifierKey: 'id',
    uids: ['username', 'email' as any],
    model: config.model,
  }
  return defaults
}
/**
 * Returns the default config for the database provider.
 */
export function getDatabaseProviderConfig() {
  const defaults: DatabaseProviderConfig = {
    driver: 'database' as const,
    identifierKey: 'id',
    usersTable: 'users',
    uids: ['username', 'email'],
  }
  return defaults
}
/**
 * Performs the initial setup: creates the users and api_tokens tables on both
 * the primary and the secondary connection.
 */
export async function setup(application: ApplicationContract) {
  const db = application.container.use('Adonis/Lucid/Database')
  for (const client of [db.connection(), db.connection('secondary')]) {
    await createUsersTable(client)
    await createTokensTable(client)
  }
}
/**
 * Performs cleanup: drops the users tables, closes every db connection and
 * removes the test filesystem.
 */
export async function cleanup(application: ApplicationContract) {
  const database = application.container.use('Adonis/Lucid/Database')
  for (const connectionName of [undefined, 'secondary']) {
    await database.connection(connectionName).schema.dropTableIfExists('users')
  }
  await database.manager.closeAll()
  await fs.cleanup()
}
/**
 * Resets database state by truncating the users and api_tokens tables on both
 * connections.
 */
export async function reset(application: ApplicationContract) {
  const db = application.container.use('Adonis/Lucid/Database')
  for (const client of [db.connection(), db.connection('secondary')]) {
    await client.truncate('users')
    await client.truncate('api_tokens')
  }
}
/**
 * Returns an instance of the lucid provider, merging the given config over the
 * defaults.
 */
export function getLucidProvider<User extends LucidProviderModel>(
  application: ApplicationContract,
  config: MarkOptional<LucidProviderConfig<User>, 'driver' | 'uids' | 'identifierKey' | 'user'>
) {
  const normalizedConfig = {
    ...getLucidProviderConfig(config),
    ...config,
  } as LucidProviderConfig<User>
  return new LucidProvider(application, normalizedConfig) as unknown as LucidProviderContract<User>
}
/**
 * Returns an instance of the database provider, merging the given config over
 * the defaults.
 */
export function getDatabaseProvider(
  application: ApplicationContract,
  config: Partial<DatabaseProviderConfig>
) {
  const normalizedConfig = {
    ...getDatabaseProviderConfig(),
    ...config,
  } as DatabaseProviderConfig
  const db = application.container.use('Adonis/Lucid/Database')
  return new DatabaseProvider(application, normalizedConfig, db) as unknown as DatabaseProviderContract<any>
}
/**
 * Returns an instance of the session guard for tests.
 */
export function getSessionDriver(
  app: ApplicationContract,
  provider: DatabaseProviderContract<any> | LucidProviderContract<any>,
  providerConfig: DatabaseProviderConfig | LucidProviderConfig<any>,
  ctx: HttpContextContract,
  name?: string
) {
  const guardConfig = {
    driver: 'session' as const,
    loginRoute: '/login',
    provider: providerConfig,
  }
  const emitter = app.container.use('Adonis/Core/Event')
  return new SessionGuard(name || 'session', guardConfig, emitter, provider, ctx)
}
/**
 * Returns an instance of the api tokens (OAT) guard for tests. Falls back to a
 * database token provider on the `api_tokens` table when no token provider
 * config is given.
 */
export function getApiTokensGuard(
  app: ApplicationContract,
  provider: DatabaseProviderContract<any> | LucidProviderContract<any>,
  providerConfig: DatabaseProviderConfig | LucidProviderConfig<any>,
  ctx: HttpContextContract,
  tokensProvider: TokenProviderContract,
  tokenProviderConfig?: DatabaseTokenProviderConfig
) {
  const fallbackTokenProviderConfig = {
    driver: 'database' as const,
    table: 'api_tokens',
  }
  const guardConfig = {
    driver: 'oat' as const,
    tokenProvider: tokenProviderConfig || fallbackTokenProviderConfig,
    provider: providerConfig,
  }
  const emitter = app.container.use('Adonis/Core/Event')
  return new OATGuard('api', guardConfig, emitter, provider, ctx, tokensProvider)
}
/**
 * Builds a basic-auth guard named 'basic' using the 'Login' realm.
 *
 * @param app AdonisJS application used to resolve the event emitter.
 * @param provider User provider backing the guard.
 * @param providerConfig Config matching the provider above.
 * @param ctx HTTP context the guard operates on.
 */
export function getBasicAuthGuard(
  app: ApplicationContract,
  provider: DatabaseProviderContract<any> | LucidProviderContract<any>,
  providerConfig: DatabaseProviderConfig | LucidProviderConfig<any>,
  ctx: HttpContextContract
) {
  const guardConfig = {
    driver: 'basic' as const,
    realm: 'Login',
    provider: providerConfig,
  }
  return new BasicAuthGuard(
    'basic',
    guardConfig,
    app.container.use('Adonis/Core/Event'),
    provider,
    ctx
  )
}
/**
 * Returns a database-backed token provider persisting to the `api_tokens` table.
 */
export function getTokensDbProvider(db: DatabaseContract) {
  const tokenConfig = {
    table: 'api_tokens',
    driver: 'database' as const,
  }
  return new TokenDatabaseProvider(tokenConfig, db)
}
/**
 * Returns a redis-backed token provider using the 'local' redis connection.
 */
export function getTokensRedisProvider(redis: RedisManagerContract) {
  const tokenConfig = {
    driver: 'redis' as const,
    redisConnection: 'local',
  }
  return new TokenRedisProvider(tokenConfig, redis)
}
/**
 * Defines and boots a `User` lucid model with the columns the auth tests rely
 * on (`id` as primary key, plus username/email/password/rememberMeToken).
 *
 * @param BaseModel The lucid base model class to extend.
 * @returns The booted model class.
 */
export function getUserModel(BaseModel: LucidModel) {
  const UserModel = class User extends BaseModel {
    public id: number
    public username: string
    public password: string
    public email: string
    public rememberMeToken: string
  }
  UserModel.boot()
  UserModel.$addColumn('id', { isPrimary: true })
  // Remaining columns share empty options, so register them in one pass
  for (const column of ['username', 'email', 'password', 'rememberMeToken']) {
    UserModel.$addColumn(column, {})
  }
  return UserModel
}
/**
 * Signs a value and formats it as a `name=s:<signature>` cookie header pair.
 */
export function signCookie(app: ApplicationContract, value: any, name: string) {
  const verifier = app.container.use('Adonis/Core/Encryption').verifier
  const signed = verifier.sign(value, undefined, name)
  return `${name}=s:${signed}`
}
/**
 * Encrypts a value and formats it as a `name=e:<ciphertext>` cookie header pair.
 */
export function encryptCookie(app: ApplicationContract, value: any, name: string) {
  const encryption = app.container.use('Adonis/Core/Encryption')
  const encrypted = encryption.encrypt(value, undefined, name)
  return `${name}=e:${encrypted}`
}
/**
 * Extracts the encrypted payload from a cookie header (dropping the `e:`
 * prefix added by encryptCookie) and decrypts it.
 */
export function decryptCookie(app: ApplicationContract, cookie: any, name: string) {
  const [firstPair] = cookie.split(';')
  // Strip "name=" then the two-char "e:" marker
  const payload = decodeURIComponent(firstPair).replace(`${name}=`, '').slice(2)
  return app.container.use('Adonis/Core/Encryption').decrypt<any>(payload, name)
}
/**
 * Extracts the signed payload from a cookie header (dropping the `s:` prefix
 * added by signCookie) and verifies/unsigns it.
 */
export function unsignCookie(app: ApplicationContract, cookie: any, name: string) {
  const [firstPair] = cookie.split(';')
  // Strip "name=" then the two-char "s:" marker
  const payload = decodeURIComponent(firstPair).replace(`${name}=`, '').slice(2)
  return app.container.use('Adonis/Core/Encryption').verifier.unsign<any>(payload, name)
}
/**
 * Installs a one-shot stub method on `collection`. Invoking it forwards the
 * arguments to `verifier` and then removes the stub from the object.
 */
export function mockAction(collection: any, name: string, verifier: any) {
  const selfRemovingStub = function (...args: any[]) {
    verifier(...args)
    // One-shot: the stub removes itself after the first call
    delete collection[name]
  }
  collection[name] = selfRemovingStub
}
/**
* Mocks property on a object
*/
export function mockProperty(collection: any, name: string, value: any) {
Object.defineProperty(collection, name, {
get() {
delete collection[name]
return value
},
enumerable: true,
configurable: true,
})
} | the_stack |
import * as path from 'path';
import sinon from 'sinon';
import tap from 'tap';
import * as dbutils from '../src/db/utils';
import controllers from '../src/index';
import readConfig from '../src/read-config';
import { IdPConfig, JacksonOption } from '../src/typings';
import { saml_config } from './fixture';
// Shared API controller instance, assigned once in tap.before below
let apiController;
// Deterministic clientID: keyDigest is stubbed to return this in 'when the request is good'
const CLIENT_ID = '75edb050796a0eb1cf2cfb0da7245f85bc50baa7';
// Provider expected to be parsed out of the fixture metadata
const PROVIDER = 'accounts.google.com';
// Jackson options backed by the in-memory DB engine, so tests need no external store
const OPTIONS = <JacksonOption>{
externalUrl: 'https://my-cool-app.com',
samlAudience: 'https://saml.boxyhq.com',
samlPath: '/sso/oauth/saml',
db: {
engine: 'mem',
},
};
// Boot the controllers once for the entire suite
tap.before(async () => {
const controller = await controllers(OPTIONS);
apiController = controller.apiController;
});
// NOTE(review): forces the process to exit on teardown — presumably because some
// handle keeps the event loop alive; confirm before removing.
tap.teardown(async () => {
process.exit(0);
});
tap.test('controller/api', async (t) => {
const metadataPath = path.join(__dirname, '/data/metadata');
// NOTE(review): 'config' is read here but never referenced below — confirm
// whether it can be removed or is kept for its side effects in readConfig.
const config = await readConfig(metadataPath);
// Delete the config created by each subtest so tests stay independent
t.afterEach(async () => {
await apiController.deleteConfig({
tenant: saml_config.tenant,
product: saml_config.product,
});
});
// Config creation: every required-field validation failure, then the happy path
t.test('Create the config', async (t) => {
t.test('when required fields are missing or invalid', async (t) => {
t.test('when `encodedRawMetadata` is empty', async (t) => {
const body: Partial<IdPConfig> = Object.assign({}, saml_config);
delete body['encodedRawMetadata'];
try {
await apiController.config(body);
t.fail('Expecting JacksonError.');
} catch (err: any) {
t.equal(err.message, 'Please provide rawMetadata or encodedRawMetadata');
t.equal(err.statusCode, 400);
}
t.end();
});
t.test('when `defaultRedirectUrl` is empty', async (t) => {
const body: Partial<IdPConfig> = Object.assign({}, saml_config);
delete body['defaultRedirectUrl'];
try {
await apiController.config(body as IdPConfig);
t.fail('Expecting JacksonError.');
} catch (err: any) {
t.equal(err.message, 'Please provide a defaultRedirectUrl');
t.equal(err.statusCode, 400);
}
t.end();
});
t.test('when `redirectUrl` is empty', async (t) => {
const body: Partial<IdPConfig> = Object.assign({}, saml_config);
delete body['redirectUrl'];
try {
await apiController.config(body as IdPConfig);
t.fail('Expecting JacksonError.');
} catch (err: any) {
t.equal(err.message, 'Please provide redirectUrl');
t.equal(err.statusCode, 400);
}
t.end();
});
// NOTE(review): the subtests below share and mutate the same `body`, so their
// effects are cumulative — reordering them would change what each one tests.
t.test('when defaultRedirectUrl or redirectUrl is invalid', async (t) => {
const body: Partial<IdPConfig> = Object.assign({}, saml_config);
t.test('when defaultRedirectUrl is invalid', async (t) => {
body['defaultRedirectUrl'] = 'http://localhost::';
try {
await apiController.config(body as IdPConfig);
t.fail('Expecting JacksonError.');
} catch (err: any) {
t.equal(err.message, 'defaultRedirectUrl is invalid');
t.equal(err.statusCode, 400);
}
});
t.test('when redirectUrl list is huge', async (t) => {
// 101 entries: one past the presumed 100-url limit — confirm against the controller
body['redirectUrl'] = Array(101).fill('http://localhost:8080');
try {
await apiController.config(body as IdPConfig);
t.fail('Expecting JacksonError.');
} catch (err: any) {
t.equal(err.message, 'Exceeded maximum number of allowed redirect urls');
t.equal(err.statusCode, 400);
}
});
t.test('when redirectUrl list contains invalid', async (t) => {
body['redirectUrl'] = '["http://localhost:8000","http://localhost::8080"]';
try {
await apiController.config(body as IdPConfig);
t.fail('Expecting JacksonError.');
} catch (err: any) {
t.equal(err.message, 'redirectUrl is invalid');
t.equal(err.statusCode, 400);
}
});
});
t.test('when `tenant` is empty', async (t) => {
const body: Partial<IdPConfig> = Object.assign({}, saml_config);
delete body['tenant'];
try {
await apiController.config(body as IdPConfig);
t.fail('Expecting JacksonError.');
} catch (err: any) {
t.equal(err.message, 'Please provide tenant');
t.equal(err.statusCode, 400);
}
t.end();
});
t.test('when `product` is empty', async (t) => {
const body: Partial<IdPConfig> = Object.assign({}, saml_config);
delete body['product'];
try {
await apiController.config(body as IdPConfig);
t.fail('Expecting JacksonError.');
} catch (err: any) {
t.equal(err.message, 'Please provide product');
t.equal(err.statusCode, 400);
}
t.end();
});
t.test('when `encodedRawMetadata` is not a valid XML', async (t) => {
const body = Object.assign({}, saml_config);
body['encodedRawMetadata'] = Buffer.from('not a valid XML', 'utf8').toString('base64');
try {
await apiController.config(body);
t.fail('Expecting Error.');
} catch (err: any) {
t.match(err.message, /Non-whitespace before first tag./);
}
t.end();
});
});
// Happy path: keyDigest is stubbed so the generated clientID is deterministic
t.test('when the request is good', async (t) => {
const body = Object.assign({}, saml_config);
const kdStub = sinon.stub(dbutils, 'keyDigest').returns(CLIENT_ID);
const response = await apiController.config(body);
t.ok(kdStub.called);
t.equal(response.clientID, CLIENT_ID);
t.equal(response.idpMetadata.provider, PROVIDER);
const savedConfig = await apiController.getConfig({
clientID: CLIENT_ID,
});
t.equal(savedConfig.name, 'testConfig');
kdStub.restore();
t.end();
});
t.end();
});
// Config update: required-credential validation failures, then a successful update
t.test('Update the config', async (t) => {
const body: Partial<IdPConfig> = Object.assign({}, saml_config);
t.test('When clientID is missing', async (t) => {
// NOTE(review): destructures 'client_secret', but the other subtests destructure
// 'clientSecret' from the same call — this is likely always undefined. Harmless
// here since the expected failure is the missing clientID, but confirm against
// apiController.config's return shape.
const { client_secret: clientSecret } = await apiController.config(body as IdPConfig);
try {
await apiController.updateConfig({ description: 'A new description', clientSecret });
t.fail('Expecting JacksonError.');
} catch (err: any) {
t.equal(err.message, 'Please provide clientID');
t.equal(err.statusCode, 400);
t.end();
}
});
t.test('When clientSecret is missing', async (t) => {
const { clientID } = await apiController.config(body as IdPConfig);
try {
await apiController.updateConfig({ description: 'A new description', clientID });
t.fail('Expecting JacksonError.');
} catch (err: any) {
t.equal(err.message, 'Please provide clientSecret');
t.equal(err.statusCode, 400);
t.end();
}
});
t.test('Update the name/description', async (t) => {
const { clientID, clientSecret } = await apiController.config(body as IdPConfig);
const { name, description } = await apiController.getConfig({ clientID });
t.equal(name, 'testConfig');
t.equal(description, 'Just a test configuration');
await apiController.updateConfig({
clientID,
clientSecret,
name: 'A new name',
description: 'A new description',
});
const savedConfig = await apiController.getConfig({ clientID });
t.equal(savedConfig.name, 'A new name');
t.equal(savedConfig.description, 'A new description');
t.end();
});
t.end();
});
// Config lookup: by clientID or tenant/product; unknown keys yield an empty result
t.test('Get the config', async (t) => {
t.test('when valid request', async (t) => {
const body: Partial<IdPConfig> = Object.assign({}, saml_config);
await apiController.config(body as IdPConfig);
const savedConfig = await apiController.getConfig(body);
t.equal(savedConfig.name, 'testConfig');
t.end();
});
t.test('when invalid request', async (t) => {
let response;
const body: Partial<IdPConfig> = Object.assign({}, saml_config);
await apiController.config(body);
// Empty body
try {
await apiController.getConfig({});
t.fail('Expecting Error.');
} catch (err: any) {
t.match(err.message, 'Please provide `clientID` or `tenant` and `product`.');
}
// Invalid clientID
response = await apiController.getConfig({
clientID: 'an invalid clientID',
});
t.match(response, {});
// Invalid tenant and product combination
response = await apiController.getConfig({
tenant: 'demo.com',
product: 'desk',
});
t.match(response, {});
t.end();
});
t.end();
});
// Config deletion: successful delete, then failures for bad/missing credentials
t.test('Delete the config', async (t) => {
t.test('when valid request', async (t) => {
const body: Partial<IdPConfig> = Object.assign({}, saml_config);
const { clientID, clientSecret } = await apiController.config(body);
await apiController.deleteConfig({
clientID,
clientSecret,
});
// Deleted config should no longer be retrievable
const response = await apiController.getConfig({
clientID,
});
t.match(response, {});
t.end();
});
t.test('when invalid request', async (t) => {
const body: Partial<IdPConfig> = Object.assign({}, saml_config);
const { clientID, clientSecret } = await apiController.config(body);
// Empty body
try {
await apiController.deleteConfig({});
t.fail('Expecting Error.');
} catch (err: any) {
t.match(err.message, 'Please provide `clientID` and `clientSecret` or `tenant` and `product`.');
}
// Invalid clientID or clientSecret
try {
await apiController.deleteConfig({
clientID,
clientSecret: 'invalid client secret',
});
t.fail('Expecting Error.');
} catch (err: any) {
t.match(err.message, 'clientSecret mismatch');
}
t.end();
});
t.end();
});
t.end();
});
declare let sanitizeHtml;
// This private has technically been deprecated, but is present in all major browsers as of 2/12/14 and offers
// the best perf for the UTF byte manipulation we need for truncation and byte size estimation.
// DO NOT USE IT WITHOUT CHECKING IF IT EXISTS -- in case browser vendors actually remove them
// ReSharper disable once InconsistentNaming
declare function unescape(s: string): string;
import {Constants} from "../constants";
import {ObjectUtils} from "../objectUtils";
import {SupportedVideoDomains, VideoUtils} from "./videoUtils";
import {VideoExtractorFactory} from "./VideoExtractorFactory";
/**
 * Pair of iframe src values recorded for each embedded video:
 * the iframe's live `src` and the original page url kept in the
 * `data-original-src` attribute.
 */
export interface EmbeddedVideoIFrameSrcs {
srcAttribute: string;
dataOriginalSrcAttribute: string;
}
/**
* Dom specific Helper utility methods
*/
export class DomUtils {
// Canonical lookup of html tag names used throughout the clean-up/sanitization
// logic below, so tag strings are referenced from one place instead of being
// scattered as literals.
public static tags = {
a: "a",
b: "b",
applet: "applet",
article: "article",
audio: "audio",
base: "base",
body: "body",
br: "br",
button: "button",
canvas: "canvas",
center: "center",
cite: "cite",
code: "code",
del: "del",
div: "div",
em: "em",
embed: "embed",
figure: "figure",
font: "font",
h1: "h1",
h2: "h2",
h3: "h3",
h4: "h4",
h5: "h5",
h6: "h6",
head: "head",
header: "header",
hr: "hr",
html: "html",
i: "i",
iframe: "iframe",
img: "img",
input: "input",
li: "li",
link: "link",
main: "main",
map: "map",
menu: "menu",
menuitem: "menuitem",
meta: "meta",
meter: "meter",
noscript: "noscript",
object: "object",
ol: "ol",
p: "p",
pre: "pre",
progress: "progress",
script: "script",
span: "span",
source: "source",
strike: "strike",
strong: "strong",
style: "style",
sub: "sub",
sup: "sup",
svg: "svg",
table: "table",
td: "td",
title: "title",
tr: "tr",
u: "u",
ul: "ul",
video: "video"
};
// See the OneNote Dev Center API Reference for a list of supported attributes and tags
// https://dev.onenote.com/docs#/introduction/html-tag-support-for-pages
// Attribute allow-list keyed by tag name ("*" applies to every tag); passed to
// sanitize-html in cleanHtml below.
protected static attributesAllowedByOnml: { [index: string]: string[] } = {
"a": ["href", "name", "target"],
"img": ["src"],
"*": ["src", "background-color", "color", "font-family", "font-size", "data*", "alt", "height", "width", "style", "id", "type"]
};
// Table-structure tags kept through sanitization
protected static tableTags = [
DomUtils.tags.table,
DomUtils.tags.td,
DomUtils.tags.tr
];
// Inline text-formatting tags kept through sanitization
protected static markupTags = [
DomUtils.tags.b,
DomUtils.tags.em,
DomUtils.tags.strong,
DomUtils.tags.i,
DomUtils.tags.u,
DomUtils.tags.strike,
DomUtils.tags.del,
DomUtils.tags.sup,
DomUtils.tags.sub,
DomUtils.tags.cite,
DomUtils.tags.font,
DomUtils.tags.pre,
DomUtils.tags.code
];
// Structural/content tags kept through sanitization (see cleanHtml)
protected static htmlTags = [
DomUtils.tags.html,
DomUtils.tags.head,
DomUtils.tags.title,
DomUtils.tags.meta,
DomUtils.tags.body,
DomUtils.tags.div,
DomUtils.tags.span,
DomUtils.tags.article,
DomUtils.tags.figure,
DomUtils.tags.header,
DomUtils.tags.main,
DomUtils.tags.center,
DomUtils.tags.iframe,
DomUtils.tags.a,
DomUtils.tags.p,
DomUtils.tags.br,
DomUtils.tags.h1,
DomUtils.tags.h2,
DomUtils.tags.h3,
DomUtils.tags.h4,
DomUtils.tags.h5,
DomUtils.tags.h6,
DomUtils.tags.ul,
DomUtils.tags.ol,
DomUtils.tags.li,
DomUtils.tags.img,
DomUtils.tags.object,
DomUtils.tags.video
];
// TODO: write a module test to make sure these tagsNotSupportedInOnml and the tags above have no intersection
// Tags with no ONML equivalent; removed outright by removeElementsNotSupportedInOnml
protected static tagsNotSupportedInOnml = [
DomUtils.tags.applet,
DomUtils.tags.audio,
DomUtils.tags.button,
DomUtils.tags.canvas,
DomUtils.tags.embed,
DomUtils.tags.hr,
DomUtils.tags.input,
DomUtils.tags.link,
DomUtils.tags.map,
DomUtils.tags.menu,
DomUtils.tags.menuitem,
DomUtils.tags.meter,
DomUtils.tags.noscript,
DomUtils.tags.progress,
DomUtils.tags.script,
DomUtils.tags.source,
DomUtils.tags.style,
DomUtils.tags.svg,
DomUtils.tags.video
];
/**
 * Given an HTML document in string form, returns an HTML document in string
 * form with attributes and content scrubbed down to what ONML supports,
 * while preserving document structure.
 *
 * @param contentInHtml Raw html markup to sanitize.
 * @returns The sanitized markup.
 */
public static cleanHtml(contentInHtml: string): string {
	// Allow the union of structural, markup and table tags; sanitize-html drops
	// everything else. (The original also built an 'allAttributes' array here
	// that was never used — dead code, removed.)
	let tags = DomUtils.htmlTags.concat(DomUtils.markupTags).concat(DomUtils.tableTags);
	let sanitizedHtml = sanitizeHtml(contentInHtml, {
		allowedTags: tags,
		allowedAttributes: DomUtils.attributesAllowedByOnml,
		// "data" is allowed so base64-inlined images survive sanitization
		allowedSchemes: sanitizeHtml.defaults.allowedSchemes.concat(["data"]),
		allowedClasses: {
			"*": ["MainArticleContainer"]
		}
	});
	return sanitizedHtml;
}
/**
 * Extensions can inject <style> blocks that embed base64 binaries (typically
 * fonts), which can be very large. Drops any style tag whose content
 * references such inlined application data.
 */
public static removeStylesWithBase64EncodedBinaries(doc: Document): void {
	DomUtils.domReplacer(doc, "style", (node: HTMLElement) => {
		// Keep the style tag only when it carries no embedded binary payload
		let hasEmbeddedBinary = node.innerHTML.indexOf("data:application") !== -1;
		return hasEmbeddedBinary ? undefined : node;
	});
}
/**
 * Strips or converts elements that have no ONML equivalent: elements with no
 * sensible conversion are removed outright, while structural containers are
 * replaced with <div>s that keep their (cleaned) inner html.
 */
public static removeElementsNotSupportedInOnml(doc: Document): void {
	// For elements that cannot be converted into something equivalent in ONML, we remove them ...
	DomUtils.domReplacer(doc, DomUtils.tagsNotSupportedInOnml.join());
	let tagsToTurnIntoDiv = [DomUtils.tags.main, DomUtils.tags.article, DomUtils.tags.figure, DomUtils.tags.header, DomUtils.tags.center];
	// ... and for everything else, we replace them with an equivalent, preserving the inner HTML
	DomUtils.domReplacer(doc, tagsToTurnIntoDiv.join(), (node: HTMLElement) => {
		// Create the replacement in the document that will own it; the original
		// used the global `document`, creating the node in the wrong document
		// (modern browsers auto-adopt on insert, but this is the correct owner).
		let div = doc.createElement("div");
		div.innerHTML = DomUtils.cleanHtml(node.innerHTML);
		return div;
	});
}
/**
 * Replaces every node matching querySelector with getReplacement(node, index).
 * A falsy replacement removes the node; returning the same node leaves it in
 * place. Exceptions from individual replacements are swallowed so one bad
 * node (e.g. a dirty canvas) does not abort the rest.
 */
public static domReplacer(doc: Document, querySelector: string, getReplacement: (oldNode: Node, index: number) => Node = () => undefined) {
	let matches: NodeList = doc.querySelectorAll(querySelector);
	for (let index = 0; index < matches.length; index++) {
		let original: Node = matches[index];
		try {
			let replacement = getReplacement(original, index);
			if (!replacement) {
				original.parentNode.removeChild(original);
			} else if (replacement !== original) {
				original.parentNode.replaceChild(replacement, original);
			}
		} catch (e) {
			// Replacement threw (e.g. dirty canvas): leave the original node alone.
		}
	}
}
/**
 * Async counterpart of domReplacer: getReplacement resolves to the replacement
 * node (falsy removes the node; the same node leaves it alone). The returned
 * promise resolves once every matched node has been processed. Individual
 * failures — rejections from getReplacement OR errors thrown while swapping
 * the node in — are swallowed so one bad node cannot stall the operation.
 */
public static domReplacerAsync(doc: Document, querySelector: string, getReplacement: (oldNode: Node, index: number) => Promise<Node> = () => Promise.resolve(undefined)): Promise<void> {
	return new Promise<void>((resolve) => {
		let nodes: NodeList = doc.querySelectorAll(querySelector);
		let doneCount = 0;
		if (nodes.length === 0) {
			resolve();
			return;
		}
		for (let i = 0; i < nodes.length; i++) {
			let oldNode: Node = nodes[i];
			getReplacement(oldNode, i).then((newNode) => {
				if (!newNode) {
					oldNode.parentNode.removeChild(oldNode);
				} else if (oldNode !== newNode) {
					oldNode.parentNode.replaceChild(newNode, oldNode);
				}
			}).catch(() => {
				// There are some cases (like dirty canvases) where getting or applying
				// the replacement throws. We catch it, thus leaving the original.
				// (The original two-argument .then(onOk, onErr) only caught rejections
				// from getReplacement; an error thrown by removeChild/replaceChild
				// skipped the counting step below and left this promise pending forever.)
			}).then(() => {
				if (++doneCount === nodes.length) {
					resolve();
				}
			});
		}
	});
}
/**
 * Determines the clip content type for the page: EnhancedUrl when the page is
 * (or embeds) a PDF, otherwise Html.
 */
public static getPageContentType(doc: Document): OneNoteApi.ContentType {
	// An anchor element gives us the parsed pathname of the page url for free
	let urlParser = doc.createElement("a");
	urlParser.href = doc.URL;
	if (/\.pdf$/i.test(urlParser.pathname)) {
		return OneNoteApi.ContentType.EnhancedUrl;
	}
	// Check for a PDF embed element. The cast to any is needed because the type
	// property is not in the TS lib despite being part of the HTML5 standard;
	// Edge also does not respect that standard as of 10/13/16.
	let embedElement = doc.querySelector("embed") as HTMLEmbedElement;
	if (embedElement && /application\/pdf/i.test((<any>embedElement).type)) {
		return OneNoteApi.ContentType.EnhancedUrl;
	}
	// PDFs rendered with PDF.js expose a PDFJS object on the window
	let renderedWithPdfJs = window && (<any>window).PDFJS;
	return renderedWithPdfJs ? OneNoteApi.ContentType.EnhancedUrl : OneNoteApi.ContentType.Html;
}
/**
 * Returns the best canonical url for the document. Some sites mistakenly
 * declare several canonical urls; the shortest is chosen, since
 * canonicalization usually strips directory indexes, fragments and query
 * variables. Falls back to doc.URL when none are declared.
 */
public static fetchCanonicalUrl(doc: Document): string {
	let declarations = doc.querySelectorAll("link[rel=canonical]");
	if (declarations.length === 0) {
		return doc.URL;
	}
	let shortest: string = (<HTMLBaseElement>declarations[0]).href;
	for (let i = 1; i < declarations.length; i++) {
		let candidate: string = (<HTMLBaseElement>declarations.item(i)).href;
		if (candidate.length < shortest.length) {
			shortest = candidate;
		}
	}
	return shortest;
}
/**
 * Get a clone of the specified DOM with the clipper UI and other unwanted tags
 * removed, canvases converted to images, and size/base-url information added.
 * The input document is not mutated.
 *
 * @returns The cleaned DOM serialized as a string
 */
public static getCleanDomOfCurrentPage(originalDoc: Document): string {
	let workingCopy = DomUtils.cloneDocument(originalDoc);
	// Canvas pixel data only exists on the original doc, so pass both
	DomUtils.convertCanvasElementsToImages(workingCopy, originalDoc);
	DomUtils.addBaseTagIfNecessary(workingCopy, originalDoc.location);
	DomUtils.addImageSizeInformationToDom(workingCopy);
	DomUtils.removeUnwantedItems(workingCopy);
	return DomUtils.getDomString(workingCopy);
}
/**
 * Runs all of the DOM scrubbing passes in place: base64-bearing styles,
 * clipper-injected elements, unsupported elements/attributes, and local-file
 * hrefs are removed from the document.
 */
public static removeUnwantedItems(doc: Document): void {
DomUtils.removeStylesWithBase64EncodedBinaries(doc);
DomUtils.removeClipperElements(doc);
DomUtils.removeUnwantedElements(doc);
DomUtils.removeUnwantedAttributes(doc);
DomUtils.removeUnsupportedHrefs(doc);
}
/**
 * Adds any additional styling to the preview elements
 */
public static addPreviewContainerStyling(previewElement: HTMLElement) {
// What this does is add a little extra padding after each of the paragraphs in an article. This
// makes what we are saving into OneNote match what we see in the preview (which is how browsers
// render the paragraph elements).
// Note: setAttribute replaces the element's whole inline style attribute.
previewElement.setAttribute("style", "margin-bottom: 16px");
}
// Attribute used on video iframes to remember the original page url
protected static dataOriginalSrcAttribute = "data-original-src";
/**
 * Adds embedded video iframes to the article preview for supported domains.
 * Resolves with the src attribute pairs that were inserted, or resolves with
 * no value when the page's domain has no video support. Rejects with a
 * serialized diagnostic payload when extraction unexpectedly fails.
 */
public static addEmbeddedVideosWhereSupported(previewElement: HTMLElement, pageContent: string, pageUrl: string): Promise<EmbeddedVideoIFrameSrcs[]> {
	let supportedDomain = VideoUtils.videoDomainIfSupported(pageUrl);
	if (!supportedDomain) {
		return Promise.resolve();
	}
	let iframes: HTMLIFrameElement[] = [];
	try {
		// Build the extractor matching the domain we are on; domains without a
		// valid extractor simply contribute no embedded videos
		let extractor = VideoExtractorFactory.createVideoExtractor(SupportedVideoDomains[supportedDomain]);
		if (extractor) {
			iframes.push(...extractor.createEmbeddedVideosFromPage(pageUrl, pageContent));
		}
	} catch (e) {
		// If we end up here, we're unexpectedly broken (e.g. vimeo schema updated,
		// or we claim support we don't actually have) — reject with enough
		// context to diagnose the failure
		return Promise.reject({ error: JSON.stringify({ doc: previewElement.outerHTML, pageContent: pageContent, message: e.message }) });
	}
	return Promise.resolve(DomUtils.addVideosToElement(previewElement, iframes));
}
/**
 * Builds the base iframe used for video embeds in OneNote. The sizing and
 * border values must be set inline, else the embed in OneNote won't respect
 * them; 600x338 preserves a 16:9 aspect ratio.
 */
public static createEmbedVideoIframe(): HTMLIFrameElement {
	let frame = document.createElement("iframe");
	frame.width = "600";
	frame.height = "338";
	frame.frameBorder = "0";
	frame.allowFullscreen = false;
	return frame;
}
/**
 * Inserts the given iframes, in order, at the top of the preview element,
 * separating them with <br> spacers. Returns the src/data-original-src pairs
 * recorded for each iframe, or no value when there is nothing to insert.
 */
private static addVideosToElement(previewElement: HTMLElement, iframeNodes: HTMLIFrameElement[]): EmbeddedVideoIFrameSrcs[] {
	if (ObjectUtils.isNullOrUndefined(previewElement) || ObjectUtils.isNullOrUndefined(iframeNodes) || iframeNodes.length === 0) {
		return;
	}
	let recordedSrcs: EmbeddedVideoIFrameSrcs[] = [];
	let lastInsertedNode: Node;
	for (let frame of iframeNodes) {
		let originalSrc = frame.getAttribute(DomUtils.dataOriginalSrcAttribute);
		if (ObjectUtils.isNullOrUndefined(frame.src) || ObjectUtils.isNullOrUndefined(originalSrc)) {
			// An iframe somehow constructed without src or data-original-src:
			// invalid, but we still want a record that it happened
			recordedSrcs.push({ srcAttribute: "", dataOriginalSrcAttribute: "" });
			continue;
		}
		lastInsertedNode = DomUtils.insertIFrame(previewElement, frame, lastInsertedNode);
		lastInsertedNode = DomUtils.insertSpacer(previewElement, lastInsertedNode.nextSibling);
		recordedSrcs.push({ srcAttribute: frame.src, dataOriginalSrcAttribute: originalSrc });
	}
	return recordedSrcs;
}
/**
 * Inserts newNode at the top of container, or — when lastInsertedNode is
 * provided — immediately after it. Returns the inserted node.
 */
private static insertIFrame(container: HTMLElement, newNode: Node, lastInsertedNode?: Node): Node {
	let referenceNode = ObjectUtils.isNullOrUndefined(lastInsertedNode)
		? container.children[0]
		: lastInsertedNode.nextSibling;
	return container.insertBefore(newNode, referenceNode);
}
/**
 * Inserts a <br/> spacer into container before referenceNode and returns the
 * inserted spacer node.
 */
private static insertSpacer(container: HTMLElement, referenceNode: Node): Node {
	let spacer = document.createElement("br");
	return container.insertBefore(spacer, referenceNode);
}
/**
 * Deep-clones the document into a new, detached document object.
 */
public static cloneDocument(originalDoc: Document): Document {
	return <Document>originalDoc.cloneNode(true);
}
/**
 * Ensures the document has a <head> and, when no <base> tag exists, adds one
 * so relative paths can resolve. Uses the current document's location when
 * none is given.
 */
public static addBaseTagIfNecessary(doc: Document, location?: Location) {
	// Some pages have no head in the DOM at all (e.g. pdfs in incognito mode)
	if (!doc.head) {
		let newHead = doc.createElement(DomUtils.tags.head);
		let htmlRoot = doc.getElementsByTagName(DomUtils.tags.html)[0] as HTMLHtmlElement;
		htmlRoot.insertBefore(newHead, htmlRoot.children[0]);
	}
	if (!location) {
		location = document.location;
	}
	let existingBases: NodeList = doc.head.getElementsByTagName(DomUtils.tags.base);
	if (existingBases.length === 0) {
		// Strip fragment and query, then trim back to the containing directory
		let baseUrl = location.href.split("#")[0].split("?")[0];
		baseUrl = baseUrl.substr(0, baseUrl.lastIndexOf("/") + 1);
		let baseTag = <HTMLBaseElement>doc.createElement(DomUtils.tags.base);
		baseTag.href = baseUrl;
		doc.head.insertBefore(baseTag, doc.head.firstChild);
	}
}
/**
 * Removes blank images (every pixel purely white or fully transparent) from
 * the DOM. Images that fail to load are kept, on the assumption that they are
 * non-blank.
 */
public static removeBlankImages(doc: Document): Promise<void> {
	return DomUtils.domReplacerAsync(doc, DomUtils.tags.img, (node: Node) => {
		return new Promise<Node>((resolve) => {
			let originalImg = <HTMLImageElement>node;
			// Re-render through a fresh Image: inspecting the original node directly
			// won't render properly and would report every pixel as (0,0,0,0)
			let probe = new Image();
			// In Firefox, a SecurityError is thrown if the image is not CORS-enabled
			probe.crossOrigin = "anonymous";
			probe.onload = () => {
				resolve(DomUtils.imageIsBlank(probe) ? undefined : node);
			};
			// onload can return a non-200 in some weird cases, so handle errors too:
			// be forgiving and assume the image is non-blank
			probe.onerror = () => {
				resolve(node);
			};
			// Setting src kicks off the request, so it must happen last
			probe.src = originalImg.src || originalImg.getAttribute("src");
		});
	});
}
/**
 * Returns true when every pixel of the (already-loaded) image is purely white
 * or fully transparent; false otherwise, including when the pixel data is
 * unreadable (e.g. tainted cross-origin canvases).
 */
public static imageIsBlank(img: HTMLImageElement) {
	if (img.width === 0 || img.height === 0) {
		return false;
	}
	let canvas = <HTMLCanvasElement>document.createElement("canvas");
	canvas.width = img.width;
	canvas.height = img.height;
	let context = canvas.getContext("2d");
	context.drawImage(img, 0, 0, img.width, img.height);
	try {
		// Pixel data is a flat RGBA array: four entries per pixel
		let pixels = context.getImageData(0, 0, canvas.width, canvas.height).data;
		for (let i = 0; i < pixels.length; i += 4) {
			let fullyTransparent = pixels[i + 3] === 0;
			let purelyWhite = pixels[i] === 255 && pixels[i + 1] === 255 && pixels[i + 2] === 255;
			if (!fullyTransparent && !purelyWhite) {
				return false;
			}
		}
		return true;
	} catch (e) {
		// A SecurityError is sometimes thrown in Firefox on 'tainted' images
		// https://developer.mozilla.org/en-US/docs/Web/HTML/CORS_enabled_image
		return false;
	}
}
/**
 * Strips the clipper's own injected elements from the DOM, along with any
 * iframes that point at local files.
 */
public static removeClipperElements(doc: Document) {
	let clipperSelectors = [
		"#" + Constants.Ids.clipperRootScript,
		"#" + Constants.Ids.clipperUiFrame,
		"#" + Constants.Ids.clipperExtFrame
	];
	DomUtils.domReplacer(doc, clipperSelectors.join());
	// Remove iframes that point to local files
	DomUtils.domReplacer(doc, DomUtils.tags.iframe, (node: Node) => {
		let frame = <HTMLIFrameElement>node;
		return this.isLocalReferenceUrl(frame.src) ? undefined : frame;
	});
}
/**
 * Removes <link> elements whose href points at a local file, since such urls
 * won't resolve anywhere but this machine (i.e. not on our servers).
 */
public static removeUnsupportedHrefs(doc: Document) {
	DomUtils.domReplacer(doc, DomUtils.tags.link, (node: Node) => {
		let link = <HTMLLinkElement>node;
		return this.isLocalReferenceUrl(link.href) ? undefined : link;
	});
}
/**
 * Drops script and noscript elements from the document entirely.
 */
public static removeUnwantedElements(doc: Document) {
	let unwantedTags = [DomUtils.tags.script, DomUtils.tags.noscript];
	DomUtils.domReplacer(doc, unwantedTags.join());
}
/**
 * Removes attributes we don't want serialized from the DOM's elements.
 * Currently strips `srcset` from images so responsive variants don't override
 * the captured `src`.
 */
public static removeUnwantedAttributes(doc: Document) {
	let images = doc.getElementsByTagName("IMG");
	for (let i = 0; i < images.length; i++) {
		let image = images[i] as HTMLImageElement;
		// removeAttribute alone suffices. The original also assigned
		// `image.srcset = undefined`, which coerces to the string "undefined"
		// on this reflected DOM property before being removed anyway.
		image.removeAttribute("srcset");
	}
}
/**
 * Converts img srcs and anchor hrefs in the document to absolute urls.
 * Must be called in the same context as the website so `location` refers to
 * the page being clipped.
 */
public static convertRelativeUrlsToAbsolute(doc: Document) {
	DomUtils.domReplacer(doc, DomUtils.tags.img, (node: Node, index: number) => {
		let image = node as HTMLImageElement;
		// Read the raw attribute: the .src property returns undefined for relative urls
		let rawSrc = (image.attributes as any).src;
		if (!rawSrc || !rawSrc.value) {
			// No src attribute at all — assume the image is rubbish and drop it
			return undefined;
		}
		image.src = DomUtils.toAbsoluteUrl(rawSrc.value, location.origin);
		return image;
	});
	DomUtils.domReplacer(doc, DomUtils.tags.a, (node: Node, index: number) => {
		let anchor = node as HTMLAnchorElement;
		let rawHref = (anchor.attributes as any).href;
		if (rawHref && rawHref.value) {
			anchor.href = DomUtils.toAbsoluteUrl(rawHref.value, location.origin);
			return anchor;
		}
		// Despite the href not being present, it's best to keep the element
		return node;
	});
}
/**
 * Resolves `url` against `base`, returning already-absolute urls unchanged.
 * Protocol-relative urls ("//host/...") get the current page's protocol
 * prepended first, since the clipper itself renders under chrome-extension:
 * and would otherwise inherit the wrong protocol.
 * @throws Error when either argument is empty.
 */
public static toAbsoluteUrl(url: string, base: string): string {
	if (!url || !base) {
		throw new Error("parameters must be non-empty, but was: " + url + ", " + base);
	}
	let normalizedUrl = /^\/\/[^\/]/.test(url) ? location.protocol + url : url;
	let uri = new URI(normalizedUrl);
	return uri.is("relative") ? uri.absoluteTo(base).valueOf() : normalizedUrl;
}
/**
 * Replace canvas elements into images
 * TODO: Deal with the situation of not running over max bytes
 */
public static convertCanvasElementsToImages(doc: Document, originalDoc: Document) {
  // The cloned DOM has no canvas pixel data, so read it from the original DOM
  const sourceCanvases: NodeList = originalDoc.querySelectorAll(DomUtils.tags.canvas);
  DomUtils.domReplacer(doc, DomUtils.tags.canvas, (node: Node, index: number) => {
    const sourceCanvas = sourceCanvases[index] as HTMLCanvasElement;
    if (!sourceCanvas) {
      return undefined;
    }
    const replacement = doc.createElement(DomUtils.tags.img) as HTMLImageElement;
    replacement.src = sourceCanvas.toDataURL();
    // Preserve the canvas's computed styling on the replacement image
    replacement.style.cssText = window.getComputedStyle(sourceCanvas).cssText;
    return replacement;
  });
}
/**
 * Given a DOM, it will add image height and width information to all image tags
 */
public static addImageSizeInformationToDom(doc: Document) {
  const images = doc.getElementsByTagName("img");
  for (let i = 0; i < images.length; i++) {
    const image = images[i];
    // Record the rendered dimensions once; never overwrite existing values
    if (!image.hasAttribute("data-height")) {
      image.setAttribute("data-height", image.height.toString());
    }
    if (!image.hasAttribute("data-width")) {
      image.setAttribute("data-width", image.width.toString());
    }
  }
}
/**
 * Convert an image src url into a base 64 data url, if possible.
 * Enables us to embed image data directly into a document.
 * Uses idea by: https://davidwalsh.name/convert-image-data-uri-javascript
 * Uses cached image idea by: https://developer.mozilla.org/en-US/docs/Web/HTML/CORS_enabled_image
 * @param imageSrcUrl The url of the image to fetch and encode.
 * @returns A promise resolving to a data url (or the original url if the
 *          canvas is tainted), rejecting when the url is empty or loading fails.
 */
public static getImageDataUrl(imageSrcUrl: string): Promise<string> {
  return new Promise<string>((resolve: (result: string) => void, reject: (error: OneNoteApi.GenericError) => void) => {
    if (ObjectUtils.isNullOrUndefined(imageSrcUrl) || imageSrcUrl === "") {
      reject({ error: "image source is undefined or empty" });
      // Bug fix: without this return, execution continued past the reject and
      // still created an Image and assigned it an empty src.
      return;
    }
    let image = new Image();
    // see https://developer.mozilla.org/en-US/docs/Web/HTML/CORS_enabled_image for why this is needed
    image.crossOrigin = "anonymous";
    image.onload = () => {
      let canvas = document.createElement(DomUtils.tags.canvas) as HTMLCanvasElement;
      canvas.width = image.naturalWidth;
      canvas.height = image.naturalHeight;
      canvas.getContext("2d").drawImage(image, 0, 0);
      try {
        let dataUrl: string = canvas.toDataURL("image/png");
        // Re-encode at lower quality if the png is too big for the request
        dataUrl = DomUtils.adjustImageQualityIfNecessary(canvas, dataUrl);
        resolve(dataUrl);
      } catch (e) {
        // There are some cases (like dirty canvases) where running toDataURL will throw an error.
        // We catch it and return the original image source url.
        resolve(imageSrcUrl);
      }
    };
    image.onerror = (ev: Event) => {
      let erroredImg: HTMLImageElement = ev.currentTarget as HTMLImageElement;
      let erroredImgSrc: string;
      if (!ObjectUtils.isNullOrUndefined(erroredImg)) {
        erroredImgSrc = erroredImg.src;
      }
      reject({ error: "onerror occurred fetching " + erroredImgSrc});
    };
    image.src = imageSrcUrl;
    // make sure the load event fires for cached images too
    if (image.complete || image.complete === undefined) {
      image.src = "data:image/gif;base64,R0lGODlhAQABAIAAAAAAAP///ywAAAAAAQABAAACAUwAOw==";
      image.src = imageSrcUrl;
    }
  });
}
// Settings.Instance.Apis_MediaTypesHandledInMemoryMaxRequestLength minus a
// 500 byte buffer for the request padding.
public static maxBytesForMediaTypes: number = 2097152 - 500;
/**
 * If a high-quality image is too big for the request, then switch to JPEG and step down
 * the quality until the encoded payload fits (or quality reaches 0).
 * @param canvas The canvas holding the image pixels.
 * @param dataUrl The current (png) encoding of the canvas.
 * @param quality Starting JPEG quality (0..1).
 * @param qualityStep How much to reduce the quality on each attempt.
 * @returns The possibly re-encoded data url.
 */
public static adjustImageQualityIfNecessary(canvas: HTMLCanvasElement, dataUrl: string, quality = 1, qualityStep = 0.1): string {
  // The original also tracked an unused stepDownCount; removed as dead code.
  while (quality > 0 && dataUrl.length > DomUtils.maxBytesForMediaTypes) {
    dataUrl = canvas.toDataURL("image/jpeg", quality);
    quality -= qualityStep;
  }
  return dataUrl;
}
/**
 * Returns a string representing the entire document
 */
public static getDomString(doc: Document): string {
  const doctypeText = DomUtils.getDoctype(doc);
  return doctypeText + doc.documentElement.outerHTML;
}
/**
 * Returns the document represented by the dom string.
 * If title is provided, the title attribute will be populated in the head element.
 */
public static getDocumentFromDomString(domString: string, title?: string): Document {
  const parsedDocument = document.implementation.createHTMLDocument(title);
  parsedDocument.documentElement.innerHTML = domString;
  return parsedDocument;
}
/**
 * Return the DOCTYPE defined in the file, or "" for quirks-mode documents.
 */
public static getDoctype(doc: Document): string {
  const doctype: DocumentType = doc.doctype;
  if (!doctype) {
    // Quirks mode: no doctype node at all
    return "";
  }
  const parts = ["<!DOCTYPE ", doctype.name];
  if (doctype.publicId) {
    parts.push(" PUBLIC \"" + doctype.publicId + "\"");
  } else if (doctype.systemId) {
    // SYSTEM keyword only appears when there is no public id
    parts.push(" SYSTEM");
  }
  if (doctype.systemId) {
    parts.push(" \"" + doctype.systemId + "\"");
  }
  parts.push(">");
  return parts.join("");
}
/**
 * Returns the number of bytes in the UTF-8 encoding of the given string.
 */
public static getByteSize(s: string) {
  // unescape is deprecated but fast; fall back to counting the byte tokens
  return unescape
    ? unescape(encodeURIComponent(s)).length
    : DomUtils.getByteArray(s).length;
}
/**
 * Truncates a string so its UTF-8 encoding fits within maxByteLength bytes,
 * without splitting a multi-byte character.
 * @param s The string to truncate.
 * @param maxByteLength Maximum allowed size in bytes.
 * @returns The (possibly truncated) string; "" if nothing decodable remains.
 */
public static truncateStringToByteSize(s: string, maxByteLength: number): string {
  let bytes: string[] = DomUtils.getByteArray(s);
  if (bytes.length <= maxByteLength) {
    return s;
  }
  bytes = bytes.slice(0, maxByteLength);
  let encoded = bytes.join("");
  // The cut may fall inside a multi-byte character, in which case decoding
  // throws; trim one character at a time until the remainder decodes cleanly.
  // (The original trimmed before the first attempt — losing a byte even when
  // the cut was clean — and gave up entirely on the first decode failure,
  // discarding the whole valid prefix.)
  while (encoded.length > 0) {
    try {
      return decodeURIComponent(encoded);
    } catch (e) {
      encoded = encoded.slice(0, -1);
    }
  }
  return "";
}
/**
 * Splits a string into its UTF-8 byte tokens: "%XX" escapes or single
 * unescaped characters, as produced by encodeURIComponent.
 */
public static getByteArray(s: string): string[] {
  const tokens = encodeURIComponent(s).match(/%..|./g);
  return tokens ? tokens : [];
}
/**
 * Gets the locale of the document
 */
public static getLocale(doc: Document): string {
  const htmlElement = doc.getElementsByTagName("html")[0] as HTMLElement;
  const documentLocale = htmlElement.getAttribute("lang");
  if (documentLocale) {
    return documentLocale;
  }
  // window.navigator.userLanguage is defined for IE, and window.navigator.language is defined for other browsers
  return window.navigator.language || (window.navigator as any).userLanguage;
}
/**
 * Gets the name of the file from the Url. Right now we mainly
 * support getting the name from PDF.
 * @returns The decoded file name for .pdf urls, otherwise the document title.
 */
public static getFileNameFromUrl(doc: Document): string {
  let urlAnchor = doc.createElement("a");
  urlAnchor.href = doc.URL;
  // The dot must be escaped: the original ".pdf$" pattern matched ANY
  // character before "pdf" (e.g. a path ending in "mypdf"). The useless "g"
  // flag is also dropped.
  if (urlAnchor.pathname.match(new RegExp("\\.pdf$", "i"))) {
    let filename = urlAnchor.pathname.match(/[^/]+$/g);
    if (filename) {
      return decodeURIComponent(filename.pop());
    }
  }
  return doc.title;
}
/**
 * Find all non-whitespace text nodes under the provided root node
 * Uses idea by: http://stackoverflow.com/a/10730777
 * @param root The node whose subtree is walked.
 * @returns Every text node containing at least one non-whitespace character.
 */
public static textNodesNoWhitespaceUnder(root: Node): Text[] {
  let textNodes: Text[] = [];
  let walk: TreeWalker = document.createTreeWalker(root, NodeFilter.SHOW_TEXT, {
    acceptNode: (node: Text) => {
      // Only accept nodes that have content other than whitespace
      if ( ! /^\s*$/.test(node.data) ) {
        return NodeFilter.FILTER_ACCEPT;
      }
      // The original fell through and implicitly returned undefined here;
      // return an explicit constant so the filter always yields a valid value
      // (text nodes have no children, so SKIP and REJECT are equivalent).
      return NodeFilter.FILTER_SKIP;
    }
  }, false);
  let n: Node = walk.nextNode();
  while (n) {
    textNodes.push(n as Text);
    n = walk.nextNode();
  }
  return textNodes;
}
public static removeEventListenerAttributes(doc: Document): void {
// See: https://en.wikipedia.org/wiki/DOM_events
let attributesToRemove = [
"onclick",
"ondblclick",
"onmousedown",
"onmouseup",
"onmouseover",
"onmousemove",
"onmouseout",
"ondragstart",
"ondrag",
"ondragenter",
"ondragleave",
"ondragover",
"ondrop",
"ondragend",
"onkeydown",
"onkeypress",
"onkeyup",
"onload",
"onunload",
"onabort",
"onerror",
"onresize",
"onscroll",
"onselect",
"onchange",
"onsubmit",
"onreset",
"onfocus",
"onblur"
];
for (let i = 0; i < attributesToRemove.length; i++) {
let elements = doc.querySelectorAll("[" + attributesToRemove[i] + "]");
for (let j = 0; j < elements.length; j++) {
elements[j].removeAttribute(attributesToRemove[i]);
}
}
}
/*
 * Mimics augmentation API cleaning and ensuring that only ONML-compliant
 * elements remain
 */
public static toOnml(doc: Document): Promise<void> {
  // NOTE(review): the ordering looks deliberate — urls are absolutized before
  // styles/classes and inline handlers are stripped — confirm before reordering.
  DomUtils.removeElementsNotSupportedInOnml(doc);
  DomUtils.removeDisallowedIframes(doc);
  DomUtils.removeUnwantedItems(doc);
  DomUtils.convertRelativeUrlsToAbsolute(doc);
  DomUtils.removeAllStylesAndClasses(doc);
  DomUtils.removeEventListenerAttributes(doc);
  // removeBlankImages supplies the returned Promise (see the return type)
  return DomUtils.removeBlankImages(doc);
}
/**
 * Drops iframes, except those from supported video domains, which are
 * converted into embedded videos with the attributes ONApi recognizes.
 */
public static removeDisallowedIframes(doc: Document) {
  DomUtils.domReplacer(doc, DomUtils.tags.iframe, (node) => {
    const frameSrc = (node as HTMLIFrameElement).src;
    const supportedDomain = VideoUtils.videoDomainIfSupported(frameSrc);
    if (!supportedDomain) {
      // Not a recognized video iframe: remove it entirely
      return undefined;
    }
    const extractor = VideoExtractorFactory.createVideoExtractor(SupportedVideoDomains[supportedDomain]);
    return extractor.createEmbeddedVideoFromUrl(frameSrc);
  });
}
/**
 * Strips the style and class attributes from every element, in place.
 */
private static removeAllStylesAndClasses(doc: Document): void {
  DomUtils.domReplacer(doc, "*", (oldNode, index) => {
    const element = oldNode as HTMLElement;
    element.removeAttribute("style");
    element.removeAttribute("class");
    return element;
  });
}
/**
 * True when any part of the element overlaps the viewport vertically.
 */
private static isScrolledIntoPartialView(el: HTMLElement): boolean {
  const bounds = el.getBoundingClientRect();
  return bounds.top < window.innerHeight && bounds.bottom >= 0;
}
/**
 * Returns how far the element is scrolled: a percentage (0..100) by default,
 * or a 0..1 decimal when asDecimalValue is true. Returns 0 for a missing or
 * non-scrollable element.
 */
public static getScrollPercent(elem: Element, asDecimalValue = false) {
  if (!elem) {
    return 0;
  }
  const scrollableHeight = elem.scrollHeight - elem.clientHeight;
  if (scrollableHeight <= 0) {
    // Nothing to scroll — the original divided by zero here, yielding NaN
    return 0;
  }
  const scrollValue: number = (elem.scrollTop * 1.0) / scrollableHeight;
  return asDecimalValue ? scrollValue : scrollValue * 100;
}
private static isLocalReferenceUrl(url: string): Boolean {
return !(url.indexOf("https://") === 0 || url.indexOf("http://") === 0);
}
}
import SamPlugin from "../index";
import fs from "fs";
import path from "path";
jest.mock("fs");
jest.mock("path");
const samTemplate = `
AWSTemplateFormatVersion: "2010-09-09"
Transform: AWS::Serverless-2016-10-31
Globals:
Function:
Runtime: nodejs10.x
Resources:
MyLambda:
Type: AWS::Serverless::Function
Properties:
CodeUri: src/my-lambda
Handler: app.handler
`;
test("Happy path with default constructor works", () => {
const plugin = new SamPlugin();
// @ts-ignore
fs.__clearMocks();
// @ts-ignore
fs.__setMockDirs(["."]);
// @ts-ignore
fs.__setMockFiles({ "./template.yaml": samTemplate });
// @ts-ignore
path.__clearMocks();
// @ts-ignore
path.__setMockBasenames({ "./template.yaml": "template.yaml" });
// @ts-ignore
path.__setMockDirnames({ "./template.yaml": "." });
// @ts-ignore
path.__setMockRelatives({ ".#.": "" });
const entryPoints = plugin.entry();
let afterEmit: (_compilation: any) => void;
plugin.apply({
hooks: {
afterEmit: {
tap: (n: string, f: (_compilation: any) => void) => {
afterEmit = f;
},
},
},
});
// @ts-ignore
afterEmit(null);
// @ts-ignore
expect({ entryPoints, files: fs.__getMockWrittenFiles() }).toMatchSnapshot();
});
test("Happy path with empty options in the constructor works", () => {
const plugin = new SamPlugin({});
// @ts-ignore
fs.__clearMocks();
// @ts-ignore
fs.__setMockDirs(["."]);
// @ts-ignore
fs.__setMockFiles({ "./template.yaml": samTemplate });
// @ts-ignore
path.__clearMocks();
// @ts-ignore
path.__setMockBasenames({ "./template.yaml": "template.yaml" });
// @ts-ignore
path.__setMockDirnames({ "./template.yaml": "." });
// @ts-ignore
path.__setMockRelatives({ ".#.": "" });
const entryPoints = plugin.entry();
let afterEmit: (_compilation: any) => void;
plugin.apply({
hooks: {
afterEmit: {
tap: (n: string, f: (_compilation: any) => void) => {
afterEmit = f;
},
},
},
});
// @ts-ignore
afterEmit(null);
// Does create a .vscode folder
// @ts-ignore
expect(fs.__getMockMakedirs().includes(".vscode")).toBeTruthy();
// @ts-ignore
expect({ entryPoints, files: fs.__getMockWrittenFiles() }).toMatchSnapshot();
});
test("Happy path with empty options in the constructor works and an existing .vscode folder", () => {
const plugin = new SamPlugin({});
// @ts-ignore
fs.__clearMocks();
// @ts-ignore
fs.__setMockDirs([".", ".vscode"]);
// @ts-ignore
fs.__setMockFiles({ "./template.yaml": samTemplate });
// @ts-ignore
path.__clearMocks();
// @ts-ignore
path.__setMockBasenames({ "./template.yaml": "template.yaml" });
// @ts-ignore
path.__setMockDirnames({ "./template.yaml": "." });
// @ts-ignore
path.__setMockRelatives({ ".#.": "" });
const entryPoints = plugin.entry();
let afterEmit: (_compilation: any) => void;
plugin.apply({
hooks: {
afterEmit: {
tap: (n: string, f: (_compilation: any) => void) => {
afterEmit = f;
},
},
},
});
// @ts-ignore
afterEmit(null);
// Does not create a .vscode folder
// @ts-ignore
expect(fs.__getMockMakedirs().includes(".vscode")).toBeFalsy();
// @ts-ignore
expect({ entryPoints, files: fs.__getMockWrittenFiles() }).toMatchSnapshot();
});
test("Happy path with VS Code debugging disabled", () => {
const plugin = new SamPlugin({ vscodeDebug: false });
// @ts-ignore
fs.__clearMocks();
// @ts-ignore
fs.__setMockDirs(["."]);
// @ts-ignore
fs.__setMockFiles({ "./template.yaml": samTemplate });
// @ts-ignore
path.__clearMocks();
// @ts-ignore
path.__setMockBasenames({ "./template.yaml": "template.yaml" });
// @ts-ignore
path.__setMockDirnames({ "./template.yaml": "." });
// @ts-ignore
path.__setMockRelatives({ ".#.": "" });
const entryPoints = plugin.entry();
let afterEmit: (_compilation: any) => void;
plugin.apply({
hooks: {
afterEmit: {
tap: (n: string, f: (_compilation: any) => void) => {
afterEmit = f;
},
},
},
});
// @ts-ignore
afterEmit(null);
// Does not create a .vscode folder
// @ts-ignore
expect(fs.__getMockMakedirs().includes(".vscode")).toBeFalsy();
// @ts-ignore
expect({ entryPoints, files: fs.__getMockWrittenFiles() }).toMatchSnapshot();
});
test("Happy path with multiple projects works", () => {
const plugin = new SamPlugin({ projects: { a: "project-a", b: "project-b" } });
// @ts-ignore
fs.__clearMocks();
// @ts-ignore
fs.__setMockDirs(["project-a", "project-b"]);
// @ts-ignore
fs.__setMockFiles({ "project-a/template.yaml": samTemplate, "project-b/template.yaml": samTemplate });
// @ts-ignore
path.__clearMocks();
// @ts-ignore
path.__setMockBasenames({ "project-a/template.yaml": "template.yaml", "project-b/template.yaml": "template.yaml" });
// @ts-ignore
path.__setMockDirnames({ "project-a/template.yaml": "project-a", "project-b/template.yaml": "project-b" });
// @ts-ignore
path.__setMockRelatives({ ".#project-a": "project-a", ".#project-b": "project-b" });
const entryPoints = plugin.entry();
let afterEmit: (_compilation: any) => void;
plugin.apply({
hooks: {
afterEmit: {
tap: (n: string, f: (_compilation: any) => void) => {
afterEmit = f;
},
},
},
});
// @ts-ignore
afterEmit(null);
// @ts-ignore
expect({ entryPoints, files: fs.__getMockWrittenFiles() }).toMatchSnapshot();
});
test("Happy path with multiple projects and different template names works", () => {
const plugin = new SamPlugin({ projects: { a: "project-a/template-a.yaml", b: "project-b/template-b.yaml" } });
// @ts-ignore
fs.__clearMocks();
// @ts-ignore
fs.__setMockDirs(["project-a", "project-b"]);
// @ts-ignore
fs.__setMockFiles({ "project-a/template-a.yaml": samTemplate, "project-b/template-b.yaml": samTemplate });
// @ts-ignore
path.__clearMocks();
// @ts-ignore
path.__setMockBasenames({
"project-a/template-a.yaml": "template-a.yaml",
"project-b/template-b.yaml": "template-b.yaml",
});
// @ts-ignore
path.__setMockDirnames({ "project-a/template-a.yaml": "project-a", "project-b/template-b.yaml": "project-b" });
// @ts-ignore
path.__setMockRelatives({ ".#project-a": "project-a", ".#project-b": "project-b" });
const entryPoints = plugin.entry();
let afterEmit: (_compilation: any) => void;
plugin.apply({
hooks: {
afterEmit: {
tap: (n: string, f: (_compilation: any) => void) => {
afterEmit = f;
},
},
},
});
// @ts-ignore
afterEmit(null);
// @ts-ignore
expect({ entryPoints, files: fs.__getMockWrittenFiles() }).toMatchSnapshot();
});
test("Calling apply() before entry() throws an error", () => {
const plugin = new SamPlugin();
// @ts-ignore
fs.__clearMocks();
// @ts-ignore
path.__clearMocks();
let afterEmit: (_compilation: any) => void;
plugin.apply({
hooks: {
afterEmit: {
tap: (n: string, f: (_compilation: any) => void) => {
afterEmit = f;
},
},
},
});
// @ts-ignore
expect(() => afterEmit(null)).toThrowError("It looks like AwsSamPlugin.entry() was not called");
});
test("Happy path for filename() when the Lambda is found", () => {
const plugin = new SamPlugin();
// @ts-ignore
fs.__clearMocks();
// @ts-ignore
fs.__setMockDirs(["."]);
// @ts-ignore
fs.__setMockFiles({ "./template.yaml": samTemplate });
// @ts-ignore
path.__clearMocks();
// @ts-ignore
path.__setMockBasenames({ "./template.yaml": "template.yaml" });
// @ts-ignore
path.__setMockDirnames({ "./template.yaml": "." });
// @ts-ignore
path.__setMockRelatives({ ".#.": "" });
const entryPoints = plugin.entry();
expect(plugin.filename({ chunk: { name: "MyLambda" } })).toEqual("./.aws-sam/build/MyLambda/app.js");
});
test("Fails when filename() is passed an invalid lambda name", () => {
const plugin = new SamPlugin();
// @ts-ignore
fs.__clearMocks();
// @ts-ignore
fs.__setMockDirs(["."]);
// @ts-ignore
fs.__setMockFiles({ "./template.yaml": samTemplate });
// @ts-ignore
path.__clearMocks();
// @ts-ignore
path.__setMockBasenames({ "./template.yaml": "template.yaml" });
// @ts-ignore
path.__setMockDirnames({ "./template.yaml": "." });
// @ts-ignore
path.__setMockRelatives({ ".#.": "" });
const entryPoints = plugin.entry();
//console.log("XXX", plugin.filename({chunk: { name: "FakeLambda" }}));
expect(() => plugin.filename({ chunk: { name: "FakeLambda" } })).toThrowError(
"Unable to find filename for FakeLambda"
);
});
test("Fails when there is no template.yaml or template.yml and you provided a directory", () => {
const plugin = new SamPlugin();
// @ts-ignore
fs.__clearMocks();
// @ts-ignore
fs.__setMockDirs(["."]);
// // @ts-ignore
// fs.__setMockFiles({ "./template.yaml": samTemplate });
// @ts-ignore
path.__clearMocks();
// @ts-ignore
// path.__setMockBasenames({ "./template.yaml": "template.yaml" });
// // @ts-ignore
// path.__setMockDirnames({ "./template.yaml": "." });
// @ts-ignore
path.__setMockRelatives({ ".#.": "" });
expect(() => plugin.entry()).toThrowError("Could not find template.yaml or template.yml in .");
});
test("Happy path with an output file specified", () => {
const plugin = new SamPlugin({ outFile: "index" });
// @ts-ignore
fs.__clearMocks();
// @ts-ignore
fs.__setMockDirs(["."]);
// @ts-ignore
fs.__setMockFiles({ "./template.yaml": samTemplate });
// @ts-ignore
path.__clearMocks();
// @ts-ignore
path.__setMockBasenames({ "./template.yaml": "template.yaml" });
// @ts-ignore
path.__setMockDirnames({ "./template.yaml": "." });
// @ts-ignore
path.__setMockRelatives({ ".#.": "" });
const entryPoints = plugin.entry();
let afterEmit: (_compilation: any) => void;
plugin.apply({
hooks: {
afterEmit: {
tap: (n: string, f: (_compilation: any) => void) => {
afterEmit = f;
},
},
},
});
// @ts-ignore
afterEmit(null);
// @ts-ignore
expect({ entryPoints, files: fs.__getMockWrittenFiles() }).toMatchSnapshot();
}); | the_stack |
import { dom, h } from "../../src/index";
import { expect } from "../test-utilities";
// Specs for virtual-dom child diffing: adding/removing keyed and unkeyed
// children, text nodes, contentEditable handling, and svg namespaces.
describe("dom", () => {
  describe("children", () => {
    it("can remove childnodes", () => {
      let projection = dom.create(
        h("div", [h("span", { key: 1 }), h("span", { key: 2 }), h("span", { key: 3 })])
      );
      let div = projection.domNode as HTMLDivElement;
      expect(div.children.length).to.equal(3);
      let firstSpan = div.children[0];
      let lastSpan = div.children[2];
      // Remove middle div
      projection.update(h("div", [h("span", { key: 1 }), h("span", { key: 3 })]));
      expect(div.children.length).to.equal(2);
      expect(div.children[0]).to.equal(firstSpan);
      expect(div.children[1]).to.equal(lastSpan);
      // Remove first div
      projection.update(h("div", [h("span", { key: 3 })]));
      expect(div.children.length).to.equal(1);
      expect(div.children[0]).to.equal(lastSpan);
      // Remove last div
      projection.update(h("div", []));
      expect(div.children.length).to.equal(0);
    });
    it("can add childnodes", () => {
      let projection = dom.create(h("div", [h("span", { key: 2 }), h("span", { key: 4 })]));
      let div = projection.domNode as HTMLDivElement;
      expect(div.children.length).to.equal(2);
      let firstSpan = div.children[0];
      let lastSpan = div.children[1];
      // The two existing keyed spans must be reused, not recreated
      projection.update(
        h("div", [
          h("span", { key: 1 }),
          h("span", { key: 2 }),
          h("span", { key: 3 }),
          h("span", { key: 4 }),
          h("span", { key: 5 }),
        ])
      );
      expect(div.children.length).to.equal(5);
      expect(div.children[1]).to.equal(firstSpan);
      expect(div.children[3]).to.equal(lastSpan);
    });
    it('uses "bind" instead of "key" when no "key" is present', () => {
      let projection = dom.create(h("div", [h("span", { bind: {} })]));
      let div = projection.domNode as HTMLDivElement;
      expect(div.children.length).to.equal(1);
      projection.update(h("div", [h("span", { bind: {} }), h("span", { bind: {} })]));
      expect(div.children.length).to.equal(2);
    });
    it("can distinguish between string keys when adding", () => {
      let projection = dom.create(
        h("div", [h("span", { key: "one" }), h("span", { key: "three" })])
      );
      let div = projection.domNode as HTMLDivElement;
      expect(div.children.length).to.equal(2);
      let firstSpan = div.children[0];
      let secondSpan = div.children[1];
      projection.update(
        h("div", [
          h("span", { key: "one" }),
          h("span", { key: "two" }),
          h("span", { key: "three" }),
        ])
      );
      expect(div.childNodes.length).to.equal(3);
      expect(div.childNodes[0]).to.equal(firstSpan);
      expect(div.childNodes[2]).to.equal(secondSpan);
    });
    it("can distinguish between falsy keys when replacing", () => {
      let projection = dom.create(
        h("div", [
          h("span", { key: false }),
          h("span", <any>{ key: null }),
          h("span", { key: "" }),
          h("span", {}),
        ])
      );
      let div = projection.domNode as HTMLDivElement;
      expect(div.children.length).to.equal(4);
      let firstSpan = div.children[0];
      let secondSpan = div.children[1];
      let thirdSpan = div.children[2];
      let fourthSpan = div.children[3];
      // key: 0 differs from every falsy key above, so nothing is reused
      projection.update(h("div", [h("span", { key: 0 })]));
      expect(div.children.length).to.equal(1);
      let newSpan = div.childNodes[0];
      expect(newSpan).not.to.equal(firstSpan);
      expect(newSpan).not.to.equal(secondSpan);
      expect(newSpan).not.to.equal(thirdSpan);
      expect(newSpan).not.to.equal(fourthSpan);
    });
    it("hides _false_ values to allow using && in render functions", () => {
      let showMore = false;
      let render = () => h("div", [h("div.summary"), showMore && h("div.rest")]);
      expect(render().children).to.have.length(1);
    });
    it("can distinguish between string keys when deleting", () => {
      let projection = dom.create(
        h("div", [
          h("span", { key: "one" }),
          h("span", { key: "two" }),
          h("span", { key: "three" }),
        ])
      );
      let div = projection.domNode as HTMLDivElement;
      expect(div.children.length).to.equal(3);
      let firstSpan = div.children[0];
      let thirdSpan = div.children[2];
      projection.update(h("div", [h("span", { key: "one" }), h("span", { key: "three" })]));
      expect(div.childNodes.length).to.equal(2);
      expect(div.childNodes[0]).to.equal(firstSpan);
      expect(div.childNodes[1]).to.equal(thirdSpan);
    });
    it("can distinguish between falsy keys when deleting", () => {
      let projection = dom.create(
        h("div", [h("span", { key: 0 }), h("span", { key: false }), h("span", <any>{ key: null })])
      );
      let div = projection.domNode as HTMLDivElement;
      expect(div.children.length).to.equal(3);
      let firstSpan = div.children[0];
      let thirdSpan = div.children[2];
      projection.update(h("div", [h("span", { key: 0 }), h("span", <any>{ key: null })]));
      expect(div.childNodes.length).to.equal(2);
      expect(div.childNodes[0]).to.equal(firstSpan);
      expect(div.childNodes[1]).to.equal(thirdSpan);
    });
    it("does not reorder nodes based on keys", () => {
      let projection = dom.create(h("div", [h("span", { key: "a" }), h("span", { key: "b" })]));
      let div = projection.domNode as HTMLDivElement;
      expect(div.children.length).to.equal(2);
      let firstSpan = div.children[0];
      let lastSpan = div.children[1];
      projection.update(h("div", [h("span", { key: "b" }), h("span", { key: "a" })]));
      expect(div.childNodes.length).to.equal(2);
      expect(div.childNodes[1]).to.not.equal(firstSpan);
      expect(div.childNodes[0]).to.equal(lastSpan);
    });
    it("can insert textnodes", () => {
      let projection = dom.create(h("div", [h("span", { key: 2 }), h("span", { key: 4 })]));
      let div = projection.domNode as HTMLDivElement;
      expect(div.children.length).to.equal(2);
      let firstSpan = div.children[0];
      let lastSpan = div.children[1];
      projection.update(h("div", [h("span", { key: 2 }), "Text between", h("span", { key: 4 })]));
      expect(div.childNodes.length).to.equal(3);
      expect(div.childNodes[0]).to.equal(firstSpan);
      expect(div.childNodes[2]).to.equal(lastSpan);
    });
    it("can update single textnodes", () => {
      // Empty strings and undefined both render as "no text node"
      let projection = dom.create(h("span", [""]));
      let span = projection.domNode as HTMLSpanElement;
      expect(span.childNodes.length).to.equal(0);
      projection.update(h("span", [undefined]));
      expect(span.childNodes.length).to.equal(0);
      projection.update(h("span", ["f"]));
      expect(span.childNodes.length).to.equal(1);
      projection.update(h("span", [undefined]));
      expect(span.childNodes.length).to.equal(0);
      projection.update(h("span", [""]));
      expect(span.childNodes.length).to.equal(0);
      projection.update(h("span", [" "]));
      expect(span.childNodes.length).to.equal(1);
    });
    it("will throw an error when maquette is not sure which node is added", () => {
      // Unkeyed, ambiguous insert in the middle is rejected by the diff
      let projection = dom.create(h("div", [h("span", ["a"]), h("span", ["c"])]));
      expect(() => {
        projection.update(h("div", [h("span", ["a"]), h("span", ["b"]), h("span", ["c"])]));
      }).to.throw();
    });
    it("will throw an error when maquette is not sure which node is removed", () => {
      let projection = dom.create(h("div", [h("span", ["a"]), h("span", ["b"]), h("span", ["c"])]));
      expect(() => {
        projection.update(h("div", [h("span", ["a"]), h("span", ["c"])]));
      }).to.throw();
    });
    it("allows a contentEditable tag to be altered", () => {
      let text = "initial value";
      let handleInput = (evt: Event) => {
        text = (evt.currentTarget as HTMLElement).innerHTML;
      };
      let render = () =>
        h("div", {
          contentEditable: true,
          oninput: handleInput,
          innerHTML: text,
        });
      let projection = dom.create(render());
      // The user clears the value
      projection.domNode.removeChild(projection.domNode.firstChild);
      handleInput(<any>{ currentTarget: projection.domNode });
      projection.update(render());
      // The user enters a new value
      projection.domNode.innerHTML = "changed <i>value</i>";
      handleInput(<any>{ currentTarget: projection.domNode });
      projection.update(render());
      expect(projection.domNode.innerHTML).to.equal("changed <i>value</i>");
    });
    describe("svg", () => {
      it("creates and updates svg dom nodes with the right namespace", () => {
        let projection = dom.create(
          h("div", [
            h("svg", [
              h("circle", { cx: "2cm", cy: "2cm", r: "1cm", fill: "red" }),
              h("image", { href: "/image.jpeg" }),
            ]),
            h("span"),
          ])
        );
        let svg = projection.domNode.firstChild as SVGElement;
        expect(svg.namespaceURI).to.equal("http://www.w3.org/2000/svg");
        let circle = svg.firstChild as SVGElement;
        expect(circle.namespaceURI).to.equal("http://www.w3.org/2000/svg");
        let image = svg.lastChild as SVGElement;
        expect(image.attributes[0].namespaceURI).to.equal("http://www.w3.org/1999/xlink");
        let span = projection.domNode.lastChild as SVGElement;
        expect(span.namespaceURI).to.equal("http://www.w3.org/1999/xhtml");
        projection.update(
          h("div", [
            h("svg", [
              h("circle", {
                key: "blue",
                cx: "2cm",
                cy: "2cm",
                r: "1cm",
                fill: "blue",
              }),
              h("image", { href: "/image2.jpeg" }),
            ]),
            h("span"),
          ])
        );
        let blueCircle = svg.firstChild as SVGElement;
        expect(blueCircle.namespaceURI).to.equal("http://www.w3.org/2000/svg");
      });
      it("updates svg dom properties with numbers", () => {
        let projection = dom.create(
          h("div", [
            h("svg", [
              h("circle", { cx: 20, cy: 30, r: 40, fill: "yellow" }),
              h("rect", { width: 50, height: 60, x: 70, y: 80, fill: "black" }),
            ]),
          ])
        );
        let svg = projection.domNode.firstChild;
        let circle = svg.firstChild as SVGCircleElement;
        expect(circle.getAttribute("cx")).to.equal("20");
        expect(circle.getAttribute("fill")).to.equal("yellow");
        let rect = svg.lastChild as SVGRectElement;
        expect(rect.getAttribute("height")).to.equal("60");
        expect(rect.getAttribute("y")).to.equal("80");
        projection.update(
          h("div", [
            h("svg", [
              h("circle", { cx: 20, cy: 120, r: 40, fill: "yellow" }),
              h("rect", { width: 50, height: 60, x: 70, y: 80, fill: "black" }),
            ]),
          ])
        );
        expect(circle.getAttribute("cy")).to.equal("120");
      });
    });
  });
});
import { TimePicker } from "../../src/timepicker/timepicker";
import { MaskedDateTime } from '../../src/maskbase/masked-date-time';
import { createElement,L10n, Ajax, loadCldr} from '@syncfusion/ej2-base';
/**
 * Loads the CLDR json data the TimePicker specs need for the given culture.
 * Requests are made synchronously (third Ajax argument is false) so the data
 * is registered with loadCldr before any spec runs.
 */
function loadCultureFiles(name: string, base?: boolean): void {
    const files: string[] = base
        ? ['numberingSystems.json']
        : ['ca-gregorian.json', 'numbers.json', 'timeZoneNames.json', 'currencies.json'];
    files.push('weekData.json');
    for (const file of files) {
        // weekData and the base numbering systems live under supplemental/
        const isSupplemental = base || file === 'weekData.json';
        const url = isSupplemental
            ? 'base/spec/cldr/supplemental/' + file
            : 'base/spec/cldr/main/' + name + '/' + file;
        const ajax: Ajax = new Ajax(url, 'GET', false);
        let response: Object;
        ajax.onSuccess = (value: JSON) => {
            response = value;
        };
        ajax.send();
        // Synchronous request: onSuccess has already fired by this point
        loadCldr(JSON.parse(<string>response));
    }
}
// Register the localized mask-placeholder strings used by the specs below.
L10n.load({
    'en': {
        'timepicker': { day: 'day' , month: 'month', year: 'year', hour: 'hour' ,minute: 'minute', second:'second' }
    },
    'de': {
        'timepicker': { day: 'Tag' , month: 'Monat', year: 'Jahr', hour: 'Stunde' ,minute: 'Minute', second:'Sekunden' }
    },
    'zh': {
        'timepicker': { day: '日' , month: '月', year: '年', hour: '小時' ,minute: '分鐘', second:'第二' }
    },
    'ja': {
        'timepicker': { day: '日' , month: '月', year: '年', hour: '時間' ,minute: '分', second:'秒'}
    },
});
// Reusable mousedown event. createEvent/initEvent are deprecated in favor of
// `new MouseEvent(...)` but keep the widest runner-browser support.
let clickEvent: MouseEvent = document.createEvent('MouseEvents');
clickEvent.initEvent('mousedown', true, true);
// Enable masked-input support on the TimePicker under test.
TimePicker.Inject(MaskedDateTime);
describe('Timepicker', () => {
beforeAll(() => {
const isDef = (o: any) => o !== undefined && o !== null;
if (!isDef(window.performance)) {
console.log("Unsupported environment, window.performance.memory is unavailable");
this.skip(); //Skips test (in Chai)
return;
}
});
describe('Masked TimePicker', () => {
let timepicker: any;
let maskedDateTime: any;
let keyEventArgs: any = {
preventDefault: (): void => { /** NO Code */ },
stopPropagation:(): void=>{},
action: 'ArrowLeft',
code: 'ArrowLeft',
key: 'ArrowLeft'
};
let mouseEventArgs: any = { preventDefault: function () { }, target: null };
beforeAll(() => {
let ele: HTMLElement = createElement('input', { id: 'date' });
document.body.appendChild(ele);
});
afterAll(() => {
if (timepicker) {
timepicker.destroy();
}
document.body.innerHTML = '';
maskedDateTime = new MaskedDateTime();
maskedDateTime.destroy();
});
it('default rendering without enableMask property ', () => {
let inputEle: HTMLElement = createElement('input', { id: 'timepicker' });
document.body.appendChild(inputEle);
timepicker = new TimePicker();
timepicker.appendTo('#timepicker');
expect(timepicker.element.value).toBe('');
expect(timepicker.value).toBe(null);
});
it('default rendering with enableMask property ', () => {
let inputEle: HTMLElement = createElement('input', { id: 'timepicker' });
document.body.appendChild(inputEle);
timepicker = new TimePicker({enableMask: true , format: 'hh:mm:ss'});
timepicker.appendTo('#timepicker');
expect(timepicker.element.value).toBe('hour:minute:second');
expect(timepicker.value).toBe(null);
});
it('Rendering with maskPlaceholder as custom type ', () => {
let inputEle: HTMLElement = createElement('input', { id: 'timepicker' });
document.body.appendChild(inputEle);
timepicker = new TimePicker({enableMask: true, format: 'hh:mm:ss' , maskPlaceholder: {hour: 'h.', minute: 'm.', second: 's.'}});
timepicker.appendTo('#timepicker');
expect(timepicker.element.value).toBe('h.:m.:s.');
expect(timepicker.value).toBe(null);
});
// it('Clear button', () => {
// let inputEle: HTMLElement = createElement('input', { id: 'timepicker' });
// document.body.appendChild(inputEle);
// timepicker = new TimePicker({enableMask: true , format: 'hh:mm:ss' , value: new Date('01/01/2021 01:01:01')});
// timepicker.appendTo('#timepicker');
// expect(timepicker.element.value).toBe('01:01:01');
// expect(+timepicker.value).toBe(+new Date());
// (<HTMLInputElement>document.getElementsByClassName('e-clear-icon')[0]).dispatchEvent(clickEvent);
// expect(timepicker.element.value).toBe('hour:minute:second');
// expect(timepicker.value).toBe(null);
// });
it('Selection navigation using keyboard action', () => {
let inputEle: HTMLElement = createElement('input', { id: 'timepicker' });
document.body.appendChild(inputEle);
timepicker = new TimePicker({enableMask: true , format: 'hh:mm:ss'});
timepicker.appendTo('#timepicker');
timepicker.focusIn();
expect(timepicker.element.value).toBe('hour:minute:second');
timepicker.element.selectionStart= 0;
timepicker.element.selectionEnd= 4 ;
expect(timepicker.value).toBe(null);
keyEventArgs.action = 'right';
keyEventArgs.key = 'ArrowRight';
timepicker.inputHandler(keyEventArgs);
expect(timepicker.element.selectionStart).toBe(5);
expect(timepicker.element.selectionEnd).toBe(11);
timepicker.inputHandler(keyEventArgs);
expect(timepicker.element.selectionStart).toBe(12);
expect(timepicker.element.selectionEnd).toBe(18);
keyEventArgs.action = 'left';
keyEventArgs.key = 'ArrowLeft';
timepicker.inputHandler(keyEventArgs);
expect(timepicker.element.selectionStart).toBe(5);
expect(timepicker.element.selectionEnd).toBe(11);
timepicker.inputHandler(keyEventArgs);
expect(timepicker.element.selectionStart).toBe(0);
expect(timepicker.element.selectionEnd).toBe(4);
});
it('Selection navigation using tab and shiftTab ', () => {
let inputEle: HTMLElement = createElement('input', { id: 'timepicker' });
document.body.appendChild(inputEle);
timepicker = new TimePicker({enableMask: true , format: 'hh:mm:ss'});
timepicker.appendTo('#timepicker');
timepicker.focusIn();
expect(timepicker.element.value).toBe('hour:minute:second');
timepicker.element.selectionStart= 0;
timepicker.element.selectionEnd= 4 ;
keyEventArgs.action = 'tab';
keyEventArgs.key = 'Tab';
timepicker.inputHandler(keyEventArgs);
expect(timepicker.element.selectionStart).toBe(5);
expect(timepicker.element.selectionEnd).toBe(11);
timepicker.inputHandler(keyEventArgs);
expect(timepicker.element.selectionStart).toBe(12);
expect(timepicker.element.selectionEnd).toBe(18);
keyEventArgs.action = 'shiftTab'
timepicker.inputHandler(keyEventArgs);
expect(timepicker.element.selectionStart).toBe(5);
expect(timepicker.element.selectionEnd).toBe(11);
timepicker.inputHandler(keyEventArgs);
expect(timepicker.element.selectionStart).toBe(0);
expect(timepicker.element.selectionEnd).toBe(4);
});
it('Increment and decrement of date using up arrow', () => {
let inputEle: HTMLElement = createElement('input', { id: 'timepicker' });
document.body.appendChild(inputEle);
timepicker = new TimePicker({enableMask: true , format: 'hh:mm:ss'});
timepicker.appendTo('#timepicker');
timepicker.focusIn();
expect(timepicker.element.value).toBe('hour:minute:second');
timepicker.element.selectionStart = 0;
timepicker.element.selectionEnd = 4;
keyEventArgs.key = 'ArrowUp';
let date: Date = new Date();
timepicker.inputHandler(keyEventArgs);
expect(timepicker.element.value).toBe('01:minute:second');
expect(timepicker.value).toBe(null);
timepicker.element.selectionStart = 0;
timepicker.element.selectionEnd = 2;
keyEventArgs.key = 'ArrowDown';
timepicker.inputHandler(keyEventArgs);
expect(timepicker.element.value).toBe('12:minute:second');
expect(timepicker.value).toBe(null);
});
it('Value property', () => {
let inputEle: HTMLElement = createElement('input', { id: 'timepicker' });
document.body.appendChild(inputEle);
timepicker = new TimePicker({enableMask: true , format: 'hh:mm:ss aa'});
timepicker.appendTo('#timepicker');
timepicker.focusIn();
timepicker.mouseUpHandler(mouseEventArgs);
expect(timepicker.element.value).toBe('hour:minute:second AM');
expect(timepicker.value).toBe(null);
timepicker.element.value = '1:minute:second AM';
timepicker.element.selectionStart = 1;
timepicker.inputEventHandler();
expect(timepicker.element.value).toBe('01:minute:second AM');
expect(timepicker.value).toBe(null);
timepicker.element.value = '01:2:second AM';
timepicker.element.selectionStart = 4;
timepicker.inputEventHandler();
expect(timepicker.element.value).toBe('01:02:second AM');
expect(timepicker.value).toBe(null);
timepicker.element.value = '01:02:9 AM';
timepicker.element.selectionStart = 7;
timepicker.inputEventHandler();
expect(timepicker.element.value).toBe('01:02:09 AM');
expect(timepicker.value).toBe(null);
timepicker.element.value = '01:02:09 P';
timepicker.element.selectionStart = 10;
timepicker.inputEventHandler();
expect(timepicker.element.value).toBe('01:02:09 PM');
expect(timepicker.value).toBe(null);
timepicker.inputBlurHandler()
expect((timepicker.value.getHours() % 12).toString()+':'+timepicker.value.getMinutes().toString()+':'+timepicker.value.getSeconds().toString()).toBe('1:2:9');
});
it('24 hours format test', () => {
let inputEle: HTMLElement = createElement('input', { id: 'timepicker' });
document.body.appendChild(inputEle);
timepicker = new TimePicker({enableMask: true , format: 'HH:mm'});
timepicker.appendTo('#timepicker');
timepicker.focusIn();
timepicker.mouseUpHandler(mouseEventArgs);
expect(timepicker.element.value).toBe('hour:minute');
expect(timepicker.value).toBe(null);
timepicker.element.value = '1:minute';
timepicker.element.selectionStart = 1;
timepicker.inputEventHandler();
expect(timepicker.element.value).toBe('01:minute');
expect(timepicker.value).toBe(null);
timepicker.element.value = '01:2';
timepicker.element.selectionStart = 4;
timepicker.inputEventHandler();
expect(timepicker.element.value).toBe('01:02');
expect(timepicker.value).toBe(null);
});
it('Deletion', () => {
let inputEle: HTMLElement = createElement('input', { id: 'timepicker' });
document.body.appendChild(inputEle);
timepicker = new TimePicker({enableMask: true , format: 'hh:mm:ss aa'});
timepicker.appendTo('#timepicker');
timepicker.focusIn();
timepicker.mouseUpHandler(mouseEventArgs);
expect(timepicker.element.value).toBe('hour:minute:second AM');
expect(timepicker.value).toBe(null);
timepicker.element.value = '1:minute:second AM';
timepicker.element.selectionStart = 1;
timepicker.inputEventHandler();
expect(timepicker.element.value).toBe('01:minute:second AM');
expect(timepicker.value).toBe(null);
timepicker.element.value = '01:2:second AM';
timepicker.element.selectionStart = 4;
timepicker.inputEventHandler();
expect(timepicker.element.value).toBe('01:02:second AM');
expect(timepicker.value).toBe(null);
timepicker.element.value = '01:02:9 AM';
timepicker.element.selectionStart = 7;
timepicker.inputEventHandler();
expect(timepicker.element.value).toBe('01:02:09 AM');
expect(timepicker.value).toBe(null);
timepicker.element.value = '01:02:09 P';
timepicker.element.selectionStart = 10;
timepicker.inputEventHandler();
expect(timepicker.element.value).toBe('01:02:09 PM');
expect(timepicker.value).toBe(null);
timepicker.element.value = '01:02: PM';
timepicker.element.selectionStart = 6;
timepicker.element.selectionEnd = 8;
timepicker.inputEventHandler();
expect(timepicker.element.value).toBe('01:02:second PM');
expect(timepicker.value).toBe(null);
timepicker.element.value = '01::second PM';
timepicker.element.selectionStart = 3;
timepicker.element.selectionEnd = 5;
timepicker.inputEventHandler();
expect(timepicker.element.value).toBe('01:minute:second PM');
expect(timepicker.value).toBe(null);
timepicker.element.value = ':minute:second PM';
timepicker.element.selectionStart = 0;
timepicker.element.selectionEnd = 2;
keyEventArgs.action = keyEventArgs.key = keyEventArgs.code = 'Delete'
timepicker.keydownHandler(keyEventArgs);
timepicker.inputEventHandler();
expect(timepicker.element.value).toBe('hour:minute:second PM');
expect(timepicker.value).toBe(null);
});
// it('strict mode ', () => {
// let inputEle: HTMLElement = createElement('input', { id: 'timepicker' });
// document.body.appendChild(inputEle);
// timepicker = new TimePicker({enableMask: true , format: 'hh:mm:ss' , strictMode: true});
// timepicker.appendTo('#timepicker');
// timepicker.focusIn();
// timepicker.mouseUpHandler(mouseEventArgs);
// expect(timepicker.element.value).toBe('hour:minute:second');
// expect(timepicker.value).toBe(null);
// timepicker.element.value = '1:minute:second';
// timepicker.element.selectionStart = 1;
// timepicker.inputEventHandler();
// expect(timepicker.element.value).toBe('01:minute:second');
// expect(timepicker.value).toBe(null);
// timepicker.element.value = '01:2:second';
// timepicker.element.selectionStart = 4;
// timepicker.inputEventHandler();
// expect(timepicker.element.value).toBe('01:02:second');
// expect(timepicker.value).toBe(null);
// timepicker.element.value = '01:02:9';
// timepicker.element.selectionStart = 7;
// timepicker.inputEventHandler();
// expect(timepicker.element.value).toBe('01:02:09');
// expect(timepicker.value).toBe(null);
// timepicker.element.value = '01:02:09f';
// timepicker.inputBlurHandler()
// expect(timepicker.element.value).toBe('01:02:09');
// // expect(timepicker.value).toBe(null);
// });
// it('strict mode with min and max property', () => {
// let inputEle: HTMLElement = createElement('input', { id: 'timepicker' });
// document.body.appendChild(inputEle);
// timepicker = new TimePicker({enableMask: true , format: 'hh:mm:ss' , strictMode: true , min: new Date(1,1,2021,1,1,1),
// max: new Date(1,1,2021,5,5,5)
// });
// timepicker.appendTo('#timepicker');
// timepicker.focusIn();
// timepicker.mouseUpHandler(mouseEventArgs);
// expect(timepicker.element.value).toBe('hour:minute:second');
// expect(timepicker.value).toBe(null);
// timepicker.element.value = '6:minute:second';
// timepicker.element.selectionStart = 1;
// timepicker.inputEventHandler();
// expect(timepicker.element.value).toBe('06:minute:second');
// expect(timepicker.value).toBe(null);
// timepicker.element.value = '06:2:second';
// timepicker.element.selectionStart = 4;
// timepicker.inputEventHandler();
// expect(timepicker.element.value).toBe('06:02:second');
// expect(timepicker.value).toBe(null);
// timepicker.element.value = '06:02:9';
// timepicker.element.selectionStart = 7;
// timepicker.inputEventHandler();
// expect(timepicker.element.value).toBe('06:02:09');
// expect(timepicker.value).toBe(null);
// timepicker.inputBlurHandler()
// expect(timepicker.element.value).toBe('05:01:01');
// // expect(timepicker.value).toBe(null);
// });
it('RTL mode with maskedDatepicker ', () => {
let inputEle: HTMLElement = createElement('input', { id: 'timepicker' });
document.body.appendChild(inputEle);
timepicker = new TimePicker({enableMask: true , format: 'hh:mm:ss' ,enableRtl: true});
timepicker.appendTo('#timepicker');
timepicker.focusIn();
timepicker.mouseUpHandler(mouseEventArgs);
expect(timepicker.element.value).toBe('hour:minute:second');
expect(timepicker.value).toBe(null);
timepicker.element.value = '1:minute:second';
timepicker.element.selectionStart = 1;
timepicker.inputEventHandler();
expect(timepicker.element.value).toBe('01:minute:second');
expect(timepicker.value).toBe(null);
timepicker.element.value = '01:2:second';
timepicker.element.selectionStart = 4;
timepicker.inputEventHandler();
expect(timepicker.element.value).toBe('01:02:second');
expect(timepicker.value).toBe(null);
timepicker.element.value = '01:02:9';
timepicker.element.selectionStart = 7;
timepicker.inputEventHandler();
expect(timepicker.element.value).toBe('01:02:09');
expect(timepicker.value).toBe(null);
timepicker.inputBlurHandler()
// expect(+timepicker.value).toBe(+new Date());
});
// it('Enable mask with placeholder', () => {
// let inputEle: HTMLElement = createElement('input', { id: 'timepicker' });
// document.body.appendChild(inputEle);
// timepicker = new TimePicker({enableMask: true , format: 'hh:mm:ss' , placeholder: 'Enter the date'});
// timepicker.appendTo('#timepicker');
// expect(timepicker.element.value).toBe('');
// expect(timepicker.value).toBe(null);
// timepicker.focusIn();
// expect(timepicker.element.value).toBe('hour:minute:second');
// expect(timepicker.value).toBe(null);
// timepicker.focusOut();
// expect(timepicker.element.value).toBe('');
// expect(timepicker.value).toBe(null);
// timepicker.focusIn();
// timepicker.element.value = '1:minute:second';
// timepicker.element.selectionStart = 1;
// timepicker.inputEventHandler();
// expect(timepicker.element.value).toBe('01:minute:second');
// timepicker.focusOut();
// expect(timepicker.element.value).toBe('01:minute:second');
// expect(timepicker.value).toBe(null);
// });
// it('FloatLabel type as auto', () => {
// let inputEle: HTMLElement = createElement('input', { id: 'timepicker' });
// document.body.appendChild(inputEle);
// timepicker = new TimePicker({enableMask: true , format: 'dd/MM/yy ddd' , placeholder: 'Enter the date' , floatLabelType: 'Auto'});
// timepicker.appendTo('#timepicker');
// expect(document.querySelector('.e-float-text').innerHTML).toBe('Enter the date');
// expect(document.getElementsByClassName('e-float-text')[0].classList.contains('e-label-bottom')).toBe(true);
// expect(timepicker.element.value).toBe('');
// expect(timepicker.value).toBe(null);
// timepicker.focusIn();
// expect(timepicker.element.value).toBe('day/month/year');
// expect(document.getElementsByClassName('e-float-text')[0].classList.contains('e-label-top')).toBe(true);
// expect(timepicker.value).toBe(null);
// timepicker.focusOut();
// expect(timepicker.element.value).toBe('');
// expect(document.getElementsByClassName('e-float-text')[0].classList.contains('e-label-bottom')).toBe(true);
// expect(timepicker.value).toBe(null);
// timepicker.focusIn();
// timepicker.element.value = '1/month/year';
// timepicker.element.selectionStart = 1;
// timepicker.inputEventHandler();
// expect(timepicker.element.value).toBe('01/month/year');
// timepicker.focusOut();
// expect(timepicker.element.value).toBe('01/month/year');
// expect(document.getElementsByClassName('e-float-text')[0].classList.contains('e-label-top')).toBe(true);
// expect(timepicker.value).toBe(null);
// });
// it('FloatLabel type as always', () => {
// let inputEle: HTMLElement = createElement('input', { id: 'timepicker' });
// document.body.appendChild(inputEle);
// timepicker = new TimePicker({enableMask: true , format: 'dd/MM/yy ddd' , placeholder: 'Enter the date' , floatLabelType: 'Always'});
// timepicker.appendTo('#timepicker');
// expect(document.querySelector('.e-float-text').innerHTML).toBe('Enter the date');
// expect(document.getElementsByClassName('e-float-text')[0].classList.contains('e-label-top')).toBe(true);
// expect(timepicker.element.value).toBe('day/month/year');
// expect(timepicker.value).toBe(null);
// timepicker.focusIn();
// expect(timepicker.element.value).toBe('day/month/year');
// expect(document.getElementsByClassName('e-float-text')[0].classList.contains('e-label-top')).toBe(true);
// expect(timepicker.value).toBe(null);
// timepicker.focusOut();
// expect(timepicker.element.value).toBe('');
// expect(document.getElementsByClassName('e-float-text')[0].classList.contains('e-label-top')).toBe(true);
// expect(timepicker.value).toBe(null);
// timepicker.focusIn();
// timepicker.element.value = '1/month/year';
// timepicker.element.selectionStart = 1;
// timepicker.inputEventHandler();
// expect(timepicker.element.value).toBe('01/month/year');
// timepicker.focusOut();
// expect(timepicker.element.value).toBe('01/month/year');
// expect(document.getElementsByClassName('e-float-text')[0].classList.contains('e-label-top')).toBe(true);
// expect(timepicker.value).toBe(null);
// });
it('culture(ja) test case', () => {
loadCultureFiles('ja');
timepicker = new TimePicker({
locale: 'ja',
format: 'hh:mm:ss',
enableMask: true
});
timepicker.appendTo('#date');
expect(timepicker.locale).toBe('ja');
timepicker.focusIn();
expect(timepicker.element.value).toBe('時間:分:秒');
});
// it('culture(zh) test case', () => {
// loadCultureFiles('zh');
// timepicker = new TimePicker({
// locale: 'zh',
// format: 'hh:mm:ss',
// enableMask: true
// });
// timepicker.appendTo('#date');
// expect(timepicker.locale).toBe('zh');
// timepicker.focusIn();
// setTimeout(() => {
// expect(timepicker.element.value).toBe('小時:分鐘:第二');
// },100);
// timepicker.focusOut();
// });
// it('culture(de) test case', () => {
// loadCultureFiles('ja');
// timepicker = new TimePicker({
// locale: 'de',
// format: 'hh:mm:ss',
// enableMask: true
// });
// timepicker.appendTo('#date');
// expect(timepicker.locale).toBe('de');
// expect(timepicker.element.value).toBe('Stunde:Minute:Sekunden');
// });
it('dynamic mask module ', () => {
let inputEle: HTMLElement = createElement('input', { id: 'timepicker' });
document.body.appendChild(inputEle);
timepicker = new TimePicker({format: 'hh:mm:ss'});
timepicker.appendTo('#timepicker');
expect(timepicker.element.value).toBe('');
expect(timepicker.value).toBe(null);
timepicker.enableMask = true;
timepicker.dataBind();
expect(timepicker.element.value).toBe('hour:minute:second');
expect(timepicker.value).toBe(null);
timepicker.enableMask = false;
timepicker.dataBind();
expect(timepicker.element.value).toBe('');
});
// it('ChangeEvent ', () => {
// let inputEle: HTMLElement = createElement('input', { id: 'timepicker' });
// document.body.appendChild(inputEle);
// timepicker = new TimePicker({enableMask: true , format: 'dd/MM/yyyy' ,
// change: function(args) {
// expect(+args.value).toBe(+new Date('01/01/2012'));
// }
// });
// timepicker.appendTo('#timepicker');
// timepicker.focusIn();
// timepicker.mouseUpHandler(mouseEventArgs);
// expect(timepicker.element.value).toBe('hour:minute:second');
// expect(timepicker.value).toBe(null);
// timepicker.element.value = '1:minute:second';
// timepicker.element.selectionStart = 1;
// timepicker.inputEventHandler();
// expect(timepicker.element.value).toBe('01:minute:second');
// expect(timepicker.value).toBe(null);
// timepicker.element.value = '01:2:second';
// timepicker.element.selectionStart = 4;
// timepicker.inputEventHandler();
// expect(timepicker.element.value).toBe('01:02:second');
// expect(timepicker.value).toBe(null);
// timepicker.element.value = '01:02:9';
// timepicker.element.selectionStart = 7;
// timepicker.inputEventHandler();
// expect(timepicker.element.value).toBe('01:02:09');
// expect(timepicker.value).toBe(null);
// timepicker.inputBlurHandler()
// // expect(+timepicker.value).toBe(+new Date());
// });
it('With format property ', () => {
let inputEle: HTMLElement = createElement('input', { id: 'timepicker' });
document.body.appendChild(inputEle);
timepicker = new TimePicker({enableMask: true , format: 'h:m:s H'});
timepicker.appendTo('#timepicker');
expect(timepicker.element.value).toBe('hour:minute:second hour');
expect(timepicker.value).toBe(null);
});
it('With format property -1 ', () => {
let inputEle: HTMLElement = createElement('input', { id: 'timepicker' });
document.body.appendChild(inputEle);
timepicker = new TimePicker({enableMask: true , format: 'HH:mm:ss'});
timepicker.appendTo('#timepicker');
expect(timepicker.element.value).toBe('hour:minute:second');
expect(timepicker.value).toBe(null);
});
it('with format property -2', () => {
let inputEle: HTMLElement = createElement('input', { id: 'timepicker' });
document.body.appendChild(inputEle);
timepicker = new TimePicker({enableMask: true , format: 'hh:mm:ss aa'});
timepicker.appendTo('#timepicker');
expect(timepicker.element.value).toBe('hour:minute:second AM');
expect(timepicker.value).toBe(null);
});
// it('with format property -3', () => {
// let inputEle: HTMLElement = createElement('input', { id: 'timepicker' });
// document.body.appendChild(inputEle);
// timepicker = new TimePicker({enableMask: true , format: 'hh:mm:ss zzzz'});
// timepicker.appendTo('#timepicker');
// // expect(timepicker.element.value).toBe('hour:minute:second +05:30+5');
// expect(timepicker.value).toBe(null);
// });
// it('with format property -4', () => {
// let inputEle: HTMLElement = createElement('input', { id: 'timepicker' });
// document.body.appendChild(inputEle);
// timepicker = new TimePicker({enableMask: true , format: 'hh:mm:ss zz'});
// timepicker.appendTo('#timepicker');
// // expect(timepicker.element.value).toBe('hour:minute:second +05');
// expect(timepicker.value).toBe(null);
// });
});
describe('EJ2-54456-When enabling mask support, the change event will not be triggered', () => {
let timepicker: any;
let maskedDateTime: any;
let keyEventArgs: any = {
preventDefault: (): void => { /** NO Code */ },
stopPropagation:(): void=>{},
action: 'ArrowUp',
code: 'ArrowUp',
key: 'ArrowUp'
};
beforeAll(() => {
let ele: HTMLElement = createElement('input', { id: 'date' });
document.body.appendChild(ele);
});
afterAll(() => {
if (timepicker) {
timepicker.destroy();
}
document.body.innerHTML = '';
maskedDateTime = new MaskedDateTime();
maskedDateTime.destroy();
});
it('Testing change event ', () => {
let inputEle: HTMLElement = createElement('input', { id: 'timepicker' });
document.body.appendChild(inputEle);
timepicker = new TimePicker({enableMask: true , format: 'h:mm a', change: function(args){
expect(args.text === timepicker.inputElement.value).toBe(true);
},
});
timepicker.appendTo('#timepicker');
timepicker.focusIn();
expect(timepicker.inputWrapper.container.classList.contains('e-input-focus')).toBe(true);
timepicker.liCollections[0].click();
expect(timepicker.liCollections[0].classList.contains('e-active')).toBe(true);
expect(timepicker.inputElement.value === "12:00 AM").toBe(true);
timepicker.element.selectionStart = 0;
timepicker.element.selectionEnd = 2;
keyEventArgs.action = 'shiftTab';
timepicker.inputHandler(keyEventArgs);
timepicker.inputBlurHandler();
expect(timepicker.inputElement.value === "1:00 AM").toBe(true);
timepicker.element.selectionStart = 0;
timepicker.element.selectionEnd = 2;
keyEventArgs.key = 'ArrowDown';
timepicker.inputHandler(keyEventArgs);
timepicker.inputBlurHandler();
expect(timepicker.inputElement.value === "12:00 AM").toBe(true);
});
});
describe('EJ2-54761', () => {
let timepicker: any;
let maskedDateTime: any;
let keyEventArgs: any = {
preventDefault: (): void => { /** NO Code */ },
stopPropagation:(): void=>{}
};
let mouseEventArgs: any = { preventDefault: function () { }, target: null };
beforeAll(() => {
let ele: HTMLElement = createElement('input', { id: 'date' });
document.body.appendChild(ele);
});
afterAll(() => {
if (timepicker) {
timepicker.destroy();
}
document.body.innerHTML = '';
maskedDateTime = new MaskedDateTime();
maskedDateTime.destroy();
});
it('Mask support in TimePicker is not working properly when typing value starts with 0', () => {
let inputEle: HTMLElement = createElement('input', { id: 'timepicker' });
document.body.appendChild(inputEle);
timepicker = new TimePicker({enableMask: true , format: "HH:mm a"});
timepicker.appendTo('#timepicker');
timepicker.focusIn();
expect(timepicker.element.value).toBe('hour:minute AM');
timepicker.element.selectionStart= 0;
timepicker.element.selectionEnd= 4
timepicker.mouseUpHandler(mouseEventArgs);
timepicker.element.value = '0:minute AM';
timepicker.element.selectionStart = 1;
timepicker.inputEventHandler();
expect(timepicker.element.value).toBe('00:minute AM');
expect(timepicker.value).toBe(null);
timepicker.element.selectionStart = 0;
timepicker.element.selectionEnd = 1;
timepicker.element.value = '1:minute AM';
timepicker.element.selectionStart = 1;
timepicker.inputEventHandler();
expect(timepicker.element.value).toBe('01:minute AM');
expect(timepicker.element.selectionStart).toBe(3);
expect(timepicker.element.selectionEnd).toBe(9);
timepicker.element.selectionStart = 3;
timepicker.element.selectionEnd = 9;
timepicker.element.value = '01:2 AM';
timepicker.element.selectionStart = 4;
timepicker.inputEventHandler();
expect(timepicker.element.value).toBe('01:02 AM');
expect(timepicker.value).toBe(null);
timepicker.element.selectionStart = 3;
timepicker.element.selectionEnd = 5;
timepicker.element.value = '01:3 AM';
timepicker.element.selectionStart = 4;
timepicker.inputEventHandler();
expect(timepicker.element.value).toBe('01:23 AM');
expect(timepicker.element.selectionStart).toBe(6);
expect(timepicker.element.selectionEnd).toBe(8);
});
});
describe('EJ2-56789: change event is not trigger when mask property is enable and manually update the time in input', () => {
let timepicker: any;
let maskedDateTime: any;
beforeAll(() => {
let ele: HTMLElement = createElement('input', { id: 'date' });
document.body.appendChild(ele);
});
afterAll(() => {
if (timepicker) {
timepicker.destroy();
}
document.body.innerHTML = '';
maskedDateTime = new MaskedDateTime();
maskedDateTime.destroy();
});
it('Testing change event trigger', () => {
let inputEle: HTMLElement = createElement('input', { id: 'timepicker' });
document.body.appendChild(inputEle);
timepicker = new TimePicker({enableMask: true , format: "hh:mm a", change: function(args) {
expect((args.value) == (timepicker.value)).toBe(true);
}});
timepicker.appendTo('#timepicker');
timepicker.focusIn();
timepicker.liCollections[0].click();
expect(timepicker.inputElement.value === "12:00 AM").toBe(true);
timepicker.element.selectionStart= 0;
timepicker.element.selectionEnd= 2
timepicker.element.value = '1:00 AM';
timepicker.element.selectionStart = 1;
timepicker.inputEventHandler();
expect(timepicker.element.value).toBe('01:00 AM');
timepicker.inputBlurHandler();
timepicker.element.selectionStart = 3;
timepicker.element.selectionEnd = 4;
timepicker.element.value = '01:2 AM';
timepicker.element.selectionStart = 4;
timepicker.inputEventHandler();
expect(timepicker.element.value).toBe('01:02 AM');
timepicker.inputBlurHandler();
});
});
});
import { URL } from 'url';
import http from 'https';
import { ChannelWrapper } from 'amqp-connection-manager';
import { ConsumeMessage } from 'amqplib';
import { logger } from '@lagoon/commons/dist/local-logging';
import { getRocketChatInfoForProject } from '@lagoon/commons/dist/api';
import { notificationIntToContentType, notificationContentTypeToInt, parseProblemNotification } from '@lagoon/commons/dist/notificationCommons';
/**
 * Consumes a Lagoon logs message from RabbitMQ and, for recognized events,
 * builds a Markdown-formatted notification and forwards it to the project's
 * Rocket.Chat channel(s) via sendToRocketChat.
 *
 * @param msg - Raw AMQP message whose content is a JSON-encoded log message
 *   with `severity`, `project`, `uuid`, `event`, `meta` and `message` fields.
 * @param channelWrapperLogs - Channel wrapper used to ack the message when no
 *   notification applies; for handled events the ack is expected to happen
 *   downstream in sendToRocketChat.
 */
export async function readFromRabbitMQ (msg: ConsumeMessage, channelWrapperLogs: ChannelWrapper): Promise<void> {
  const logMessage = JSON.parse(msg.content.toString())
  const {
    severity,
    project,
    uuid,
    event,
    meta,
    message
  } = logMessage

  const appId = msg.properties.appId || ""

  logger.verbose(`received ${event} for project ${project}`)

  // Notification text assembled per event type below.
  let text

  switch (event) {

    case "github:pull_request:opened:handled":
    case "gitlab:merge_request:opened:handled":
    case "bitbucket:pullrequest:created:opened:handled":
      text = `*[${meta.projectName}]* PR [#${meta.pullrequestNumber} (${meta.pullrequestTitle})](${meta.pullrequestUrl}) opened in [${meta.repoName}](${meta.repoUrl})`
      sendToRocketChat(project, text, '#E8E8E8', ':information_source:', channelWrapperLogs, msg, appId)
      break;

    case "github:pull_request:synchronize:handled":
    case "bitbucket:pullrequest:updated:opened:handled":
    case "gitlab:merge_request:updated:handled":
      text = `*[${meta.projectName}]* PR [#${meta.pullrequestNumber} (${meta.pullrequestTitle})](${meta.pullrequestUrl}) updated in [${meta.repoName}](${meta.repoUrl})`
      sendToRocketChat(project, text, '#E8E8E8', ':information_source:', channelWrapperLogs, msg, appId)
      break;

    case "bitbucket:pullrequest:fulfilled:handled":
    case "bitbucket:pullrequest:rejected:handled":
    case "github:pull_request:closed:handled":
    case "gitlab:merge_request:closed:handled":
      text = `*[${meta.projectName}]* PR [#${meta.pullrequestNumber} (${meta.pullrequestTitle})](${meta.pullrequestUrl}) closed in [${meta.repoName}](${meta.repoUrl})`
      sendToRocketChat(project, text, '#E8E8E8', ':information_source:', channelWrapperLogs, msg, appId)
      break;

    case "rest:pullrequest:deploy":
      text = `*[${meta.projectName}]* REST pullrequest deploy trigger \`${meta.pullrequestTitle}\``
      sendToRocketChat(project, text, '#E8E8E8', ':information_source:', channelWrapperLogs, msg, appId)
      break;

    case "github:delete:handled":
    case "gitlab:remove:handled":
    case "bitbucket:delete:handled":
    case "api:deleteEnvironment":
      text = `*[${meta.projectName}]* delete trigger \`${meta.environmentName}\``
      sendToRocketChat(project, text, '#E8E8E8', ':information_source:', channelWrapperLogs, msg, appId)
      break;

    case "rest:remove:receive":
      text = `*[${meta.projectName}]* REST remove trigger \`${meta.branchName}\``
      sendToRocketChat(project, text, '#E8E8E8', ':information_source:', channelWrapperLogs, msg, appId)
      break;

    case "bitbucket:repo:push:handled":
    case "github:push:handled":
    case "gitlab:push:handled":
      text = `*[${meta.projectName}]* [${meta.branchName}](${meta.repoUrl}/tree/${meta.branchName})`
      if (meta.shortSha){
        text = `${text} ([${meta.shortSha}](${meta.commitUrl}))`
      }
      text = `${text} pushed in [${meta.repoFullName}](${meta.repoUrl})`
      sendToRocketChat(project, text, '#E8E8E8', ':information_source:', channelWrapperLogs, msg, appId)
      break;

    case "gitlab:push:skipped":
    case "github:push:skipped":
    case "bitbucket:push:skipped":
      text = `*[${meta.projectName}]* [${meta.branchName}](${meta.repoUrl}/tree/${meta.branchName})`
      if (meta.shortSha){
        text = `${text} ([${meta.shortSha}](${meta.commitUrl}))`
      }
      text = `${text} pushed in [${meta.repoFullName}](${meta.repoUrl}) *deployment skipped*`
      sendToRocketChat(project, text, '#E8E8E8', ':information_source:', channelWrapperLogs, msg, appId)
      break;

    case "api:deployEnvironmentLatest":
    case "api:deployEnvironmentBranch":
      text = `*[${meta.projectName}]* API deploy trigger \`${meta.branchName}\``
      if (meta.shortSha) {
        text = `${text} (${meta.shortSha})`
      }
      sendToRocketChat(project, text, '#E8E8E8', ':information_source:', channelWrapperLogs, msg, appId)
      break;

    case "rest:deploy:receive":
      text = `*[${meta.projectName}]* REST deploy trigger \`${meta.branchName}\``
      if (meta.shortSha) {
        text = `${text} (${meta.shortSha})`
      }
      sendToRocketChat(project, text, '#E8E8E8', ':information_source:', channelWrapperLogs, msg, appId)
      break;

    case "rest:promote:receive":
      text = `*[${meta.projectName}]* REST promote trigger \`${meta.branchName}\` -> \`${meta.promoteSourceEnvironment}\``
      sendToRocketChat(project, text, 'gold', ':warning:', channelWrapperLogs, msg, appId)
      break;

    case "task:deploy-openshift:finished":
    case "task:remove-openshift-resources:finished":
    case "task:builddeploy-openshift:complete":
    case "task:builddeploy-kubernetes:complete":
      text = `*[${meta.projectName}]* `
      if (meta.shortSha) {
        text = `${text} \`${meta.branchName}\` (${meta.shortSha})`
      } else {
        text = `${text} \`${meta.branchName}\``
      }
      text = `${text} Build \`${meta.buildName}\` complete.`
      if (meta.logLink){
        text = `${text} [Logs](${meta.logLink})\n`
      }
      text = `${text}\n ${meta.route}\n`
      if (meta.routes) {
        text = `${text}\n ${meta.routes.join("\n")}`
      }
      sendToRocketChat(project, text, 'lawngreen', ':white_check_mark:', channelWrapperLogs, msg, appId)
      break;

    case "rest:pullrequest:remove":
      text = `*[${meta.projectName}]* REST pullrequest remove trigger \`${meta.pullrequestNumber}\``
      sendToRocketChat(project, text, 'lawngreen', ':white_check_mark:', channelWrapperLogs, msg, appId)
      break;

    case "task:remove-openshift:finished":
    case "task:remove-kubernetes:finished":
      text = `*[${meta.projectName}]* remove \`${meta.openshiftProject}\``
      sendToRocketChat(project, text, 'lawngreen', ':white_check_mark:', channelWrapperLogs, msg, appId)
      break;

    case "task:deploy-openshift:retry":
    case "task:remove-openshift:retry":
    case "task:remove-kubernetes:retry":
    case "task:remove-openshift-resources:retry":
      text = `*[${meta.projectName}]*`
      if (meta.shortSha) {
        text = `${text} \`${meta.branchName}\` (${meta.shortSha})`
      } else {
        text = `${text} \`${meta.branchName}\``
      }
      text = `${text} Build \`${meta.buildName}\` failed.`
      if (meta.logLink){
        text = `${text} ${meta.logLink}`
      }
      // BUGFIX: previously the raw `message` was sent here, discarding the
      // formatted `text` built above.
      sendToRocketChat(project, text, 'gold', ':warning:', channelWrapperLogs, msg, appId)
      break;

    case "task:deploy-openshift:error":
    case "task:remove-openshift:error":
    case "task:remove-kubernetes:error":
    case "task:remove-openshift-resources:error":
    case "task:builddeploy-kubernetes:failed":
    case "task:builddeploy-openshift:failed":
      text = `*[${meta.projectName}]*`
      if (meta.shortSha) {
        text = `${text} \`${meta.branchName}\` (${meta.shortSha})`
      } else {
        text = `${text} \`${meta.branchName}\``
      }
      text = `${text} Build \`${meta.buildName}\` failed.`
      if (meta.logLink){
        text = `${text} [Logs](${meta.logLink})\n`
      }
      sendToRocketChat(project, text, 'red', ':bangbang:', channelWrapperLogs, msg, appId)
      break;

    case "github:pull_request:closed:CannotDeleteProductionEnvironment":
    case "github:push:CannotDeleteProductionEnvironment":
    case "bitbucket:repo:push:CannotDeleteProductionEnvironment":
    case "gitlab:push:CannotDeleteProductionEnvironment":
    case "rest:remove:CannotDeleteProductionEnvironment":
      // NOTE(review): this branch reads meta.name while others use
      // meta.projectName — confirm against the payload producers.
      text = `*[${meta.name}]* \`${meta.branchName}\` not deleted. ${meta.error}`
      // BUGFIX: previously the raw `message` was sent here, discarding the
      // formatted `text` built above.
      sendToRocketChat(project, text, 'gold', ':warning:', channelWrapperLogs, msg, appId)
      break;

    default:
      //since there's no single point of acknowlegement of the msg, we need to keep track of whether we've handled the message
      let eventHandledAsProblem = dispatchProblemEventToRocketChat(event, project, message, channelWrapperLogs, msg, appId);
      if(!eventHandledAsProblem) {
        return channelWrapperLogs.ack(msg);
      }
  }
}
// Tries to route an event to RocketChat as a "problem" notification.
// Returns true when the event parsed as a newly-inserted problem and a
// message was dispatched; false when the caller still has to handle it.
const dispatchProblemEventToRocketChat = (event, project, message, channelWrapperLogs, msg, appId) => {
  const parsed = parseProblemNotification(event);
  const isNewProblem = parsed.isProblem && parsed.eventType == 'insert';
  if (!isNewProblem) {
    return false;
  }
  sendToRocketChat(project, message, 'red', ':warning:', channelWrapperLogs, msg, appId, 'PROBLEM', parsed.severityLevel)
  return true;
};
// Looks up the project's RocketChat notification targets and POSTs the
// formatted message to each matching webhook, then acks the AMQP message.
//
// project                - project name used to look up notification config
// message                - already-formatted notification text
// color / emoji          - attachment colour and leading emoji
// channelWrapperLogs/msg - AMQP channel wrapper and the message to ack
// appId                  - shown as the "Source" field on the attachment
// contentType            - 'DEPLOYMENT' (default) or 'PROBLEM'
// severityLevel          - problem severity, compared per-target below
const sendToRocketChat = async (project, message, color, emoji, channelWrapperLogs, msg, appId, contentType = 'DEPLOYMENT', severityLevel = 'NONE') => {
  let projectRocketChats;
  try {
    projectRocketChats = await getRocketChatInfoForProject(project, contentType)
  }
  catch (error) {
    logger.error(`No RocketChat information found, error: ${error}`)
    return channelWrapperLogs.ack(msg)
  }
  // Plain for..of instead of forEach(async ...): the async wrapper did
  // nothing (the body never awaits) and turned any thrown error into an
  // unhandled promise rejection.
  for (const projectRocketChat of projectRocketChats) {
    // Problem notifications only go to channels whose configured severity
    // threshold is met by this event's severity.
    const notificationThresholdMet = notificationContentTypeToInt(projectRocketChat.notificationSeverityThreshold) <= notificationContentTypeToInt(severityLevel);
    if (contentType == 'PROBLEM' && !notificationThresholdMet) { continue; } // skip this target
    const { channel, webhook } = projectRocketChat;
    const rocketchat = new URL(webhook);
    const data = JSON.stringify({
      channel: `#${channel}`,
      attachments: [{
        text: `${emoji} ${message}`,
        color: color,
        fields: [
          {
            "short": true,
            "title": "Source",
            "value": appId
          }
        ],
      }]
    });
    const options = {
      hostname: rocketchat.hostname,
      port: rocketchat.port,
      path: rocketchat.pathname,
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        // Content-Length must be the byte length of the payload:
        // data.length counts UTF-16 code units and under-reports
        // multi-byte UTF-8 content, truncating the request body.
        'Content-Length': Buffer.byteLength(data)
      }
    };
    const req = http.request(options, function(res) {
      res.setEncoding('utf8');
    });
    req.on('error', function(e) {
      logger.error(`problem with request: ${e.message}`);
    });
    req.write(data);
    req.end();
  }
  channelWrapperLogs.ack(msg)
  return
}
import { gql } from '@apollo/client';
import * as Apollo from '@apollo/client';
/** Wraps a type so it may also be `null`. */
export type Maybe<T> = T | null;
/** Input-position counterpart of `Maybe`. */
export type InputMaybe<T> = Maybe<T>;
/** Identity mapped type used by the generated operation variable types below. */
export type Exact<T extends { [key: string]: unknown }> = { [K in keyof T]: T[K] };
/** Makes the keys `K` of `T` optional and nullable. */
export type MakeOptional<T, K extends keyof T> = Omit<T, K> & { [SubKey in K]?: Maybe<T[SubKey]> };
/** Makes the keys `K` of `T` nullable (but still required). */
export type MakeMaybe<T, K extends keyof T> = Omit<T, K> & { [SubKey in K]: Maybe<T[SubKey]> };
// Shared default options spread into every generated hook call below.
const defaultOptions = {}
/** All built-in and custom scalars, mapped to their actual values */
export type Scalars = {
  ID: string;
  String: string;
  Boolean: boolean;
  Int: number;
  Float: number;
};
/** Validation error for a single input field (see `UserResponse.errors`). */
export type FieldError = {
  __typename?: 'FieldError';
  field: Scalars['String'];
  message: Scalars['String'];
};
/** A user-owned folder; references its notes by id via `noteIds`. */
export type Folder = {
  __typename?: 'Folder';
  color: Scalars['String'];
  createdAt: Scalars['String'];
  creator: User;
  creatorId: Scalars['Float'];
  id: Scalars['Float'];
  name: Scalars['String'];
  noteIds: Array<Scalars['String']>;
  updatedAt: Scalars['String'];
};
/** Root mutation type: note/folder CRUD plus authentication flows. */
export type Mutation = {
  __typename?: 'Mutation';
  addNoteToFolder: Scalars['Boolean'];
  changePassword: UserResponse;
  createFolder: Scalars['Boolean'];
  createNote: Note;
  deleteFolder: Scalars['Boolean'];
  deleteNote: Scalars['Boolean'];
  deleteNoteFromFolder: Scalars['Boolean'];
  forgotPassword: Scalars['Boolean'];
  login: UserResponse;
  logout: Scalars['Boolean'];
  register: UserResponse;
  updateName: Scalars['Boolean'];
  updateNote: Scalars['Boolean'];
  updateNoteTitle: Scalars['Boolean'];
};
// Argument object types for each root mutation field above.
export type MutationAddNoteToFolderArgs = {
  folderId: Scalars['Int'];
  noteId: Scalars['Int'];
};
export type MutationChangePasswordArgs = {
  newPassword: Scalars['String'];
  token: Scalars['String'];
};
export type MutationCreateFolderArgs = {
  name: Scalars['String'];
};
export type MutationCreateNoteArgs = {
  title: Scalars['String'];
};
export type MutationDeleteFolderArgs = {
  id: Scalars['Int'];
};
export type MutationDeleteNoteArgs = {
  id: Scalars['Int'];
};
export type MutationDeleteNoteFromFolderArgs = {
  folderId: Scalars['Int'];
  noteId: Scalars['Int'];
};
export type MutationForgotPasswordArgs = {
  email: Scalars['String'];
};
export type MutationLoginArgs = {
  email: Scalars['String'];
  password: Scalars['String'];
};
export type MutationRegisterArgs = {
  options: UserInput;
};
export type MutationUpdateNameArgs = {
  name: Scalars['String'];
};
export type MutationUpdateNoteArgs = {
  body: Scalars['String'];
  id: Scalars['Int'];
};
export type MutationUpdateNoteTitleArgs = {
  id: Scalars['Int'];
  title: Scalars['String'];
};
/** A single note belonging to a user. */
export type Note = {
  __typename?: 'Note';
  body: Scalars['String'];
  createdAt: Scalars['String'];
  creator: User;
  creatorId: Scalars['Float'];
  id: Scalars['Float'];
  status: Scalars['String'];
  title: Scalars['String'];
  updatedAt: Scalars['String'];
};
/** Root query type: fetch a note by id or the current session user. */
export type Query = {
  __typename?: 'Query';
  getNote: Note;
  me?: Maybe<User>;
};
export type QueryGetNoteArgs = {
  id: Scalars['Int'];
};
/** An account, including its notes and folders. */
export type User = {
  __typename?: 'User';
  createdAt: Scalars['String'];
  email: Scalars['String'];
  folders: Array<Folder>;
  id: Scalars['Float'];
  imgUrl: Scalars['String'];
  name: Scalars['String'];
  notes: Array<Note>;
  theme: Scalars['String'];
  updatedAt: Scalars['String'];
};
/** Registration input for the `register` mutation. */
export type UserInput = {
  email: Scalars['String'];
  name: Scalars['String'];
  password: Scalars['String'];
};
/** Either a `user` on success or field-level `errors` on failure. */
export type UserResponse = {
  __typename?: 'UserResponse';
  errors?: Maybe<Array<FieldError>>;
  user?: Maybe<User>;
};
// TypeScript result shapes for the GraphQL fragments defined further below.
export type RegularErrorFragment = { __typename?: 'FieldError', field: string, message: string };
export type RegularFolderFragment = { __typename?: 'Folder', id: number, name: string, color: string, noteIds: Array<string>, creatorId: number, createdAt: string, updatedAt: string };
export type RegularNoteFragment = { __typename?: 'Note', id: number, title: string, body: string, status: string, creatorId: number, createdAt: string, updatedAt: string };
export type RegularUserFragment = { __typename: 'User', id: number, name: string, theme: string, imgUrl: string, email: string, createdAt: string, updatedAt: string, notes: Array<{ __typename?: 'Note', id: number, title: string, body: string, status: string, creatorId: number, createdAt: string, updatedAt: string }>, folders: Array<{ __typename?: 'Folder', id: number, name: string, color: string, noteIds: Array<string>, creatorId: number, createdAt: string, updatedAt: string }> };
export type RegularUserResponseFragment = { __typename?: 'UserResponse', errors?: Array<{ __typename?: 'FieldError', field: string, message: string }> | null | undefined, user?: { __typename: 'User', id: number, name: string, theme: string, imgUrl: string, email: string, createdAt: string, updatedAt: string, notes: Array<{ __typename?: 'Note', id: number, title: string, body: string, status: string, creatorId: number, createdAt: string, updatedAt: string }>, folders: Array<{ __typename?: 'Folder', id: number, name: string, color: string, noteIds: Array<string>, creatorId: number, createdAt: string, updatedAt: string }> } | null | undefined };
// Variable and result types for each operation, paired as
// `<Name>MutationVariables` / `<Name>Mutation` (or Query equivalents).
export type AddNoteToFolderMutationVariables = Exact<{
  folderId: Scalars['Int'];
  noteId: Scalars['Int'];
}>;
export type AddNoteToFolderMutation = { __typename?: 'Mutation', addNoteToFolder: boolean };
export type ChangePasswordMutationVariables = Exact<{
  token: Scalars['String'];
  newPassword: Scalars['String'];
}>;
export type ChangePasswordMutation = { __typename?: 'Mutation', changePassword: { __typename?: 'UserResponse', errors?: Array<{ __typename?: 'FieldError', field: string, message: string }> | null | undefined, user?: { __typename: 'User', id: number, name: string, theme: string, imgUrl: string, email: string, createdAt: string, updatedAt: string, notes: Array<{ __typename?: 'Note', id: number, title: string, body: string, status: string, creatorId: number, createdAt: string, updatedAt: string }>, folders: Array<{ __typename?: 'Folder', id: number, name: string, color: string, noteIds: Array<string>, creatorId: number, createdAt: string, updatedAt: string }> } | null | undefined } };
export type CreateFolderMutationVariables = Exact<{
  name: Scalars['String'];
}>;
export type CreateFolderMutation = { __typename?: 'Mutation', createFolder: boolean };
export type CreateNoteMutationVariables = Exact<{
  title: Scalars['String'];
}>;
export type CreateNoteMutation = { __typename?: 'Mutation', createNote: { __typename?: 'Note', id: number, title: string, body: string, status: string, creatorId: number, createdAt: string, updatedAt: string } };
export type DeleteFolderMutationVariables = Exact<{
  id: Scalars['Int'];
}>;
export type DeleteFolderMutation = { __typename?: 'Mutation', deleteFolder: boolean };
export type DeleteNoteMutationVariables = Exact<{
  id: Scalars['Int'];
}>;
export type DeleteNoteMutation = { __typename?: 'Mutation', deleteNote: boolean };
export type DeleteNoteFromFolderMutationVariables = Exact<{
  folderId: Scalars['Int'];
  noteId: Scalars['Int'];
}>;
export type DeleteNoteFromFolderMutation = { __typename?: 'Mutation', deleteNoteFromFolder: boolean };
export type ForgotPasswordMutationVariables = Exact<{
  email: Scalars['String'];
}>;
export type ForgotPasswordMutation = { __typename?: 'Mutation', forgotPassword: boolean };
export type LoginMutationVariables = Exact<{
  email: Scalars['String'];
  password: Scalars['String'];
}>;
export type LoginMutation = { __typename?: 'Mutation', login: { __typename?: 'UserResponse', errors?: Array<{ __typename?: 'FieldError', field: string, message: string }> | null | undefined, user?: { __typename: 'User', id: number, name: string, theme: string, imgUrl: string, email: string, createdAt: string, updatedAt: string, notes: Array<{ __typename?: 'Note', id: number, title: string, body: string, status: string, creatorId: number, createdAt: string, updatedAt: string }>, folders: Array<{ __typename?: 'Folder', id: number, name: string, color: string, noteIds: Array<string>, creatorId: number, createdAt: string, updatedAt: string }> } | null | undefined } };
export type LogoutMutationVariables = Exact<{ [key: string]: never; }>;
export type LogoutMutation = { __typename?: 'Mutation', logout: boolean };
export type RegisterMutationVariables = Exact<{
  options: UserInput;
}>;
export type RegisterMutation = { __typename?: 'Mutation', register: { __typename?: 'UserResponse', errors?: Array<{ __typename?: 'FieldError', field: string, message: string }> | null | undefined, user?: { __typename: 'User', id: number, name: string, theme: string, imgUrl: string, email: string, createdAt: string, updatedAt: string, notes: Array<{ __typename?: 'Note', id: number, title: string, body: string, status: string, creatorId: number, createdAt: string, updatedAt: string }>, folders: Array<{ __typename?: 'Folder', id: number, name: string, color: string, noteIds: Array<string>, creatorId: number, createdAt: string, updatedAt: string }> } | null | undefined } };
export type UpdateNameMutationVariables = Exact<{
  name: Scalars['String'];
}>;
export type UpdateNameMutation = { __typename?: 'Mutation', updateName: boolean };
export type UpdateNoteMutationVariables = Exact<{
  id: Scalars['Int'];
  body: Scalars['String'];
}>;
export type UpdateNoteMutation = { __typename?: 'Mutation', updateNote: boolean };
export type UpdateNoteTitleMutationVariables = Exact<{
  id: Scalars['Int'];
  title: Scalars['String'];
}>;
export type UpdateNoteTitleMutation = { __typename?: 'Mutation', updateNoteTitle: boolean };
export type GetNoteQueryVariables = Exact<{
  id: Scalars['Int'];
}>;
export type GetNoteQuery = { __typename?: 'Query', getNote: { __typename?: 'Note', id: number, title: string, body: string, status: string, creatorId: number, createdAt: string, updatedAt: string } };
export type MeQueryVariables = Exact<{ [key: string]: never; }>;
export type MeQuery = { __typename?: 'Query', me?: { __typename: 'User', id: number, name: string, theme: string, imgUrl: string, email: string, createdAt: string, updatedAt: string, notes: Array<{ __typename?: 'Note', id: number, title: string, body: string, status: string, creatorId: number, createdAt: string, updatedAt: string }>, folders: Array<{ __typename?: 'Folder', id: number, name: string, color: string, noteIds: Array<string>, creatorId: number, createdAt: string, updatedAt: string }> } | null | undefined };
// GraphQL fragment documents. Fragments that reference other fragments
// interpolate their dependencies into the template so each document is
// self-contained when sent to the server.
export const RegularErrorFragmentDoc = gql`
    fragment RegularError on FieldError {
  field
  message
}
    `;
export const RegularNoteFragmentDoc = gql`
    fragment RegularNote on Note {
  id
  title
  body
  status
  creatorId
  createdAt
  updatedAt
}
    `;
export const RegularFolderFragmentDoc = gql`
    fragment RegularFolder on Folder {
  id
  name
  color
  noteIds
  creatorId
  createdAt
  updatedAt
}
    `;
export const RegularUserFragmentDoc = gql`
    fragment RegularUser on User {
  id
  name
  theme
  imgUrl
  email
  createdAt
  updatedAt
  notes {
    ...RegularNote
  }
  folders {
    ...RegularFolder
  }
  __typename
}
    ${RegularNoteFragmentDoc}
${RegularFolderFragmentDoc}`;
export const RegularUserResponseFragmentDoc = gql`
    fragment RegularUserResponse on UserResponse {
  errors {
    ...RegularError
  }
  user {
    ...RegularUser
  }
}
    ${RegularErrorFragmentDoc}
${RegularUserFragmentDoc}`;
export const AddNoteToFolderDocument = gql`
    mutation addNoteToFolder($folderId: Int!, $noteId: Int!) {
  addNoteToFolder(folderId: $folderId, noteId: $noteId)
}
    `;
export type AddNoteToFolderMutationFn = Apollo.MutationFunction<AddNoteToFolderMutation, AddNoteToFolderMutationVariables>;
/**
 * React hook for the `addNoteToFolder` GraphQL mutation.
 *
 * Yields a `[mutate, result]` tuple: invoke `mutate` (optionally with
 * `variables`) to file the note into the folder; `result` exposes `data`,
 * `loading` and `error` for the current execution.
 *
 * @param baseOptions Apollo mutation options, merged over the generated defaults.
 *
 * @example
 * const [addNoteToFolderMutation, { data, loading, error }] = useAddNoteToFolderMutation({
 *   variables: { folderId: 1, noteId: 2 },
 * });
 */
export function useAddNoteToFolderMutation(baseOptions?: Apollo.MutationHookOptions<AddNoteToFolderMutation, AddNoteToFolderMutationVariables>) {
  return Apollo.useMutation<AddNoteToFolderMutation, AddNoteToFolderMutationVariables>(
    AddNoteToFolderDocument,
    { ...defaultOptions, ...baseOptions },
  );
}
export type AddNoteToFolderMutationHookResult = ReturnType<typeof useAddNoteToFolderMutation>;
export type AddNoteToFolderMutationResult = Apollo.MutationResult<AddNoteToFolderMutation>;
export type AddNoteToFolderMutationOptions = Apollo.BaseMutationOptions<AddNoteToFolderMutation, AddNoteToFolderMutationVariables>;
export const ChangePasswordDocument = gql`
    mutation changePassword($token: String!, $newPassword: String!) {
  changePassword(token: $token, newPassword: $newPassword) {
    ...RegularUserResponse
  }
}
    ${RegularUserResponseFragmentDoc}`;
export type ChangePasswordMutationFn = Apollo.MutationFunction<ChangePasswordMutation, ChangePasswordMutationVariables>;
/**
 * React hook for the `changePassword` GraphQL mutation.
 *
 * Yields a `[mutate, result]` tuple: invoke `mutate` with a reset `token`
 * and the `newPassword`; `result` exposes `data`, `loading` and `error`
 * for the current execution.
 *
 * @param baseOptions Apollo mutation options, merged over the generated defaults.
 *
 * @example
 * const [changePasswordMutation, { data, loading, error }] = useChangePasswordMutation({
 *   variables: { token: '...', newPassword: '...' },
 * });
 */
export function useChangePasswordMutation(baseOptions?: Apollo.MutationHookOptions<ChangePasswordMutation, ChangePasswordMutationVariables>) {
  return Apollo.useMutation<ChangePasswordMutation, ChangePasswordMutationVariables>(
    ChangePasswordDocument,
    { ...defaultOptions, ...baseOptions },
  );
}
export type ChangePasswordMutationHookResult = ReturnType<typeof useChangePasswordMutation>;
export type ChangePasswordMutationResult = Apollo.MutationResult<ChangePasswordMutation>;
export type ChangePasswordMutationOptions = Apollo.BaseMutationOptions<ChangePasswordMutation, ChangePasswordMutationVariables>;
export const CreateFolderDocument = gql`
    mutation createFolder($name: String!) {
  createFolder(name: $name)
}
    `;
export type CreateFolderMutationFn = Apollo.MutationFunction<CreateFolderMutation, CreateFolderMutationVariables>;
/**
 * React hook for the `createFolder` GraphQL mutation.
 *
 * Yields a `[mutate, result]` tuple: invoke `mutate` with the folder `name`;
 * `result` exposes `data`, `loading` and `error` for the current execution.
 *
 * @param baseOptions Apollo mutation options, merged over the generated defaults.
 *
 * @example
 * const [createFolderMutation, { data, loading, error }] = useCreateFolderMutation({
 *   variables: { name: 'Work' },
 * });
 */
export function useCreateFolderMutation(baseOptions?: Apollo.MutationHookOptions<CreateFolderMutation, CreateFolderMutationVariables>) {
  return Apollo.useMutation<CreateFolderMutation, CreateFolderMutationVariables>(
    CreateFolderDocument,
    { ...defaultOptions, ...baseOptions },
  );
}
export type CreateFolderMutationHookResult = ReturnType<typeof useCreateFolderMutation>;
export type CreateFolderMutationResult = Apollo.MutationResult<CreateFolderMutation>;
export type CreateFolderMutationOptions = Apollo.BaseMutationOptions<CreateFolderMutation, CreateFolderMutationVariables>;
export const CreateNoteDocument = gql`
    mutation createNote($title: String!) {
  createNote(title: $title) {
    ...RegularNote
  }
}
    ${RegularNoteFragmentDoc}`;
export type CreateNoteMutationFn = Apollo.MutationFunction<CreateNoteMutation, CreateNoteMutationVariables>;
/**
 * React hook for the `createNote` GraphQL mutation.
 *
 * Yields a `[mutate, result]` tuple: invoke `mutate` with the note `title`
 * to create a note and receive it back as a `RegularNote` selection;
 * `result` exposes `data`, `loading` and `error`.
 *
 * @param baseOptions Apollo mutation options, merged over the generated defaults.
 *
 * @example
 * const [createNoteMutation, { data, loading, error }] = useCreateNoteMutation({
 *   variables: { title: 'Untitled' },
 * });
 */
export function useCreateNoteMutation(baseOptions?: Apollo.MutationHookOptions<CreateNoteMutation, CreateNoteMutationVariables>) {
  return Apollo.useMutation<CreateNoteMutation, CreateNoteMutationVariables>(
    CreateNoteDocument,
    { ...defaultOptions, ...baseOptions },
  );
}
export type CreateNoteMutationHookResult = ReturnType<typeof useCreateNoteMutation>;
export type CreateNoteMutationResult = Apollo.MutationResult<CreateNoteMutation>;
export type CreateNoteMutationOptions = Apollo.BaseMutationOptions<CreateNoteMutation, CreateNoteMutationVariables>;
export const DeleteFolderDocument = gql`
    mutation deleteFolder($id: Int!) {
  deleteFolder(id: $id)
}
    `;
export type DeleteFolderMutationFn = Apollo.MutationFunction<DeleteFolderMutation, DeleteFolderMutationVariables>;
/**
 * React hook for the `deleteFolder` GraphQL mutation.
 *
 * Yields a `[mutate, result]` tuple: invoke `mutate` with the folder `id`;
 * `result` exposes `data`, `loading` and `error` for the current execution.
 *
 * @param baseOptions Apollo mutation options, merged over the generated defaults.
 *
 * @example
 * const [deleteFolderMutation, { data, loading, error }] = useDeleteFolderMutation({
 *   variables: { id: 1 },
 * });
 */
export function useDeleteFolderMutation(baseOptions?: Apollo.MutationHookOptions<DeleteFolderMutation, DeleteFolderMutationVariables>) {
  return Apollo.useMutation<DeleteFolderMutation, DeleteFolderMutationVariables>(
    DeleteFolderDocument,
    { ...defaultOptions, ...baseOptions },
  );
}
export type DeleteFolderMutationHookResult = ReturnType<typeof useDeleteFolderMutation>;
export type DeleteFolderMutationResult = Apollo.MutationResult<DeleteFolderMutation>;
export type DeleteFolderMutationOptions = Apollo.BaseMutationOptions<DeleteFolderMutation, DeleteFolderMutationVariables>;
export const DeleteNoteDocument = gql`
    mutation deleteNote($id: Int!) {
  deleteNote(id: $id)
}
    `;
export type DeleteNoteMutationFn = Apollo.MutationFunction<DeleteNoteMutation, DeleteNoteMutationVariables>;
/**
 * React hook for the `deleteNote` GraphQL mutation.
 *
 * Yields a `[mutate, result]` tuple: invoke `mutate` with the note `id`;
 * `result` exposes `data`, `loading` and `error` for the current execution.
 *
 * @param baseOptions Apollo mutation options, merged over the generated defaults.
 *
 * @example
 * const [deleteNoteMutation, { data, loading, error }] = useDeleteNoteMutation({
 *   variables: { id: 1 },
 * });
 */
export function useDeleteNoteMutation(baseOptions?: Apollo.MutationHookOptions<DeleteNoteMutation, DeleteNoteMutationVariables>) {
  return Apollo.useMutation<DeleteNoteMutation, DeleteNoteMutationVariables>(
    DeleteNoteDocument,
    { ...defaultOptions, ...baseOptions },
  );
}
export type DeleteNoteMutationHookResult = ReturnType<typeof useDeleteNoteMutation>;
export type DeleteNoteMutationResult = Apollo.MutationResult<DeleteNoteMutation>;
export type DeleteNoteMutationOptions = Apollo.BaseMutationOptions<DeleteNoteMutation, DeleteNoteMutationVariables>;
export const DeleteNoteFromFolderDocument = gql`
    mutation deleteNoteFromFolder($folderId: Int!, $noteId: Int!) {
  deleteNoteFromFolder(folderId: $folderId, noteId: $noteId)
}
    `;
export type DeleteNoteFromFolderMutationFn = Apollo.MutationFunction<DeleteNoteFromFolderMutation, DeleteNoteFromFolderMutationVariables>;
/**
 * React hook for the `deleteNoteFromFolder` GraphQL mutation.
 *
 * Yields a `[mutate, result]` tuple: invoke `mutate` with `folderId` and
 * `noteId` to detach the note; `result` exposes `data`, `loading` and
 * `error` for the current execution.
 *
 * @param baseOptions Apollo mutation options, merged over the generated defaults.
 *
 * @example
 * const [deleteNoteFromFolderMutation, { data, loading, error }] = useDeleteNoteFromFolderMutation({
 *   variables: { folderId: 1, noteId: 2 },
 * });
 */
export function useDeleteNoteFromFolderMutation(baseOptions?: Apollo.MutationHookOptions<DeleteNoteFromFolderMutation, DeleteNoteFromFolderMutationVariables>) {
  return Apollo.useMutation<DeleteNoteFromFolderMutation, DeleteNoteFromFolderMutationVariables>(
    DeleteNoteFromFolderDocument,
    { ...defaultOptions, ...baseOptions },
  );
}
export type DeleteNoteFromFolderMutationHookResult = ReturnType<typeof useDeleteNoteFromFolderMutation>;
export type DeleteNoteFromFolderMutationResult = Apollo.MutationResult<DeleteNoteFromFolderMutation>;
export type DeleteNoteFromFolderMutationOptions = Apollo.BaseMutationOptions<DeleteNoteFromFolderMutation, DeleteNoteFromFolderMutationVariables>;
export const ForgotPasswordDocument = gql`
    mutation ForgotPassword($email: String!) {
  forgotPassword(email: $email)
}
    `;
export type ForgotPasswordMutationFn = Apollo.MutationFunction<ForgotPasswordMutation, ForgotPasswordMutationVariables>;
/**
 * React hook for the `forgotPassword` GraphQL mutation.
 *
 * Yields a `[mutate, result]` tuple: invoke `mutate` with the account
 * `email` to start a password reset; `result` exposes `data`, `loading`
 * and `error` for the current execution.
 *
 * @param baseOptions Apollo mutation options, merged over the generated defaults.
 *
 * @example
 * const [forgotPasswordMutation, { data, loading, error }] = useForgotPasswordMutation({
 *   variables: { email: 'ada@example.com' },
 * });
 */
export function useForgotPasswordMutation(baseOptions?: Apollo.MutationHookOptions<ForgotPasswordMutation, ForgotPasswordMutationVariables>) {
  return Apollo.useMutation<ForgotPasswordMutation, ForgotPasswordMutationVariables>(
    ForgotPasswordDocument,
    { ...defaultOptions, ...baseOptions },
  );
}
export type ForgotPasswordMutationHookResult = ReturnType<typeof useForgotPasswordMutation>;
export type ForgotPasswordMutationResult = Apollo.MutationResult<ForgotPasswordMutation>;
export type ForgotPasswordMutationOptions = Apollo.BaseMutationOptions<ForgotPasswordMutation, ForgotPasswordMutationVariables>;
export const LoginDocument = gql`
    mutation Login($email: String!, $password: String!) {
  login(email: $email, password: $password) {
    ...RegularUserResponse
  }
}
    ${RegularUserResponseFragmentDoc}`;
export type LoginMutationFn = Apollo.MutationFunction<LoginMutation, LoginMutationVariables>;
/**
 * React hook for the `login` GraphQL mutation.
 *
 * Yields a `[mutate, result]` tuple: invoke `mutate` with `email` and
 * `password`; the response is a `RegularUserResponse` (user on success,
 * field errors otherwise). `result` exposes `data`, `loading` and `error`.
 *
 * @param baseOptions Apollo mutation options, merged over the generated defaults.
 *
 * @example
 * const [loginMutation, { data, loading, error }] = useLoginMutation({
 *   variables: { email: 'ada@example.com', password: 'secret' },
 * });
 */
export function useLoginMutation(baseOptions?: Apollo.MutationHookOptions<LoginMutation, LoginMutationVariables>) {
  return Apollo.useMutation<LoginMutation, LoginMutationVariables>(
    LoginDocument,
    { ...defaultOptions, ...baseOptions },
  );
}
export type LoginMutationHookResult = ReturnType<typeof useLoginMutation>;
export type LoginMutationResult = Apollo.MutationResult<LoginMutation>;
export type LoginMutationOptions = Apollo.BaseMutationOptions<LoginMutation, LoginMutationVariables>;
export const LogoutDocument = gql`
    mutation Logout {
  logout
}
    `;
export type LogoutMutationFn = Apollo.MutationFunction<LogoutMutation, LogoutMutationVariables>;
/**
 * React hook for the `logout` GraphQL mutation.
 *
 * Yields a `[mutate, result]` tuple: invoke `mutate` (no variables) to end
 * the current session; `result` exposes `data`, `loading` and `error`.
 *
 * @param baseOptions Apollo mutation options, merged over the generated defaults.
 *
 * @example
 * const [logoutMutation, { data, loading, error }] = useLogoutMutation();
 */
export function useLogoutMutation(baseOptions?: Apollo.MutationHookOptions<LogoutMutation, LogoutMutationVariables>) {
  return Apollo.useMutation<LogoutMutation, LogoutMutationVariables>(
    LogoutDocument,
    { ...defaultOptions, ...baseOptions },
  );
}
export type LogoutMutationHookResult = ReturnType<typeof useLogoutMutation>;
export type LogoutMutationResult = Apollo.MutationResult<LogoutMutation>;
export type LogoutMutationOptions = Apollo.BaseMutationOptions<LogoutMutation, LogoutMutationVariables>;
export const RegisterDocument = gql`
    mutation Register($options: UserInput!) {
  register(options: $options) {
    ...RegularUserResponse
  }
}
    ${RegularUserResponseFragmentDoc}`;
export type RegisterMutationFn = Apollo.MutationFunction<RegisterMutation, RegisterMutationVariables>;
/**
 * React hook for the `register` GraphQL mutation.
 *
 * Yields a `[mutate, result]` tuple: invoke `mutate` with a `UserInput`
 * (`email`, `name`, `password`); the response is a `RegularUserResponse`
 * (user on success, field errors otherwise). `result` exposes `data`,
 * `loading` and `error`.
 *
 * @param baseOptions Apollo mutation options, merged over the generated defaults.
 *
 * @example
 * const [registerMutation, { data, loading, error }] = useRegisterMutation({
 *   variables: { options: { email: '...', name: '...', password: '...' } },
 * });
 */
export function useRegisterMutation(baseOptions?: Apollo.MutationHookOptions<RegisterMutation, RegisterMutationVariables>) {
  return Apollo.useMutation<RegisterMutation, RegisterMutationVariables>(
    RegisterDocument,
    { ...defaultOptions, ...baseOptions },
  );
}
export type RegisterMutationHookResult = ReturnType<typeof useRegisterMutation>;
export type RegisterMutationResult = Apollo.MutationResult<RegisterMutation>;
export type RegisterMutationOptions = Apollo.BaseMutationOptions<RegisterMutation, RegisterMutationVariables>;
export const UpdateNameDocument = gql`
mutation updateName($name: String!) {
updateName(name: $name)
}
`;
export type UpdateNameMutationFn = Apollo.MutationFunction<UpdateNameMutation, UpdateNameMutationVariables>;
/**
* __useUpdateNameMutation__
*
* To run a mutation, you first call `useUpdateNameMutation` within a React component and pass it any options that fit your needs.
* When your component renders, `useUpdateNameMutation` returns a tuple that includes:
* - A mutate function that you can call at any time to execute the mutation
* - An object with fields that represent the current status of the mutation's execution
*
* @param baseOptions options that will be passed into the mutation, supported options are listed on: https://www.apollographql.com/docs/react/api/react-hooks/#options-2;
*
* @example
* const [updateNameMutation, { data, loading, error }] = useUpdateNameMutation({
* variables: {
* name: // value for 'name'
* },
* });
*/
export function useUpdateNameMutation(baseOptions?: Apollo.MutationHookOptions<UpdateNameMutation, UpdateNameMutationVariables>) {
const options = {...defaultOptions, ...baseOptions}
return Apollo.useMutation<UpdateNameMutation, UpdateNameMutationVariables>(UpdateNameDocument, options);
}
// Convenience aliases for the `updateName` mutation hook's result and options types.
export type UpdateNameMutationHookResult = ReturnType<typeof useUpdateNameMutation>;
export type UpdateNameMutationResult = Apollo.MutationResult<UpdateNameMutation>;
export type UpdateNameMutationOptions = Apollo.BaseMutationOptions<UpdateNameMutation, UpdateNameMutationVariables>;
// GraphQL document and mutate-function type for the `updateNote` mutation (generated by graphql-codegen).
export const UpdateNoteDocument = gql`
    mutation updateNote($id: Int!, $body: String!) {
  updateNote(id: $id, body: $body)
}
    `;
export type UpdateNoteMutationFn = Apollo.MutationFunction<UpdateNoteMutation, UpdateNoteMutationVariables>;
/**
 * __useUpdateNoteMutation__
 *
 * React hook wrapping `Apollo.useMutation` for the `updateNote` mutation.
 * Returns a tuple of the mutate function and an object describing the
 * mutation's current execution status (`data`, `loading`, `error`).
 *
 * @param baseOptions options passed through to the mutation; supported options are listed on:
 * https://www.apollographql.com/docs/react/api/react-hooks/#options-2
 *
 * @example
 * const [updateNoteMutation, { data, loading, error }] = useUpdateNoteMutation({
 *   variables: {
 *      id: // value for 'id'
 *      body: // value for 'body'
 *   },
 * });
 */
export function useUpdateNoteMutation(baseOptions?: Apollo.MutationHookOptions<UpdateNoteMutation, UpdateNoteMutationVariables>) {
  // Merge the call-site options over the module-level defaults before delegating to Apollo.
  return Apollo.useMutation<UpdateNoteMutation, UpdateNoteMutationVariables>(UpdateNoteDocument, { ...defaultOptions, ...baseOptions });
}
// Convenience aliases for the `updateNote` mutation hook's result and options types.
export type UpdateNoteMutationHookResult = ReturnType<typeof useUpdateNoteMutation>;
export type UpdateNoteMutationResult = Apollo.MutationResult<UpdateNoteMutation>;
export type UpdateNoteMutationOptions = Apollo.BaseMutationOptions<UpdateNoteMutation, UpdateNoteMutationVariables>;
// GraphQL document and mutate-function type for the `updateNoteTitle` mutation (generated by graphql-codegen).
export const UpdateNoteTitleDocument = gql`
    mutation updateNoteTitle($id: Int!, $title: String!) {
  updateNoteTitle(id: $id, title: $title)
}
    `;
export type UpdateNoteTitleMutationFn = Apollo.MutationFunction<UpdateNoteTitleMutation, UpdateNoteTitleMutationVariables>;
/**
 * __useUpdateNoteTitleMutation__
 *
 * React hook wrapping `Apollo.useMutation` for the `updateNoteTitle` mutation.
 * Returns a tuple of the mutate function and an object describing the
 * mutation's current execution status (`data`, `loading`, `error`).
 *
 * @param baseOptions options passed through to the mutation; supported options are listed on:
 * https://www.apollographql.com/docs/react/api/react-hooks/#options-2
 *
 * @example
 * const [updateNoteTitleMutation, { data, loading, error }] = useUpdateNoteTitleMutation({
 *   variables: {
 *      id: // value for 'id'
 *      title: // value for 'title'
 *   },
 * });
 */
export function useUpdateNoteTitleMutation(baseOptions?: Apollo.MutationHookOptions<UpdateNoteTitleMutation, UpdateNoteTitleMutationVariables>) {
  // Merge the call-site options over the module-level defaults before delegating to Apollo.
  return Apollo.useMutation<UpdateNoteTitleMutation, UpdateNoteTitleMutationVariables>(UpdateNoteTitleDocument, { ...defaultOptions, ...baseOptions });
}
// Convenience aliases for the `updateNoteTitle` mutation hook's result and options types.
export type UpdateNoteTitleMutationHookResult = ReturnType<typeof useUpdateNoteTitleMutation>;
export type UpdateNoteTitleMutationResult = Apollo.MutationResult<UpdateNoteTitleMutation>;
export type UpdateNoteTitleMutationOptions = Apollo.BaseMutationOptions<UpdateNoteTitleMutation, UpdateNoteTitleMutationVariables>;
// GraphQL document for the `getNote` query; pulls in the RegularNote fragment definition.
export const GetNoteDocument = gql`
    query getNote($id: Int!) {
  getNote(id: $id) {
    ...RegularNote
  }
}
    ${RegularNoteFragmentDoc}`;
/**
 * __useGetNoteQuery__
 *
 * React hook wrapping `Apollo.useQuery` for the `getNote` query. Returns an
 * Apollo Client result object with `loading`, `error` and `data` properties
 * you can use to render your UI.
 *
 * @param baseOptions options passed through to the query; supported options are listed on:
 * https://www.apollographql.com/docs/react/api/react-hooks/#options
 *
 * @example
 * const { data, loading, error } = useGetNoteQuery({
 *   variables: {
 *      id: // value for 'id'
 *   },
 * });
 */
export function useGetNoteQuery(baseOptions: Apollo.QueryHookOptions<GetNoteQuery, GetNoteQueryVariables>) {
  // Merge the call-site options over the module-level defaults before delegating to Apollo.
  return Apollo.useQuery<GetNoteQuery, GetNoteQueryVariables>(GetNoteDocument, { ...defaultOptions, ...baseOptions });
}
export function useGetNoteLazyQuery(baseOptions?: Apollo.LazyQueryHookOptions<GetNoteQuery, GetNoteQueryVariables>) {
  // Lazy variant: the query only runs when the returned execute function is invoked.
  return Apollo.useLazyQuery<GetNoteQuery, GetNoteQueryVariables>(GetNoteDocument, { ...defaultOptions, ...baseOptions });
}
// Convenience aliases for the `getNote` query hook's result types.
export type GetNoteQueryHookResult = ReturnType<typeof useGetNoteQuery>;
export type GetNoteLazyQueryHookResult = ReturnType<typeof useGetNoteLazyQuery>;
export type GetNoteQueryResult = Apollo.QueryResult<GetNoteQuery, GetNoteQueryVariables>;
// GraphQL document for the `Me` query; pulls in the RegularUser fragment definition.
export const MeDocument = gql`
    query Me {
  me {
    ...RegularUser
  }
}
    ${RegularUserFragmentDoc}`;
/**
 * __useMeQuery__
 *
 * React hook wrapping `Apollo.useQuery` for the `Me` query. Returns an
 * Apollo Client result object with `loading`, `error` and `data` properties
 * you can use to render your UI.
 *
 * @param baseOptions options passed through to the query; supported options are listed on:
 * https://www.apollographql.com/docs/react/api/react-hooks/#options
 *
 * @example
 * const { data, loading, error } = useMeQuery({
 *   variables: {
 *   },
 * });
 */
export function useMeQuery(baseOptions?: Apollo.QueryHookOptions<MeQuery, MeQueryVariables>) {
  // Merge the call-site options over the module-level defaults before delegating to Apollo.
  return Apollo.useQuery<MeQuery, MeQueryVariables>(MeDocument, { ...defaultOptions, ...baseOptions });
}
export function useMeLazyQuery(baseOptions?: Apollo.LazyQueryHookOptions<MeQuery, MeQueryVariables>) {
  // Lazy variant: the query only runs when the returned execute function is invoked.
  return Apollo.useLazyQuery<MeQuery, MeQueryVariables>(MeDocument, { ...defaultOptions, ...baseOptions });
}
// Convenience aliases for the `Me` query hook's result types.
export type MeQueryHookResult = ReturnType<typeof useMeQuery>;
export type MeLazyQueryHookResult = ReturnType<typeof useMeLazyQuery>;
export type MeQueryResult = Apollo.QueryResult<MeQuery, MeQueryVariables>;
import './task.css';
import { ResourceData, ResourceVersionData, Versions } from '../../tekton-hub-client';
import { createDiv, createSpan } from '../common/dom-util';
import { BaseWidget } from '../common/widget';
import { VSMessage } from '../common/vscode-api';
import * as hljs from 'highlight.js/lib/core';
import { HubTaskInstallation, HubTaskUninstall, InstalledTask, isInstalledTask } from '../../hub/hub-common';
import * as yaml from 'highlight.js/lib/languages/yaml';
import {CodeLineNumbers} from 'code-line-numbers';
import * as semver from 'semver';
// eslint-disable-next-line @typescript-eslint/no-var-requires
const taskSvg = require('../hub/task.svg');
import * as MarkdownIt from 'markdown-it';
import './yaml-highlight.css';
// Register YAML syntax highlighting once at module load so the YAML tab can be rendered.
// eslint-disable-next-line @typescript-eslint/no-explicit-any
hljs.registerLanguage('yaml', yaml as any);
export class TaskWidget extends BaseWidget {
private task: ResourceData;
private currentVersion: ResourceVersionData;
private md: MarkdownIt;
private readmePage: HTMLDivElement;
private yamlPage: HTMLDivElement;
private versionsSelect: HTMLSelectElement;
private versions: Versions | undefined;
private tknVersion: string | undefined;
private installButton: HTMLAnchorElement;
constructor(private root: HTMLElement, private messageSender: VSMessage) {
super();
this.element = createDiv('task-page');
root.appendChild(this.element);
this.md = new MarkdownIt();
}
showTask(task: ResourceData | InstalledTask, tknVersion: string): void {
this.task = task;
if (isInstalledTask(task)){
this.currentVersion = task.installedVersion;
} else {
this.currentVersion = task.latestVersion;
}
this.versions = undefined;
this.tknVersion = tknVersion;
this.updatePage();
}
setVersions(versions: Versions): void {
this.versions = versions;
this.fillVersions(this.currentVersion);
}
private fillVersions(currentVersion: ResourceVersionData): void {
for (const ver of this.versions.versions){
const opt = document.createElement('option');
opt.value = ver.version;
console.error(this.task);
if (ver.version === this.task.latestVersion.version){
opt.text = ver.version + ' (latest)';
} else {
opt.text = ver.version;
}
if (currentVersion.version === ver.version){
opt.selected = true;
}
this.versionsSelect.add(opt);
}
}
private updatePage(): void {
this.element.innerHTML = '';
const header = createDiv('header');
this.element.appendChild(header);
const body = createDiv('body');
this.element.appendChild(body);
this.createHeader(header);
this.createBody(body);
}
private createHeader(header: HTMLDivElement): void {
const iconContainer = createDiv('icon-container');
const image = document.createElement('img');
image.src = taskSvg;
image.classList.add('icon');
iconContainer.appendChild(image);
header.appendChild(iconContainer);
const details = createDiv('details');
header.appendChild(details);
const title = createDiv('title');
details.appendChild(title);
const name = document.createElement('a');
name.classList.add('name');
title.appendChild(name);
name.textContent = this.currentVersion.displayName ? this.currentVersion.displayName : this.task.name;
name.href = `https://hub.tekton.dev/${this.task.catalog.name}/task/${this.task.name}`;
const subtitle = createDiv('subtitle');
details.appendChild(subtitle);
// Catalog
const catalogContainer = createDiv('subtitle-entry');
subtitle.appendChild(catalogContainer);
const catalog = createSpan('catalog');
catalogContainer.appendChild(catalog);
catalog.textContent = this.task.catalog.name;
// Ratings
const ratingsContainer = createDiv('subtitle-entry');
subtitle.appendChild(ratingsContainer);
const ratings = createSpan('rating');
ratingsContainer.appendChild(ratings);
const star = createSpan('codicon', 'codicon-star-full');
ratings.appendChild(star);
const count = createSpan('count');
count.innerText = this.task.rating.toString();
ratings.appendChild(count);
// Repository
const repositoryContainer = createDiv('subtitle-entry');
const repolink = document.createElement('a');
repolink.href = this.currentVersion.webURL;
repolink.textContent = 'Repository';
repolink.classList.add('repository');
repositoryContainer.appendChild(repolink);
subtitle.appendChild(repositoryContainer);
//Description
const description = createDiv('description');
description.textContent = this.currentVersion.description ? this.currentVersion.description?.split('\n')[0] : this.task.latestVersion.description?.split('\n')[0];
details.appendChild(description);
//Actions
const actions = createDiv('actions');
const actionBar = createDiv('action-bar');
actions.appendChild(actionBar);
const actionsContainer = document.createElement('ul');
actionsContainer.classList.add('actions-container');
actionBar.appendChild(actionsContainer);
// Versions Select
const versionsLi = document.createElement('li');
versionsLi.classList.add('action-item');
this.versionsSelect = document.createElement('select');
this.versionsSelect.classList.add('extension-action', 'versions-select');
versionsLi.appendChild(this.versionsSelect);
actionsContainer.appendChild(versionsLi);
if (!this.versions) {
this.messageSender.postMessage({type:'getVersions', data: this.task.id})
} else {
this.fillVersions(this.currentVersion);
}
this.versionsSelect.onchange = () => {
this.versionChanged(this.versionsSelect.selectedOptions.item(0).value);
};
// install button
if (!isInstalledTask(this.task) || this.currentVersion.version !== this.task.installedVersion.version){
this.addInstallButton(actionsContainer);
} else {
this.addUninstallButton(actionsContainer);
}
// TKN Version
if (this.tknVersion){
this.addVersionCheck(actionsContainer);
}
if (isInstalledTask(this.task) && this.task.clusterTask) {
const clusterTaskContainer = document.createElement('li');
clusterTaskContainer.classList.add('action-item');
const clusterTaskLabel = createSpan('cluster-task-label');
clusterTaskLabel.textContent = 'ClusterTask';
clusterTaskContainer.appendChild(clusterTaskLabel);
actionsContainer.appendChild(clusterTaskContainer);
}
details.appendChild(actions);
}
private addInstallButton(actionsContainer: HTMLUListElement): void {
const installLi = document.createElement('li');
installLi.classList.add('action-item', 'action-dropdown-item');
const installButton = document.createElement('a');
installButton.classList.add('action-label', 'codicon', 'extension-action', 'label', 'uninstall', 'action-dropdown');
installButton.textContent = 'Install';
installButton.onclick = () => {
if (installButton.textContent !== 'Install') {
return;
}
this.sendInstall();
installButton.textContent = 'Installing';
}
this.installButton = installButton;
installLi.appendChild(installButton);
const tknDropdown = createDiv('monaco-dropdown');
const dropdownLabel = createDiv('dropdown-label');
const dropdownButton = document.createElement('a');
dropdownButton.classList.add('action-label', 'dropdown', 'codicon-chevron-down', 'extension-action', 'label', 'action-dropdown', 'codicon');
dropdownButton.onclick = (e) => {
e.stopPropagation();
this.showInstallOptions(dropdownButton);
}
dropdownLabel.appendChild(dropdownButton);
tknDropdown.appendChild(dropdownLabel);
installLi.appendChild(tknDropdown);
actionsContainer.appendChild(installLi);
}
private addUninstallButton(actionsContainer: HTMLUListElement): void {
const uninstallLi = document.createElement('li');
uninstallLi.classList.add('action-item');
const uninstallButton = document.createElement('a');
uninstallButton.classList.add('action-label', 'codicon', 'extension-action', 'label', 'uninstall');
uninstallButton.textContent = 'Uninstall';
uninstallButton.onclick = () => {
if (uninstallButton.textContent !== 'Uninstall'){
return;
}
this.sendUninstall();
uninstallButton.textContent = 'Uninstalling';
}
uninstallLi.appendChild(uninstallButton);
actionsContainer.appendChild(uninstallLi);
}
private sendInstall(asCluster = false): void {
this.messageSender.postMessage({type: 'installTask', data: {
url: this.currentVersion.rawURL,
name: this.task.name,
minPipelinesVersion: this.currentVersion.minPipelinesVersion,
tknVersion: this.tknVersion,
asClusterTask: asCluster,
taskVersion: this.currentVersion
} as HubTaskInstallation});
}
private sendUninstall(): void {
if (isInstalledTask(this.task)) {
this.messageSender.postMessage({type: 'uninstallTask', data: {
clusterTask: this.task.clusterTask,
name: this.task.name
} as HubTaskUninstall});
}
}
private showInstallOptions(parent: HTMLAnchorElement): void {
const rmenu = document.getElementById('rmenu');
rmenu.className = 'show';
rmenu.style.top = parent.getBoundingClientRect().bottom + 'px';
rmenu.style.left = parent.getBoundingClientRect().left + 'px';
rmenu.innerHTML = '';
const installCluster = document.createElement('a');
installCluster.text = 'Install as ClusterTask';
rmenu.appendChild(installCluster);
installCluster.onclick = () => {
this.sendInstall(true);
document.getElementById('rmenu').className = 'hide';
}
const docClickListener = (): void => {
document.getElementById('rmenu').className = 'hide';
document.removeEventListener('click',docClickListener);
};
document.addEventListener('click', docClickListener);
}
private addVersionCheck(container: HTMLUListElement): void {
if (this.currentVersion.minPipelinesVersion) {
if (semver.lt(this.tknVersion, this.currentVersion.minPipelinesVersion)){
const versionWarning = document.createElement('li');
versionWarning.classList.add('action-item');
const warning = createSpan('action-label', 'codicon', 'warning');
warning.textContent = 'Incompatible';
warning.title = `This task requires Tekton Pipelines >= ${this.task.latestVersion.minPipelinesVersion} and is incompatible with the version of Tekton Pipelines installed on your cluster.`;
versionWarning.appendChild(warning);
container.appendChild(versionWarning);
}
}
}
private createBody(body: HTMLDivElement): void {
const navbar = createDiv('navbar');
body.appendChild(navbar);
this.createNavButtons(navbar);
const content = createDiv('content');
body.appendChild(content);
this.readmePage = createDiv('page');
content.appendChild(this.readmePage);
// readmePage.hidden = true;
this.renderReadme(this.readmePage);
this.yamlPage = createDiv('page');
this.yamlPage.hidden = true;
content.appendChild(this.yamlPage);
this.renderYaml(this.yamlPage);
}
private createNavButtons(navbar: HTMLDivElement): void {
const actionBar = createDiv('action-bar');
navbar.appendChild(actionBar);
const actionContainer = document.createElement('ul');
actionContainer.className = 'actions-container';
actionBar.appendChild(actionContainer);
const readmeAction = document.createElement('li');
readmeAction.className = 'action-item';
actionContainer.appendChild(readmeAction);
const reademA = document.createElement('a');
reademA.classList.add('action-label', 'checked');
reademA.textContent = 'Description';
readmeAction.appendChild(reademA);
const yamlAction = document.createElement('li');
yamlAction.className = 'action-item';
actionContainer.appendChild(yamlAction);
const yamlA = document.createElement('a');
yamlA.classList.add('action-label');
yamlA.textContent = 'YAML';
yamlAction.appendChild(yamlA);
reademA.onclick = () => {
this.switchPages(this.readmePage, this.yamlPage, reademA, yamlA);
}
yamlA.onclick = () => {
this.switchPages(this.yamlPage, this.readmePage, yamlA , reademA);
}
}
private switchPages(newActivePage: HTMLDivElement, activePage: HTMLDivElement, newActiveButton: HTMLAnchorElement, activeButton: HTMLAnchorElement): void {
activePage.hidden = true;
activeButton.classList.remove('checked');
newActivePage.hidden = false;
newActiveButton.classList.add('checked');
}
private async renderReadme(content: HTMLDivElement): Promise<void> {
let readmeUrl = this.currentVersion.rawURL;
readmeUrl = readmeUrl.substring(0, readmeUrl.lastIndexOf('/'));
readmeUrl = readmeUrl + '/README.md';
const xhr = new XMLHttpRequest();
xhr.open('GET', readmeUrl);
xhr.send();
xhr.onload = () => {
content.innerHTML = this.md.render(xhr.response);
}
xhr.onerror = () => {
console.error(`Cannot load README.md -> ${xhr.status}: ${xhr.statusText}`);
}
}
private async renderYaml(content: HTMLDivElement): Promise<void> {
const yamlUrl = this.currentVersion.rawURL;
const xhr = new XMLHttpRequest();
xhr.open('GET', yamlUrl);
xhr.send();
xhr.onload = () => {
content.innerHTML = '';
const pre = document.createElement('pre');
content.appendChild(pre);
const code = document.createElement('code');
code.classList.add('language-yaml');
pre.appendChild(code);
code.innerHTML = hljs.highlight('yaml', xhr.response).value;
CodeLineNumbers.addLineNumbersTo(code);
}
xhr.onerror = () => {
console.error(`Cannot load README.md -> ${xhr.status}: ${xhr.statusText}`);
}
}
private versionChanged(versionId: string): void {
for (const ver of this.versions.versions) {
if (versionId === ver.version){
this.messageSender.postMessage({type: 'getTask', data: ver.id})
return;
}
}
}
setTaskVersion(task: ResourceVersionData): void {
this.currentVersion = task;
this.updatePage();
}
cancelInstall(): void {
this.installButton.textContent = 'Install';
}
} | the_stack |
import type * as Platform from '../../core/platform/platform.js';
import * as i18n from '../../core/i18n/i18n.js';
import * as SDK from '../../core/sdk/sdk.js';
import * as LitHtml from '../../ui/lit-html/lit-html.js';
import * as ReportView from '../../ui/components/report_view/report_view.js';
import * as UI from '../../ui/legacy/legacy.js';
import * as Protocol from '../../generated/protocol.js';
import * as IconButton from '../../ui/components/icon_button/icon_button.js';
import {NotRestoredReasonDescription} from './BackForwardCacheStrings.js';
import backForwardCacheViewStyles from './backForwardCacheView.css.js';
// Localizable strings for this view. The @description annotations are consumed
// by the DevTools localization pipeline and must describe each string's context.
const UIStrings = {
  /**
   * @description Title text in Back-forward Cache view of the Application panel
   */
  mainFrame: 'Main Frame',
  /**
   * @description Section header text in Back-forward Cache view of the Application panel
   */
  lastMainFrameNavigation: 'Last Main Frame Navigation',
  /**
   * @description Title text in Back-forward Cache view of the Application panel
   */
  backForwardCacheTitle: 'Back-forward Cache',
  /**
   * @description Status text for the status of the main frame
   */
  unavailable: 'unavailable',
  /**
   * @description Entry name text in the Back-forward Cache view of the Application panel
   */
  url: 'URL',
  /**
   * @description Entry name text in the Back-forward Cache view of the Application panel
   */
  bfcacheStatus: 'Back-forward Cache Status',
  /**
   * @description Status text for the status of the back-forward cache status
   */
  unknown: 'unknown',
  /**
   * @description Status text for the status of the back-forward cache status indicating that
   * the back-forward cache was not used and a normal navigation occured instead.
   */
  normalNavigation: 'Normal navigation (Not restored from back-forward cache)',
  /**
   * @description Status text for the status of the back-forward cache status indicating that
   * the back-forward cache was used to restore the page instead of reloading it.
   */
  restoredFromBFCache: 'Restored from back-forward cache',
  /**
   * @description Label for a list of reasons which prevent the page from being eligible for
   * back-forward cache. These reasons are actionable i.e. they can be cleaned up to make the
   * page eligible for back-forward cache.
   */
  pageSupportNeeded: 'Actionable',
  /**
   * @description Explanation for actionable items which prevent the page from being eligible
   * for back-forward cache.
   */
  pageSupportNeededExplanation:
      'These reasons are actionable i.e. they can be cleaned up to make the page eligible for back-forward cache.',
  /**
   * @description Label for a list of reasons which prevent the page from being eligible for
   * back-forward cache. These reasons are circumstantial / not actionable i.e. they cannot be
   * cleaned up by developers to make the page eligible for back-forward cache.
   */
  circumstantial: 'Not Actionable',
  /**
   * @description Explanation for circumstantial/non-actionable items which prevent the page from being eligible
   * for back-forward cache.
   */
  circumstantialExplanation:
      'These reasons are not actionable i.e. caching was prevented by something outside of the direct control of the page.',
  /**
   * @description Label for a list of reasons which prevent the page from being eligible for
   * back-forward cache. These reasons are pending support by chrome i.e. in a future version
   * of chrome they will not prevent back-forward cache usage anymore.
   */
  supportPending: 'Pending Support',
  /**
   * @description Explanation for 'pending support' items which prevent the page from being eligible
   * for back-forward cache.
   */
  supportPendingExplanation:
      'Chrome support for these reasons is pending i.e. they will not prevent the page from being eligible for back-forward cache in a future version of Chrome.',
};
// Register the strings under this file's path and bind the lookup helper.
const str_ = i18n.i18n.registerUIStrings('panels/application/BackForwardCacheView.ts', UIStrings);
const i18nString = i18n.i18n.getLocalizedString.bind(undefined, str_);
/**
 * Application-panel widget reporting whether the last main-frame navigation
 * was restored from the back-forward cache and, when it was not, listing the
 * reasons grouped by actionability.
 */
export class BackForwardCacheView extends UI.ThrottledWidget.ThrottledWidget {
  constructor() {
    // Throttled widget: re-renders at most once per second.
    super(true, 1000);
    // Re-render whenever the main frame navigates or its BFCache details change.
    this.getMainResourceTreeModel()?.addEventListener(
        SDK.ResourceTreeModel.Events.MainFrameNavigated, this.onBackForwardCacheUpdate, this);
    this.getMainResourceTreeModel()?.addEventListener(
        SDK.ResourceTreeModel.Events.BackForwardCacheDetailsUpdated, this.onBackForwardCacheUpdate, this);
    this.update();
  }
  wasShown(): void {
    super.wasShown();
    this.registerCSSFiles([backForwardCacheViewStyles]);
  }
  // Event listener target; funnels model events into the throttled update.
  private onBackForwardCacheUpdate(): void {
    this.update();
  }
  // Renders the whole report into the widget's content element.
  async doUpdate(): Promise<void> {
    const data = {reportTitle: i18nString(UIStrings.backForwardCacheTitle)};
    const html = LitHtml.html`
      <${ReportView.ReportView.Report.litTagName} .data=${data as ReportView.ReportView.ReportData}>
        ${this.renderMainFrameInformation(this.getMainFrame())}
      </${ReportView.ReportView.Report.litTagName}>
    `;
    LitHtml.render(html, this.contentElement, {host: this});
  }
  // Resource tree model of the main target, or null if there is none.
  private getMainResourceTreeModel(): SDK.ResourceTreeModel.ResourceTreeModel|null {
    const mainTarget = SDK.TargetManager.TargetManager.instance().mainTarget();
    return mainTarget?.model(SDK.ResourceTreeModel.ResourceTreeModel) || null;
  }
  private getMainFrame(): SDK.ResourceTreeModel.ResourceTreeFrame|null {
    return this.getMainResourceTreeModel()?.mainFrame || null;
  }
  // Renders URL, BFCache status and (optionally) not-restored explanations for
  // the main frame, or an "unavailable" row when there is no main frame.
  private renderMainFrameInformation(mainFrame: SDK.ResourceTreeModel.ResourceTreeFrame|null): LitHtml.TemplateResult {
    if (!mainFrame) {
      return LitHtml.html`<${ReportView.ReportView.ReportKey.litTagName}>${i18nString(UIStrings.mainFrame)}</${
          ReportView.ReportView.ReportKey.litTagName}>
      <${ReportView.ReportView.ReportValue.litTagName}>
        ${i18nString(UIStrings.unavailable)}
      </${ReportView.ReportView.ReportValue.litTagName}>`;
    }
    return LitHtml.html`
      <${ReportView.ReportView.ReportSectionHeader.litTagName}>${i18nString(UIStrings.lastMainFrameNavigation)}</${
        ReportView.ReportView.ReportSectionHeader.litTagName}>
      <${ReportView.ReportView.ReportKey.litTagName}>${i18nString(UIStrings.url)}</${
        ReportView.ReportView.ReportKey.litTagName}>
      <${ReportView.ReportView.ReportValue.litTagName}>${mainFrame.url}</${
        ReportView.ReportView.ReportValue.litTagName}>
      <${ReportView.ReportView.ReportKey.litTagName}>${i18nString(UIStrings.bfcacheStatus)}</${
        ReportView.ReportView.ReportKey.litTagName}>
      <${ReportView.ReportView.ReportValue.litTagName}>${
        this.renderBackForwardCacheStatus(
            mainFrame.backForwardCacheDetails.restoredFromCache)}</${ReportView.ReportView.ReportValue.litTagName}>
      ${this.maybeRenderExplanations(mainFrame.backForwardCacheDetails.explanations)}
    `;
  }
  // Maps the tri-state restored-from-cache flag (true/false/undefined) to a localized label.
  private renderBackForwardCacheStatus(status: boolean|undefined): Platform.UIString.LocalizedString {
    switch (status) {
      case true:
        return i18nString(UIStrings.restoredFromBFCache);
      case false:
        return i18nString(UIStrings.normalNavigation);
    }
    return i18nString(UIStrings.unknown);
  }
  // Splits the explanations into the three reason categories and renders a
  // section per non-empty category; renders nothing when there are none.
  private maybeRenderExplanations(explanations: Protocol.Page.BackForwardCacheNotRestoredExplanation[]):
      LitHtml.TemplateResult|{} {
    if (explanations.length === 0) {
      return LitHtml.nothing;
    }
    const pageSupportNeeded = explanations.filter(
        explanation => explanation.type === Protocol.Page.BackForwardCacheNotRestoredReasonType.PageSupportNeeded);
    const supportPending = explanations.filter(
        explanation => explanation.type === Protocol.Page.BackForwardCacheNotRestoredReasonType.SupportPending);
    const circumstantial = explanations.filter(
        explanation => explanation.type === Protocol.Page.BackForwardCacheNotRestoredReasonType.Circumstantial);
    // Disabled until https://crbug.com/1079231 is fixed.
    // clang-format off
    return LitHtml.html`
      ${this.renderExplanations(i18nString(UIStrings.pageSupportNeeded), i18nString(UIStrings.pageSupportNeededExplanation), pageSupportNeeded)}
      ${this.renderExplanations(i18nString(UIStrings.supportPending), i18nString(UIStrings.supportPendingExplanation), supportPending)}
      ${this.renderExplanations(i18nString(UIStrings.circumstantial), i18nString(UIStrings.circumstantialExplanation), circumstantial)}
    `;
    // clang-format on
  }
  // Renders one category: a header with a help icon (tooltip = explainerText)
  // and the list of reasons; empty categories produce no output.
  private renderExplanations(
      category: Platform.UIString.LocalizedString, explainerText: Platform.UIString.LocalizedString,
      explanations: Protocol.Page.BackForwardCacheNotRestoredExplanation[]): LitHtml.TemplateResult {
    // Disabled until https://crbug.com/1079231 is fixed.
    // clang-format off
    return LitHtml.html`
      ${explanations.length > 0 ? LitHtml.html`
        <${ReportView.ReportView.ReportKey.litTagName}>
          ${category}
          <${IconButton.Icon.Icon.litTagName} class="inline-icon" .data=${{
            iconName: 'help_outline',
            color: 'var(--color-text-secondary)',
            width: '16px',
            height: '16px',
          } as IconButton.Icon.IconData} title=${explainerText}></${IconButton.Icon.Icon.litTagName}>
        </${ReportView.ReportView.ReportKey.litTagName}>
        <${ReportView.ReportView.ReportValue.litTagName}>
          <ul class='not-restored-reason-list'>${explanations.map(explanation => this.renderReason(explanation))}</ul>
        </${ReportView.ReportView.ReportValue.litTagName}>
      ` : LitHtml.nothing}
    `;
    // clang-format on
  }
  // Renders one reason; appends a human-readable description when one is known.
  private renderReason(explanation: Protocol.Page.BackForwardCacheNotRestoredExplanation): LitHtml.TemplateResult {
    return LitHtml.html`
      <li>${explanation.reason} : ${
        (explanation.reason in NotRestoredReasonDescription) ?
            LitHtml.html`${NotRestoredReasonDescription[explanation.reason].name()}` :
            LitHtml.nothing} </li>
    `;
  }
}
import generate from '@babel/generator';
import template from '@babel/template';
import * as t from '@babel/types';
import type { NodePath, Visitor } from '@babel/traverse';
import traverse from '@babel/traverse';
import { unique } from '@compiled/utils';
import { transformCss } from '@compiled/css';
import isPropValid from '@emotion/is-prop-valid';
import type { Tag } from '../types';
import { getItemCss } from './css-builders';
import { pickFunctionBody, resolveIdentifierComingFromDestructuring } from './ast';
import type { Metadata } from '../types';
import type { CSSOutput, CssItem } from '../utils/types';
import { PROPS_IDENTIFIER_NAME } from '../constants';
/**
 * Options consumed when building the output template for a Styled Component.
 */
export interface StyledTemplateOpts {
  /**
   * Class to be used for the CSS selector.
   */
  classNames: t.Expression[];
  /**
   * Tag for the Styled Component, for example "div" or user defined component.
   */
  tag: Tag;
  /**
   * CSS variables to be passed to the `style` prop.
   */
  variables: CSSOutput['variables'];
  /**
   * CSS sheets to be passed to the `CS` component.
   */
  sheets: string[];
}
/**
 * Hoists a sheet to the top of the module if it's not already there.
 * Returns the referencing identifier.
 *
 * @param sheet {string} Stylesheet
 * @param meta {Metadata} Useful metadata that can be used during the transformation
 */
const hoistSheet = (sheet: string, meta: Metadata): t.Identifier => {
  // Each distinct sheet is hoisted once per module; reuse the cached identifier.
  if (meta.state.sheets[sheet]) {
    return meta.state.sheets[sheet];
  }
  const sheetIdentifier = meta.parentPath.scope.generateUidIdentifier('');
  const parent = meta.parentPath.findParent((path) => path.isProgram());
  const parentBody = parent && (parent.get('body') as NodePath[]);
  // Insertion point: the first top-level statement after the import block.
  const path = parentBody && parentBody.filter((path) => !path.isImportDeclaration())[0];
  if (path) {
    const kind = 'const';
    const newVariable = t.variableDeclarator(sheetIdentifier, t.stringLiteral(sheet));
    path.insertBefore(t.variableDeclaration(kind, [newVariable])).forEach((newVariable) => {
      // Register the binding so it's now available in scope.
      meta.parentPath.scope.registerBinding(kind, newVariable as NodePath<t.Node>);
    });
  }
  // Cache the identifier even when no insertion point was found, so repeat
  // calls stay consistent for the same sheet.
  meta.state.sheets[sheet] = sheetIdentifier;
  return sheetIdentifier;
};
/**
 * Builds the CSS-variable entries that end up on the inline `style` prop.
 *
 * Each unique variable (uniqueness is decided by name) becomes an object
 * property whose value is an `ix(...)` call wrapping the variable expression
 * and, when present, its suffix and prefix string literals.
 *
 * @param variables CSS variables that will be placed in the AST
 * @param transform Transform function that can be used to change the CSS variable expression
 */
export const buildCssVariablesProp = (
  variables: CSSOutput['variables'],
  transform = (expression: t.Expression) => expression
): (t.ObjectProperty | t.SpreadElement)[] => {
  const dedupedVariables = unique(variables, (item) => item.name);
  return dedupedVariables.map((variable) => {
    // Callers may rewrite the expression (e.g. the styled API strips the arrow function).
    const callArgs: t.Expression[] = [transform(variable.expression)];
    // The prefix argument is only meaningful when a suffix is present too,
    // matching the positional `ix(expression, suffix, prefix)` contract.
    if (variable.suffix) {
      callArgs.push(t.stringLiteral(variable.suffix));
      if (variable.prefix) {
        callArgs.push(t.stringLiteral(variable.prefix));
      }
    }
    return t.objectProperty(
      t.stringLiteral(variable.name),
      t.callExpression(t.identifier('ix'), callArgs)
    );
  });
};
/**
 * Builds the inline `style` prop value for a Styled Component: a spread of the
 * incoming `style` prop followed by the generated CSS-variable properties.
 *
 * @param variables CSS variables that will be placed in the AST
 * @param transform Transform callback function that can be used to change the CSS variable expression
 */
const styledStyleProp = (
  variables: CSSOutput['variables'],
  transform?: (expression: t.Expression) => t.Expression
) => {
  return t.objectExpression([
    t.spreadElement(t.identifier('style')),
    ...buildCssVariablesProp(variables, transform),
  ]);
};
/**
 * Returns a tag string in the form of an identifier or string literal.
 *
 * A type of InBuiltComponent will return a string literal (e.g. `"div"`),
 * otherwise an identifier string (e.g. `MyComponent`) will be returned.
 *
 * @param tag Made of name and type.
 */
const buildComponentTag = ({ name, type }: Tag) => {
  if (type === 'InBuiltComponent') {
    return `"${name}"`;
  }
  return name;
};
/**
 * Traverses an arrow function with the supplied visitor and then returns the
 * arrow function body node.
 *
 * @param node Arrow function node
 * @param nestedVisitor Visitor callback function
 */
const traverseStyledArrowFunctionExpression = (
  node: t.ArrowFunctionExpression,
  nestedVisitor: Visitor
) => {
  // Visit the interpolation first so any prop references get rewritten,
  // then strip the wrapping arrow function and keep only its body.
  traverse(node, nestedVisitor);
  const body = pickFunctionBody(node);
  return body;
};
/**
* Traverses a binary expression looking for any arrow functions,
* calls back with each arrow function node into the passed in `nestedVisitor`,
* and then finally replaces each found arrow function node with its body.
*
* @param node Binary expression node
* @param nestedVisitor Visitor callback function
*/
const traverseStyledBinaryExpression = (node: t.BinaryExpression, nestedVisitor: Visitor) => {
traverse(node, {
noScope: true,
ArrowFunctionExpression(path) {
path.traverse(nestedVisitor);
path.replaceWith(pickFunctionBody(path.node));
path.stop();
},
});
return node;
};
/**
 * Handles cases like:
 * 1. `propz.loading` in `border-color: ${(propz) => (propz.loading ? colors.N100 : colors.N200)};`
 * Outcome: It will replace `propz.loading` with `props.loading`.
 *
 * 2. `props.notValidProp` in `border-color: ${(props) => (props.notValidProp ? colors.N100 : colors.N200)};`
 * Outcome: It will move `notValidProp` under `propsToDestructure` and replaces `props.notValidProp` with `notValidProp`.
 *
 * @param path MemberExpression path
 */
const handleMemberExpressionInStyledInterpolation = (path: NodePath<t.MemberExpression>) => {
  const propsToDestructure: string[] = [];
  const objectNode = path.node.object;
  if (!t.isIdentifier(objectNode)) {
    return propsToDestructure;
  }
  // Walk up to the function owning this interpolation so the member
  // expression's object can be compared against the function's first param.
  const owningFunctionPath: NodePath<t.Node> | null = path.find((parentPath) =>
    parentPath.isFunction()
  );
  const objectName = objectNode.name;
  const matchesFirstParam: boolean | null =
    owningFunctionPath &&
    t.isFunction(owningFunctionPath.node) &&
    t.isIdentifier(owningFunctionPath.node.params[0]) &&
    owningFunctionPath.node.params[0].name === objectName;
  if (!matchesFirstParam) {
    return propsToDestructure;
  }
  const propertyNode = path.node.property;
  if (!t.isIdentifier(propertyNode)) {
    return propsToDestructure;
  }
  const propertyName = propertyNode.name;
  if (!isPropValid(propertyName)) {
    // Not a valid HTML attribute — destructure it away so it is never
    // spread onto the underlying DOM element.
    propsToDestructure.push(propertyName);
    path.replaceWith(propertyNode);
    return propsToDestructure;
  }
  // Valid attribute: normalize aliases like `propz.color` to `props.color`.
  if (objectName !== PROPS_IDENTIFIER_NAME) {
    path.replaceWith(
      t.memberExpression(t.identifier(PROPS_IDENTIFIER_NAME), t.identifier(propertyName))
    );
  }
  return propsToDestructure;
};
/**
 * Handles cases like:
 * 1. `isLoading` in `background-color: ${({ isLoading }) => (isLoading ? colors.N20 : colors.N40)};`
 * Outcome: It will move `isLoading` under `propsToDestructure`.
 *
 * 2. `l` in `color: ${({ loading: l }) => (l ? colors.N50 : colors.N10)};`
 * Outcome: It will move `loading` under `propsToDestructure` and replaces `l` with `loading`.
 *
 * @param path Identifier path
 */
const handleDestructuringInStyledInterpolation = (path: NodePath<t.Identifier>) => {
  const propsToDestructure: string[] = [];
  // Skip the key side of object properties like `({ loading: load }) => load`:
  // both `: load` and `=> load` are identifiers with the same function parent,
  // and only the usage (`=> load`) should be rewritten. Modifying `: load`
  // first would leave `=> load` unresolvable against the function params.
  if (!path.parentPath || t.isObjectProperty(path.parentPath.node)) {
    return propsToDestructure;
  }
  const owningFunctionPath: NodePath<t.Node> | null = path.find((parentPath) =>
    parentPath.isFunction()
  );
  const firstFunctionParam =
    owningFunctionPath && t.isFunction(owningFunctionPath.node) && owningFunctionPath.node.params[0];
  const resolved = resolveIdentifierComingFromDestructuring({
    name: path.node.name,
    node: firstFunctionParam as t.Expression,
    resolveFor: 'value',
  });
  if (!resolved || !t.isIdentifier(resolved.key)) {
    return propsToDestructure;
  }
  const originalName = resolved.key.name;
  propsToDestructure.push(originalName);
  // Only rewrite when the names differ — replacing an identifier with an
  // identical one would make the visitor revisit it and recurse forever.
  if (originalName !== path.node.name) {
    path.replaceWith(t.identifier(originalName));
  }
  return propsToDestructure;
};
// Template that assigns `displayName` in development builds only; the
// `process.env.NODE_ENV` guard lets bundlers strip the assignment from
// production output. Placeholders are filled in by buildDisplayName below.
const displayNameTemplate = template(
`
if (process.env.NODE_ENV !== 'production') {
%%identifier%%.displayName = %%displayName%%;
}
`,
  { syntacticPlaceholders: true }
);
/**
 * Assigns a display name string to the identifier, guarded so the assignment
 * only runs outside of production builds.
 *
 * @param identifier Name of the variable the display name is assigned to
 * @param displayName Display name to assign — defaults to the identifier itself
 * @returns AST node containing the guarded `displayName` assignment
 */
export const buildDisplayName = (identifier: string, displayName: string = identifier): t.Node => {
  // The template substitutes raw source text, so quote the display name here.
  const quotedDisplayName = `'${displayName}'`;
  return displayNameTemplate({
    identifier,
    displayName: quotedDisplayName,
  }) as t.Node;
};
/**
 * Will return a generated AST for a Styled Component.
 *
 * The generated component forwards its ref, spreads remaining props onto the
 * rendered element, and destructures interpolation-only props so they are not
 * spread onto the DOM. Hoisted stylesheets render through `<CS>` and class
 * names are combined at runtime with `ax()`.
 *
 * @param opts {StyledTemplateOpts} Template options
 * @param meta {Metadata} Useful metadata that can be used during the transformation
 */
const styledTemplate = (opts: StyledTemplateOpts, meta: Metadata): t.Node => {
const nonceAttribute = meta.state.opts.nonce ? `nonce={${meta.state.opts.nonce}}` : '';
const propsToDestructure: string[] = [];
// Build the inline style prop only when dynamic CSS variables exist. While
// traversing the interpolations, the nested visitor mutates
// `propsToDestructure` as a side effect so the template below can strip
// those props out of the spread.
const styleProp = opts.variables.length
? styledStyleProp(opts.variables, (node) => {
const nestedArrowFunctionExpressionVisitor = {
noScope: true,
MemberExpression(path: NodePath<t.MemberExpression>) {
const propsToDestructureFromMemberExpression = handleMemberExpressionInStyledInterpolation(
path
);
propsToDestructure.push(...propsToDestructureFromMemberExpression);
},
Identifier(path: NodePath<t.Identifier>) {
const propsToDestructureFromIdentifier = handleDestructuringInStyledInterpolation(path);
propsToDestructure.push(...propsToDestructureFromIdentifier);
},
};
if (t.isArrowFunctionExpression(node)) {
return traverseStyledArrowFunctionExpression(node, nestedArrowFunctionExpressionVisitor);
}
if (t.isBinaryExpression(node)) {
return traverseStyledBinaryExpression(node, nestedArrowFunctionExpressionVisitor);
}
return node;
})
: t.identifier('style');
// Split class names into ones that always apply (string literals) and ones
// guarded by a logical expression; both end up inside the `ax()` call below.
let unconditionalClassNames = '',
logicalClassNames = '';
opts.classNames.forEach((item) => {
if (t.isStringLiteral(item)) {
unconditionalClassNames += `${item.value} `;
} else if (t.isLogicalExpression(item)) {
logicalClassNames += `${generate(item).code}, `;
}
});
// NOTE: `logicalClassNames` ends with a trailing ", " which doubles as the
// separator before `props.className` inside the template below.
const classNames = `"${unconditionalClassNames.trim()}", ${logicalClassNames}`;
return template(
`
forwardRef(({
as: C = ${buildComponentTag(opts.tag)},
style,
${unique(propsToDestructure)
.map((prop) => prop + ',')
.join('')}
...${PROPS_IDENTIFIER_NAME}
}, ref) => (
<CC>
<CS ${nonceAttribute}>{%%cssNode%%}</CS>
<C
{...${PROPS_IDENTIFIER_NAME}}
style={%%styleProp%%}
ref={ref}
className={ax([${classNames} ${PROPS_IDENTIFIER_NAME}.className])}
/>
</CC>
));
`,
{
plugins: ['jsx'],
}
)({
styleProp,
cssNode: t.arrayExpression(unique(opts.sheets).map((sheet) => hoistSheet(sheet, meta))),
}) as t.Node;
};
/**
 * Plucks the `key` attribute from a JSX element when its value is a JSX
 * expression container (e.g. `key={item.id}`), returning undefined otherwise.
 *
 * @param node Node to inspect — non-JSX-element nodes are ignored
 */
const getKeyAttribute = (node: t.Expression): t.JSXAttribute | void => {
  if (!t.isJSXElement(node)) {
    return;
  }
  const found = node.openingElement.attributes.find(
    (attribute): attribute is t.JSXAttribute =>
      t.isJSXAttribute(attribute) && attribute.name.name === 'key'
  );
  // Only expression-container values are useful to carry over.
  if (found && t.isJSXExpressionContainer(found.value)) {
    return found;
  }
  return;
};
/**
 * Will return a generated AST for a Compiled Component.
 * This is primarily used for CSS prop and ClassNames apis.
 *
 * @param node Originating node
 * @param sheets {string[]} Stylesheets
 * @param meta {Metadata} Useful metadata that can be used during the transformation
 */
export const compiledTemplate = (node: t.Expression, sheets: string[], meta: Metadata): t.Node => {
  const nonceAttribute = meta.state.opts.nonce ? `nonce={${meta.state.opts.nonce}}` : '';
  // Carry an existing `key` attribute over to the wrapping component so lists
  // keep stable reconciliation keys.
  const keyAttribute = getKeyAttribute(node);
  const keyAttributeCode = keyAttribute ? generate(keyAttribute).code : '';
  const buildTemplate = template(
    `
<CC ${keyAttributeCode}>
<CS ${nonceAttribute}>{%%cssNode%%}</CS>
{%%jsxNode%%}
</CC>
`,
    {
      plugins: ['jsx'],
    }
  );
  return buildTemplate({
    jsxNode: node,
    cssNode: t.arrayExpression(unique(sheets).map((sheet) => hoistSheet(sheet, meta))),
  }) as t.Node;
};
/**
 * Will join two expressions together with a spacer,
 * looking like `left + (spacer + right)`.
 *
 * @param left Any expression node on the left
 * @param right Any expression node on the right
 * @param spacer Optional spacer node to place between the left and right node.
 * Defaults to a single space string literal; pass `null` to join without one.
 */
export const joinExpressions = (
  left: t.Expression,
  right: t.Expression,
  // Was `any` — tightened to the values actually handled (an expression node,
  // or a falsy value to skip the spacer). Types are erased at runtime so this
  // is backward-compatible for all existing callers.
  spacer: t.Expression | null = t.stringLiteral(' ')
): t.BinaryExpression => {
  // The spacer binds to the right-hand side first: left + (spacer + right).
  // For string concatenation the grouping does not change the output.
  return t.binaryExpression('+', left, spacer ? t.binaryExpression('+', spacer, right) : right);
};
/**
 * Will conditionally join two expressions together depending on the right expression.
 * Looks like: `left + (right ? ' ' + right : '')`
 *
 * @param left Expression that is always included
 * @param right Expression that is only joined (with a space) when truthy at runtime
 */
export const conditionallyJoinExpressions = (
  left: t.Expression,
  right: t.Expression
): t.BinaryExpression => {
  // ' ' + right — only evaluated when `right` is truthy at runtime.
  const spacedRight = t.binaryExpression('+', t.stringLiteral(' '), right);
  const maybeRight = t.conditionalExpression(right, spacedRight, t.stringLiteral(''));
  return t.binaryExpression('+', left, maybeRight);
};
/**
 * Returns a Styled Component AST.
 *
 * @param tag {Tag} Styled tag, either an inbuilt or user-defined component
 * @param cssOutput {CSSOutput} CSS and variables to place onto the component
 * @param meta {Metadata} Useful metadata that can be used during the transformation
 */
export const buildStyledComponent = (tag: Tag, cssOutput: CSSOutput, meta: Metadata): t.Node => {
  // Partition the CSS into conditionally applied (logical) rules and
  // always-applied rules, preserving their original order.
  const logicalCss: CssItem[] = [];
  const unconditionalCss: string[] = [];
  for (const item of cssOutput.css) {
    if (item.type === 'logical') {
      logicalCss.push(item);
    } else {
      unconditionalCss.push(getItemCss(item));
    }
  }
  // Rely on transformCss to remove duplicates and return only the last unconditional CSS for each property
  const uniqueUnconditionalCssOutput = transformCss(unconditionalCss.join(''));
  // Rely on transformItemCss to build logicalExpressions for logical CSS
  const logicalCssOutput = transformItemCss({ css: logicalCss, variables: cssOutput.variables });
  return styledTemplate(
    {
      classNames: [
        t.stringLiteral(uniqueUnconditionalCssOutput.classNames.join(' ')),
        ...logicalCssOutput.classNames,
      ],
      tag,
      sheets: [...uniqueUnconditionalCssOutput.sheets, ...logicalCssOutput.sheets],
      variables: cssOutput.variables,
    },
    meta
  );
};
/**
 * Wrapper to make defining import specifiers easier.
 * If `localName` is defined it will rename the import to it,
 * e.g: `name as localName`.
 *
 * @param name import name
 * @param localName local name
 */
export const importSpecifier = (name: string, localName?: string): t.ImportSpecifier => {
  // Intentionally `||` (not `??`): an empty-string local name also falls back.
  const local = localName || name;
  return t.importSpecifier(t.identifier(name), t.identifier(local));
};
/**
 * Returns the actual value of a jsx value, unwrapping expression containers.
 *
 * @param node JSX node to extract the value from
 * @throws When the expression container holds an empty expression, e.g. `prop={}`.
 */
export const getPropValue = (
  node: t.JSXElement | t.JSXFragment | t.StringLiteral | t.JSXExpressionContainer
): t.Expression => {
  if (!t.isJSXExpressionContainer(node)) {
    return node;
  }
  const { expression } = node;
  if (t.isJSXEmptyExpression(expression)) {
    throw new Error('Empty expression not supported.');
  }
  return expression;
};
/**
 * Transforms CSS output into `sheets` and `classNames` ASTs.
 *
 * @param cssOutput {CSSOutput}
 */
const transformItemCss = (cssOutput: CSSOutput) => {
  const sheets: string[] = [];
  const classNames: t.Expression[] = [];
  for (const item of cssOutput.css) {
    const css = transformCss(getItemCss(item));
    const className = css.classNames.join(' ');
    sheets.push(...css.sheets);
    if (item.type === 'logical') {
      // Conditional CSS — the class name only applies when the logical
      // expression holds at runtime.
      classNames.push(
        t.logicalExpression(item.operator, item.expression, t.stringLiteral(className))
      );
    } else if (className) {
      // Unconditional CSS — skip empty class names entirely.
      classNames.push(t.stringLiteral(className));
    }
  }
  return { sheets, classNames };
};
/**
 * Returns a Compiled Component AST.
 *
 * Mutates the passed JSX element in place: merges the generated class names
 * into any existing `className` prop via `ax()`, and — when dynamic CSS
 * variables exist — rebuilds the `style` prop so the variables are included.
 * Finally wraps the element with the compiled template so its stylesheets
 * are rendered.
 *
 * @param node Originating node
 * @param cssOutput CSS and variables to place onto the component
 * @param meta {Metadata} Useful metadata that can be used during the transformation
 */
export const buildCompiledComponent = (
node: t.JSXElement,
cssOutput: CSSOutput,
meta: Metadata
): t.Node => {
const { sheets, classNames } = transformItemCss(cssOutput);
const classNameProp = node.openingElement.attributes.find((prop): prop is t.JSXAttribute => {
return t.isJSXAttribute(prop) && prop.name.name === 'className';
});
if (classNameProp && classNameProp.value) {
// If there is a class name prop statically defined we want to concatenate it with
// the class name we're going to put on it.
const classNameExpression = getPropValue(classNameProp.value);
const values: t.Expression[] = classNames.concat(classNameExpression);
classNameProp.value = t.jsxExpressionContainer(
t.callExpression(t.identifier('ax'), [t.arrayExpression(values)])
);
} else {
// No class name - just push our own one.
node.openingElement.attributes.push(
t.jsxAttribute(
t.jsxIdentifier('className'),
t.jsxExpressionContainer(
t.callExpression(t.identifier('ax'), [t.arrayExpression(classNames)])
)
)
);
}
if (cssOutput.variables.length) {
// If there is dynamic CSS in use we have work to do.
let stylePropIndex = -1;
// Find the style prop on the opening JSX element.
// NOTE: the find callback records the attribute index as a side effect so
// the splice below removes the right element.
const styleProp = node.openingElement.attributes.find((prop, index): prop is t.JSXAttribute => {
if (t.isJSXAttribute(prop) && prop.name.name === 'style') {
stylePropIndex = index;
return true;
}
return false;
});
const dynamicStyleProperties = buildCssVariablesProp(cssOutput.variables);
if (styleProp) {
// Remove the pre-existing style prop - we're going to redefine it soon.
node.openingElement.attributes.splice(stylePropIndex, 1);
if (
styleProp.value &&
t.isJSXExpressionContainer(styleProp.value) &&
!t.isJSXEmptyExpression(styleProp.value.expression)
) {
// If it's not an object we just spread the expression into the object
if (!t.isObjectExpression(styleProp.value.expression)) {
dynamicStyleProperties.splice(0, 0, t.spreadElement(styleProp.value.expression));
} else {
// Else it's an object! So we want to place each property into the object
styleProp.value.expression.properties.forEach((prop, index) => {
// Object methods cannot be represented in an inline style — skip them.
if (t.isObjectMethod(prop)) {
return;
}
// We want to keep the order that they were defined in.
// So we're using index here to do just that.
dynamicStyleProperties.splice(index, 0, prop);
});
}
}
}
// Finally add the new style prop back to the opening JSX element.
node.openingElement.attributes.push(
t.jsxAttribute(
t.jsxIdentifier('style'),
t.jsxExpressionContainer(t.objectExpression(dynamicStyleProperties))
)
);
}
return compiledTemplate(node, sheets, meta);
};
import * as msRest from "@azure/ms-rest-js";
import * as msRestAzure from "@azure/ms-rest-azure-js";
import * as Models from "../models";
import * as Mappers from "../models/workspaceCollectionsMappers";
import * as Parameters from "../models/parameters";
import { PowerBIEmbeddedManagementClientContext } from "../powerBIEmbeddedManagementClientContext";
/**
 * Class representing a WorkspaceCollections operation group.
 *
 * Auto-generated service wrapper for the Microsoft.PowerBI
 * `workspaceCollections` resource operations. Each operation is exposed with
 * promise- and callback-style overloads; every implementation signature simply
 * forwards its arguments plus the matching operation spec to
 * `client.sendOperationRequest`.
 */
export class WorkspaceCollections {
// Shared service client context used to issue all requests.
private readonly client: PowerBIEmbeddedManagementClientContext;
/**
 * Create a WorkspaceCollections.
 * @param {PowerBIEmbeddedManagementClientContext} client Reference to the service client.
 */
constructor(client: PowerBIEmbeddedManagementClientContext) {
this.client = client;
}
/**
 * Retrieves an existing Power BI Workspace Collection.
 * @param resourceGroupName Azure resource group
 * @param workspaceCollectionName Power BI Embedded Workspace Collection name
 * @param [options] The optional parameters
 * @returns Promise<Models.WorkspaceCollectionsGetByNameResponse>
 */
getByName(resourceGroupName: string, workspaceCollectionName: string, options?: msRest.RequestOptionsBase): Promise<Models.WorkspaceCollectionsGetByNameResponse>;
/**
 * @param resourceGroupName Azure resource group
 * @param workspaceCollectionName Power BI Embedded Workspace Collection name
 * @param callback The callback
 */
getByName(resourceGroupName: string, workspaceCollectionName: string, callback: msRest.ServiceCallback<Models.WorkspaceCollection>): void;
/**
 * @param resourceGroupName Azure resource group
 * @param workspaceCollectionName Power BI Embedded Workspace Collection name
 * @param options The optional parameters
 * @param callback The callback
 */
getByName(resourceGroupName: string, workspaceCollectionName: string, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.WorkspaceCollection>): void;
getByName(resourceGroupName: string, workspaceCollectionName: string, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.WorkspaceCollection>, callback?: msRest.ServiceCallback<Models.WorkspaceCollection>): Promise<Models.WorkspaceCollectionsGetByNameResponse> {
return this.client.sendOperationRequest(
{
resourceGroupName,
workspaceCollectionName,
options
},
getByNameOperationSpec,
callback) as Promise<Models.WorkspaceCollectionsGetByNameResponse>;
}
/**
 * Creates a new Power BI Workspace Collection with the specified properties. A Power BI Workspace
 * Collection contains one or more workspaces, and can be used to provision keys that provide API
 * access to those workspaces.
 * @param resourceGroupName Azure resource group
 * @param workspaceCollectionName Power BI Embedded Workspace Collection name
 * @param body Create workspace collection request
 * @param [options] The optional parameters
 * @returns Promise<Models.WorkspaceCollectionsCreateResponse>
 */
create(resourceGroupName: string, workspaceCollectionName: string, body: Models.CreateWorkspaceCollectionRequest, options?: msRest.RequestOptionsBase): Promise<Models.WorkspaceCollectionsCreateResponse>;
/**
 * @param resourceGroupName Azure resource group
 * @param workspaceCollectionName Power BI Embedded Workspace Collection name
 * @param body Create workspace collection request
 * @param callback The callback
 */
create(resourceGroupName: string, workspaceCollectionName: string, body: Models.CreateWorkspaceCollectionRequest, callback: msRest.ServiceCallback<Models.WorkspaceCollection>): void;
/**
 * @param resourceGroupName Azure resource group
 * @param workspaceCollectionName Power BI Embedded Workspace Collection name
 * @param body Create workspace collection request
 * @param options The optional parameters
 * @param callback The callback
 */
create(resourceGroupName: string, workspaceCollectionName: string, body: Models.CreateWorkspaceCollectionRequest, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.WorkspaceCollection>): void;
create(resourceGroupName: string, workspaceCollectionName: string, body: Models.CreateWorkspaceCollectionRequest, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.WorkspaceCollection>, callback?: msRest.ServiceCallback<Models.WorkspaceCollection>): Promise<Models.WorkspaceCollectionsCreateResponse> {
return this.client.sendOperationRequest(
{
resourceGroupName,
workspaceCollectionName,
body,
options
},
createOperationSpec,
callback) as Promise<Models.WorkspaceCollectionsCreateResponse>;
}
/**
 * Update an existing Power BI Workspace Collection with the specified properties.
 * @param resourceGroupName Azure resource group
 * @param workspaceCollectionName Power BI Embedded Workspace Collection name
 * @param body Update workspace collection request
 * @param [options] The optional parameters
 * @returns Promise<Models.WorkspaceCollectionsUpdateResponse>
 */
update(resourceGroupName: string, workspaceCollectionName: string, body: Models.UpdateWorkspaceCollectionRequest, options?: msRest.RequestOptionsBase): Promise<Models.WorkspaceCollectionsUpdateResponse>;
/**
 * @param resourceGroupName Azure resource group
 * @param workspaceCollectionName Power BI Embedded Workspace Collection name
 * @param body Update workspace collection request
 * @param callback The callback
 */
update(resourceGroupName: string, workspaceCollectionName: string, body: Models.UpdateWorkspaceCollectionRequest, callback: msRest.ServiceCallback<Models.WorkspaceCollection>): void;
/**
 * @param resourceGroupName Azure resource group
 * @param workspaceCollectionName Power BI Embedded Workspace Collection name
 * @param body Update workspace collection request
 * @param options The optional parameters
 * @param callback The callback
 */
update(resourceGroupName: string, workspaceCollectionName: string, body: Models.UpdateWorkspaceCollectionRequest, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.WorkspaceCollection>): void;
update(resourceGroupName: string, workspaceCollectionName: string, body: Models.UpdateWorkspaceCollectionRequest, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.WorkspaceCollection>, callback?: msRest.ServiceCallback<Models.WorkspaceCollection>): Promise<Models.WorkspaceCollectionsUpdateResponse> {
return this.client.sendOperationRequest(
{
resourceGroupName,
workspaceCollectionName,
body,
options
},
updateOperationSpec,
callback) as Promise<Models.WorkspaceCollectionsUpdateResponse>;
}
/**
 * Delete a Power BI Workspace Collection.
 * @param resourceGroupName Azure resource group
 * @param workspaceCollectionName Power BI Embedded Workspace Collection name
 * @param [options] The optional parameters
 * @returns Promise<msRest.RestResponse>
 */
deleteMethod(resourceGroupName: string, workspaceCollectionName: string, options?: msRest.RequestOptionsBase): Promise<msRest.RestResponse> {
// Kick off the long-running delete, then poll until the operation finishes.
return this.beginDeleteMethod(resourceGroupName,workspaceCollectionName,options)
.then(lroPoller => lroPoller.pollUntilFinished());
}
/**
 * Verify the specified Power BI Workspace Collection name is valid and not already in use.
 * @param location Azure location
 * @param body Check name availability request
 * @param [options] The optional parameters
 * @returns Promise<Models.WorkspaceCollectionsCheckNameAvailabilityResponse>
 */
checkNameAvailability(location: string, body: Models.CheckNameRequest, options?: msRest.RequestOptionsBase): Promise<Models.WorkspaceCollectionsCheckNameAvailabilityResponse>;
/**
 * @param location Azure location
 * @param body Check name availability request
 * @param callback The callback
 */
checkNameAvailability(location: string, body: Models.CheckNameRequest, callback: msRest.ServiceCallback<Models.CheckNameResponse>): void;
/**
 * @param location Azure location
 * @param body Check name availability request
 * @param options The optional parameters
 * @param callback The callback
 */
checkNameAvailability(location: string, body: Models.CheckNameRequest, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.CheckNameResponse>): void;
checkNameAvailability(location: string, body: Models.CheckNameRequest, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.CheckNameResponse>, callback?: msRest.ServiceCallback<Models.CheckNameResponse>): Promise<Models.WorkspaceCollectionsCheckNameAvailabilityResponse> {
return this.client.sendOperationRequest(
{
location,
body,
options
},
checkNameAvailabilityOperationSpec,
callback) as Promise<Models.WorkspaceCollectionsCheckNameAvailabilityResponse>;
}
/**
 * Retrieves all existing Power BI workspace collections in the specified resource group.
 * @param resourceGroupName Azure resource group
 * @param [options] The optional parameters
 * @returns Promise<Models.WorkspaceCollectionsListByResourceGroupResponse>
 */
listByResourceGroup(resourceGroupName: string, options?: msRest.RequestOptionsBase): Promise<Models.WorkspaceCollectionsListByResourceGroupResponse>;
/**
 * @param resourceGroupName Azure resource group
 * @param callback The callback
 */
listByResourceGroup(resourceGroupName: string, callback: msRest.ServiceCallback<Models.WorkspaceCollectionList>): void;
/**
 * @param resourceGroupName Azure resource group
 * @param options The optional parameters
 * @param callback The callback
 */
listByResourceGroup(resourceGroupName: string, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.WorkspaceCollectionList>): void;
listByResourceGroup(resourceGroupName: string, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.WorkspaceCollectionList>, callback?: msRest.ServiceCallback<Models.WorkspaceCollectionList>): Promise<Models.WorkspaceCollectionsListByResourceGroupResponse> {
return this.client.sendOperationRequest(
{
resourceGroupName,
options
},
listByResourceGroupOperationSpec,
callback) as Promise<Models.WorkspaceCollectionsListByResourceGroupResponse>;
}
/**
 * Retrieves all existing Power BI workspace collections in the specified subscription.
 * @param [options] The optional parameters
 * @returns Promise<Models.WorkspaceCollectionsListBySubscriptionResponse>
 */
listBySubscription(options?: msRest.RequestOptionsBase): Promise<Models.WorkspaceCollectionsListBySubscriptionResponse>;
/**
 * @param callback The callback
 */
listBySubscription(callback: msRest.ServiceCallback<Models.WorkspaceCollectionList>): void;
/**
 * @param options The optional parameters
 * @param callback The callback
 */
listBySubscription(options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.WorkspaceCollectionList>): void;
listBySubscription(options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.WorkspaceCollectionList>, callback?: msRest.ServiceCallback<Models.WorkspaceCollectionList>): Promise<Models.WorkspaceCollectionsListBySubscriptionResponse> {
return this.client.sendOperationRequest(
{
options
},
listBySubscriptionOperationSpec,
callback) as Promise<Models.WorkspaceCollectionsListBySubscriptionResponse>;
}
/**
 * Retrieves the primary and secondary access keys for the specified Power BI Workspace Collection.
 * @param resourceGroupName Azure resource group
 * @param workspaceCollectionName Power BI Embedded Workspace Collection name
 * @param [options] The optional parameters
 * @returns Promise<Models.WorkspaceCollectionsGetAccessKeysResponse>
 */
getAccessKeys(resourceGroupName: string, workspaceCollectionName: string, options?: msRest.RequestOptionsBase): Promise<Models.WorkspaceCollectionsGetAccessKeysResponse>;
/**
 * @param resourceGroupName Azure resource group
 * @param workspaceCollectionName Power BI Embedded Workspace Collection name
 * @param callback The callback
 */
getAccessKeys(resourceGroupName: string, workspaceCollectionName: string, callback: msRest.ServiceCallback<Models.WorkspaceCollectionAccessKeys>): void;
/**
 * @param resourceGroupName Azure resource group
 * @param workspaceCollectionName Power BI Embedded Workspace Collection name
 * @param options The optional parameters
 * @param callback The callback
 */
getAccessKeys(resourceGroupName: string, workspaceCollectionName: string, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.WorkspaceCollectionAccessKeys>): void;
getAccessKeys(resourceGroupName: string, workspaceCollectionName: string, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.WorkspaceCollectionAccessKeys>, callback?: msRest.ServiceCallback<Models.WorkspaceCollectionAccessKeys>): Promise<Models.WorkspaceCollectionsGetAccessKeysResponse> {
return this.client.sendOperationRequest(
{
resourceGroupName,
workspaceCollectionName,
options
},
getAccessKeysOperationSpec,
callback) as Promise<Models.WorkspaceCollectionsGetAccessKeysResponse>;
}
/**
 * Regenerates the primary or secondary access key for the specified Power BI Workspace Collection.
 * @param resourceGroupName Azure resource group
 * @param workspaceCollectionName Power BI Embedded Workspace Collection name
 * @param body Access key to regenerate
 * @param [options] The optional parameters
 * @returns Promise<Models.WorkspaceCollectionsRegenerateKeyResponse>
 */
regenerateKey(resourceGroupName: string, workspaceCollectionName: string, body: Models.WorkspaceCollectionAccessKey, options?: msRest.RequestOptionsBase): Promise<Models.WorkspaceCollectionsRegenerateKeyResponse>;
/**
 * @param resourceGroupName Azure resource group
 * @param workspaceCollectionName Power BI Embedded Workspace Collection name
 * @param body Access key to regenerate
 * @param callback The callback
 */
regenerateKey(resourceGroupName: string, workspaceCollectionName: string, body: Models.WorkspaceCollectionAccessKey, callback: msRest.ServiceCallback<Models.WorkspaceCollectionAccessKeys>): void;
/**
 * @param resourceGroupName Azure resource group
 * @param workspaceCollectionName Power BI Embedded Workspace Collection name
 * @param body Access key to regenerate
 * @param options The optional parameters
 * @param callback The callback
 */
regenerateKey(resourceGroupName: string, workspaceCollectionName: string, body: Models.WorkspaceCollectionAccessKey, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.WorkspaceCollectionAccessKeys>): void;
regenerateKey(resourceGroupName: string, workspaceCollectionName: string, body: Models.WorkspaceCollectionAccessKey, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.WorkspaceCollectionAccessKeys>, callback?: msRest.ServiceCallback<Models.WorkspaceCollectionAccessKeys>): Promise<Models.WorkspaceCollectionsRegenerateKeyResponse> {
return this.client.sendOperationRequest(
{
resourceGroupName,
workspaceCollectionName,
body,
options
},
regenerateKeyOperationSpec,
callback) as Promise<Models.WorkspaceCollectionsRegenerateKeyResponse>;
}
/**
 * Migrates an existing Power BI Workspace Collection to a different resource group and/or
 * subscription.
 * @param resourceGroupName Azure resource group
 * @param body Workspace migration request
 * @param [options] The optional parameters
 * @returns Promise<msRest.RestResponse>
 */
migrate(resourceGroupName: string, body: Models.MigrateWorkspaceCollectionRequest, options?: msRest.RequestOptionsBase): Promise<msRest.RestResponse>;
/**
 * @param resourceGroupName Azure resource group
 * @param body Workspace migration request
 * @param callback The callback
 */
migrate(resourceGroupName: string, body: Models.MigrateWorkspaceCollectionRequest, callback: msRest.ServiceCallback<void>): void;
/**
 * @param resourceGroupName Azure resource group
 * @param body Workspace migration request
 * @param options The optional parameters
 * @param callback The callback
 */
migrate(resourceGroupName: string, body: Models.MigrateWorkspaceCollectionRequest, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<void>): void;
migrate(resourceGroupName: string, body: Models.MigrateWorkspaceCollectionRequest, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<void>, callback?: msRest.ServiceCallback<void>): Promise<msRest.RestResponse> {
return this.client.sendOperationRequest(
{
resourceGroupName,
body,
options
},
migrateOperationSpec,
callback);
}
/**
 * Delete a Power BI Workspace Collection.
 * @param resourceGroupName Azure resource group
 * @param workspaceCollectionName Power BI Embedded Workspace Collection name
 * @param [options] The optional parameters
 * @returns Promise<msRestAzure.LROPoller>
 */
beginDeleteMethod(resourceGroupName: string, workspaceCollectionName: string, options?: msRest.RequestOptionsBase): Promise<msRestAzure.LROPoller> {
// `options` is supplied both in the operation arguments and as the request
// options argument expected by the LRO machinery.
return this.client.sendLRORequest(
{
resourceGroupName,
workspaceCollectionName,
options
},
beginDeleteMethodOperationSpec,
options);
}
}
// Operation Specifications
// Shared serializer used by every spec below; bound to the generated mappers.
const serializer = new msRest.Serializer(Mappers);
// GET a single workspace collection by name.
const getByNameOperationSpec: msRest.OperationSpec = {
  httpMethod: "GET",
  path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.PowerBI/workspaceCollections/{workspaceCollectionName}",
  urlParameters: [
    Parameters.subscriptionId,
    Parameters.resourceGroupName,
    Parameters.workspaceCollectionName
  ],
  queryParameters: [
    Parameters.apiVersion
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {
      bodyMapper: Mappers.WorkspaceCollection
    },
    default: {
      bodyMapper: Mappers.ErrorModel
    }
  },
  serializer
};
// PUT: create a workspace collection (body is required).
const createOperationSpec: msRest.OperationSpec = {
  httpMethod: "PUT",
  path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.PowerBI/workspaceCollections/{workspaceCollectionName}",
  urlParameters: [
    Parameters.subscriptionId,
    Parameters.resourceGroupName,
    Parameters.workspaceCollectionName
  ],
  queryParameters: [
    Parameters.apiVersion
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  requestBody: {
    parameterPath: "body",
    mapper: {
      ...Mappers.CreateWorkspaceCollectionRequest,
      required: true
    }
  },
  responses: {
    200: {
      bodyMapper: Mappers.WorkspaceCollection
    },
    default: {
      bodyMapper: Mappers.ErrorModel
    }
  },
  serializer
};
// PATCH: update a workspace collection (body is required).
const updateOperationSpec: msRest.OperationSpec = {
  httpMethod: "PATCH",
  path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.PowerBI/workspaceCollections/{workspaceCollectionName}",
  urlParameters: [
    Parameters.subscriptionId,
    Parameters.resourceGroupName,
    Parameters.workspaceCollectionName
  ],
  queryParameters: [
    Parameters.apiVersion
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  requestBody: {
    parameterPath: "body",
    mapper: {
      ...Mappers.UpdateWorkspaceCollectionRequest,
      required: true
    }
  },
  responses: {
    200: {
      bodyMapper: Mappers.WorkspaceCollection
    },
    default: {
      bodyMapper: Mappers.ErrorModel
    }
  },
  serializer
};
// POST: check whether a collection name is available in a given location.
const checkNameAvailabilityOperationSpec: msRest.OperationSpec = {
  httpMethod: "POST",
  path: "subscriptions/{subscriptionId}/providers/Microsoft.PowerBI/locations/{location}/checkNameAvailability",
  urlParameters: [
    Parameters.subscriptionId,
    Parameters.location
  ],
  queryParameters: [
    Parameters.apiVersion
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  requestBody: {
    parameterPath: "body",
    mapper: {
      ...Mappers.CheckNameRequest,
      required: true
    }
  },
  responses: {
    200: {
      bodyMapper: Mappers.CheckNameResponse
    },
    default: {
      bodyMapper: Mappers.ErrorModel
    }
  },
  serializer
};
// GET: list all collections within a resource group.
const listByResourceGroupOperationSpec: msRest.OperationSpec = {
  httpMethod: "GET",
  path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.PowerBI/workspaceCollections",
  urlParameters: [
    Parameters.subscriptionId,
    Parameters.resourceGroupName
  ],
  queryParameters: [
    Parameters.apiVersion
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {
      bodyMapper: Mappers.WorkspaceCollectionList
    },
    default: {
      bodyMapper: Mappers.ErrorModel
    }
  },
  serializer
};
// GET: list all collections within the subscription.
const listBySubscriptionOperationSpec: msRest.OperationSpec = {
  httpMethod: "GET",
  path: "subscriptions/{subscriptionId}/providers/Microsoft.PowerBI/workspaceCollections",
  urlParameters: [
    Parameters.subscriptionId
  ],
  queryParameters: [
    Parameters.apiVersion
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {
      bodyMapper: Mappers.WorkspaceCollectionList
    },
    default: {
      bodyMapper: Mappers.ErrorModel
    }
  },
  serializer
};
// POST (listKeys): retrieve the collection's access keys.
const getAccessKeysOperationSpec: msRest.OperationSpec = {
  httpMethod: "POST",
  path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.PowerBI/workspaceCollections/{workspaceCollectionName}/listKeys",
  urlParameters: [
    Parameters.subscriptionId,
    Parameters.resourceGroupName,
    Parameters.workspaceCollectionName
  ],
  queryParameters: [
    Parameters.apiVersion
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {
      bodyMapper: Mappers.WorkspaceCollectionAccessKeys
    },
    default: {
      bodyMapper: Mappers.ErrorModel
    }
  },
  serializer
};
// POST (regenerateKey): rotate one of the collection's access keys.
const regenerateKeyOperationSpec: msRest.OperationSpec = {
  httpMethod: "POST",
  path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.PowerBI/workspaceCollections/{workspaceCollectionName}/regenerateKey",
  urlParameters: [
    Parameters.subscriptionId,
    Parameters.resourceGroupName,
    Parameters.workspaceCollectionName
  ],
  queryParameters: [
    Parameters.apiVersion
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  requestBody: {
    parameterPath: "body",
    mapper: {
      ...Mappers.WorkspaceCollectionAccessKey,
      required: true
    }
  },
  responses: {
    200: {
      bodyMapper: Mappers.WorkspaceCollectionAccessKeys
    },
    default: {
      bodyMapper: Mappers.ErrorModel
    }
  },
  serializer
};
// POST (moveResources): migrate collections to another resource group /
// subscription. Success returns an empty 200 body.
const migrateOperationSpec: msRest.OperationSpec = {
  httpMethod: "POST",
  path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/moveResources",
  urlParameters: [
    Parameters.subscriptionId,
    Parameters.resourceGroupName
  ],
  queryParameters: [
    Parameters.apiVersion
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  requestBody: {
    parameterPath: "body",
    mapper: {
      ...Mappers.MigrateWorkspaceCollectionRequest,
      required: true
    }
  },
  responses: {
    200: {},
    default: {
      bodyMapper: Mappers.ErrorModel
    }
  },
  serializer
};
// DELETE (long-running): the service acknowledges with a 202 Accepted.
const beginDeleteMethodOperationSpec: msRest.OperationSpec = {
  httpMethod: "DELETE",
  path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.PowerBI/workspaceCollections/{workspaceCollectionName}",
  urlParameters: [
    Parameters.subscriptionId,
    Parameters.resourceGroupName,
    Parameters.workspaceCollectionName
  ],
  queryParameters: [
    Parameters.apiVersion
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    202: {},
    default: {
      bodyMapper: Mappers.ErrorModel
    }
  },
  serializer
};
import { PivotFieldList } from '../../src/pivotfieldlist/base/field-list';
import { createElement, remove, isNullOrUndefined } from '@syncfusion/ej2-base';
import { pivot_dataset } from '../base/datasource.spec';
import { IDataSet } from '../../src/base/engine';
import { PivotCommon } from '../../src/common/base/pivot-common';
import * as util from '../utils.spec';
import { profile, inMB, getMemoryProfile } from '../common.spec';
/**
* Pivot Field List render spec
*/
describe('Pivot Field List Rendering', () => {
beforeAll(() => {
const isDef = (o: any) => o !== undefined && o !== null;
if (!isDef(window.performance)) {
console.log("Unsupported environment, window.performance.memory is unavailable");
this.skip(); //Skips test (in Chai)
return;
}
});
describe('Field List with Static mode', () => {
let fieldListObj: PivotFieldList;
let elem: HTMLElement = createElement('div', { id: 'PivotFieldList', styles: 'height:400px;width:60%' });
afterAll(() => {
if (fieldListObj) {
fieldListObj.destroy();
}
remove(elem);
});
beforeAll(() => {
if (document.getElementById(elem.id)) {
remove(document.getElementById(elem.id));
}
document.body.appendChild(elem);
fieldListObj = new PivotFieldList(
{
dataSourceSettings: {
dataSource: pivot_dataset as IDataSet[],
expandAll: false,
enableSorting: true,
sortSettings: [{ name: 'company', order: 'Descending' }],
filterSettings: [{ name: 'name', type: 'Include', items: ['Knight Wooten'] },
{ name: 'company', type: 'Exclude', items: ['NIPAZ'] },
{ name: 'gender', type: 'Include', items: ['male'] }],
rows: [{ name: 'company' }, { name: 'state' }],
columns: [{ name: 'name', caption: 'The caption is used to show the horizontal scrollbar' }],
values: [{ name: 'balance' }, { name: 'quantity' }], filters: [{ name: 'gender' }]
},
renderMode: 'Fixed'
});
fieldListObj.appendTo('#PivotFieldList');
});
let persistdata: string;
it('control class testing', () => {
expect(document.getElementById('PivotFieldList').classList.contains('e-pivotfieldlist')).toEqual(true);
});
it('check field list control wrapper', () => {
expect(!isNullOrUndefined(fieldListObj.element.querySelector('.e-pivotfieldlist-wrapper')));
});
it('check calculated field', () => {
let controlWrapper: HTMLElement = fieldListObj.element.querySelector('.e-pivotfieldlist-wrapper');
(controlWrapper.querySelector('.e-calculated-field') as HTMLElement).click();
expect(!isNullOrUndefined(fieldListObj.element.querySelector('.e-pivotfieldlist-wrapper')));
});
});
describe('Field List with Dynamic mode', () => {
let fieldListObj: PivotFieldList;
let pivotCommon: PivotCommon;
let elem: HTMLElement = createElement('div', { id: 'PivotFieldList', styles: 'height:400px;width:60%' });
let elem1: HTMLElement = createElement('div', { id: 'PivotFieldList1', styles: 'height:400px;width:60%' });
afterAll(() => {
if (fieldListObj) {
fieldListObj.destroy();
}
remove(elem);
remove(elem1);
});
beforeAll(() => {
if (document.getElementById(elem.id)) {
remove(document.getElementById(elem.id));
}
if (document.getElementById(elem1.id)) {
remove(document.getElementById(elem1.id));
}
document.body.appendChild(elem);
document.body.appendChild(elem1);
fieldListObj = new PivotFieldList(
{
dataSourceSettings: {
dataSource: pivot_dataset as IDataSet[],
expandAll: false,
enableSorting: true,
sortSettings: [{ name: 'company', order: 'Descending' }],
filterSettings: [{ name: 'name', type: 'Include', items: ['Knight Wooten'] },
{ name: 'company', type: 'Exclude', items: ['NIPAZ'] },
{ name: 'gender', type: 'Include', items: ['male'] }],
rows: [{ name: 'company' }, { name: 'state' }],
columns: [{ name: 'name', caption: 'The caption is used to show the horizontal scrollbar' }],
values: [{ name: 'balance' }, { name: 'quantity' }], filters: [{ name: 'gender' }]
},
renderMode: 'Popup',
target: elem1
});
fieldListObj.appendTo('#PivotFieldList');
util.disableDialogAnimation(fieldListObj.dialogRenderer.fieldListDialog);
pivotCommon = fieldListObj.pivotCommon;
});
let persistdata: string;
it('control class testing', () => {
expect(fieldListObj.element.classList.contains('e-pivotfieldlist')).toEqual(true);
});
it('check field list icon', () => {
(fieldListObj.element.querySelector('.e-toggle-field-list') as HTMLElement).click();
expect(true).toBe(true);
});
it('check field list dialog with targetID', () => {
expect(!isNullOrUndefined(elem1.querySelector('.e-pivotfieldlist-wrapper')));
});
it('check calculated field', () => {
let controlWrapper: HTMLElement = elem1.querySelector('.e-pivotfieldlist-wrapper');
(controlWrapper.querySelector('.e-calculated-field') as HTMLElement).click();
expect(!isNullOrUndefined(elem1.querySelector('.e-pivotfieldlist-wrapper')));
expect(elem1.querySelector('.e-pivotfieldlist-wrapper').classList.contains('e-popup-open'));
});
it('check filter popup', (done: Function) => {
let leftAxisPanel: HTMLElement = fieldListObj.axisTableModule.axisTable.querySelector('.e-left-axis-fields');
let pivotButtons: HTMLElement[] = [].slice.call(leftAxisPanel.querySelectorAll('.e-pivot-button'));
expect(pivotButtons.length).toBeGreaterThan(0);
((pivotButtons[0]).querySelector('.e-btn-filter') as HTMLElement).click();
jasmine.DEFAULT_TIMEOUT_INTERVAL = 10000;
setTimeout(() => {
expect(pivotCommon.filterDialog.dialogPopUp.element.classList.contains('e-popup-open')).toBe(true);
done();
}, 1000);
});
it('check close field list', () => {
let controlWrapper: HTMLElement = elem1.querySelector('.e-pivotfieldlist-wrapper');
(controlWrapper.querySelector('.e-cancel-btn') as HTMLElement).click();
expect(elem1.querySelector('.e-pivotfieldlist-wrapper').classList.contains('e-popup-close'));
});
});
describe('Field List with target ID', () => {
let fieldListObj: PivotFieldList;
let pivotCommon: PivotCommon;
let elem: HTMLElement = createElement('div', { id: 'PivotFieldList', styles: 'height:400px;width:60%' });
let elem1: HTMLElement = createElement('div', { id: 'PivotFieldList1', styles: 'height:400px;width:60%' });
afterAll(() => {
if (fieldListObj) {
fieldListObj.destroy();
}
remove(elem);
remove(elem1);
});
beforeAll(() => {
if (document.getElementById(elem.id)) {
remove(document.getElementById(elem.id));
}
if (document.getElementById(elem1.id)) {
remove(document.getElementById(elem1.id));
}
document.body.appendChild(elem);
document.body.appendChild(elem1);
fieldListObj = new PivotFieldList(
{
dataSourceSettings: {
dataSource: pivot_dataset as IDataSet[],
expandAll: false,
enableSorting: true,
sortSettings: [{ name: 'company', order: 'Descending' }],
filterSettings: [{ name: 'name', type: 'Include', items: ['Knight Wooten'] },
{ name: 'company', type: 'Exclude', items: ['NIPAZ'] },
{ name: 'gender', type: 'Include', items: ['male'] }],
rows: [{ name: 'company' }, { name: 'state' }],
columns: [{ name: 'name' }],
values: [{ name: 'balance' }, { name: 'quantity' }], filters: [{ name: 'gender' }]
},
renderMode: 'Popup',
target: '#' + elem1.id
});
fieldListObj.appendTo('#PivotFieldList');
util.disableDialogAnimation(fieldListObj.dialogRenderer.fieldListDialog);
pivotCommon = fieldListObj.pivotCommon;
});
let persistdata: string;
it('control class testing', () => {
expect(fieldListObj.element.classList.contains('e-pivotfieldlist')).toEqual(true);
});
it('check field list icon', () => {
(fieldListObj.element.querySelector('.e-toggle-field-list') as HTMLElement).click();
expect(true).toBe(true);
});
it('check field list dialog with targetID', () => {
expect(!isNullOrUndefined(elem1.querySelector('.e-pivotfieldlist-wrapper')));
});
it('check calculated field', () => {
let controlWrapper: HTMLElement = elem1.querySelector('.e-pivotfieldlist-wrapper');
(controlWrapper.querySelector('.e-calculated-field') as HTMLElement).click();
expect(!isNullOrUndefined(elem1.querySelector('.e-pivotfieldlist-wrapper')));
expect(elem1.querySelector('.e-pivotfieldlist-wrapper').classList.contains('e-popup-open'));
});
it('check filter popup', (done: Function) => {
let leftAxisPanel: HTMLElement = fieldListObj.axisTableModule.axisTable.querySelector('.e-left-axis-fields');
let pivotButtons: HTMLElement[] = [].slice.call(leftAxisPanel.querySelectorAll('.e-pivot-button'));
expect(pivotButtons.length).toBeGreaterThan(0);
((pivotButtons[0]).querySelector('.e-btn-filter') as HTMLElement).click();
jasmine.DEFAULT_TIMEOUT_INTERVAL = 10000;
setTimeout(() => {
expect(pivotCommon.filterDialog.dialogPopUp.element.classList.contains('e-popup-open')).toBe(true);
done();
}, 1000);
});
it('check close field list', () => {
let controlWrapper: HTMLElement = elem1.querySelector('.e-pivotfieldlist-wrapper');
(controlWrapper.querySelector('.e-cancel-btn') as HTMLElement).click();
expect(elem1.querySelector('.e-pivotfieldlist-wrapper').classList.contains('e-popup-close'));
});
});
it('memory leak', () => {
profile.sample();
let average: any = inMB(profile.averageChange);
//Check average change in memory samples to not be over 10MB
expect(average).toBeLessThan(10);
let memory: any = inMB(getMemoryProfile());
//Check the final memory usage against the first usage, there should be little change if everything was properly deallocated
expect(memory).toBeLessThan(profile.samples[0] + 0.25);
});
}); | the_stack |
import { VariantProps } from "@stitches/core";
import { css } from "../../styled-system/stitches.config";
import { SystemStyleObject } from "../../styled-system/types";
/* -------------------------------------------------------------------------------------------------
* Tabs
* -----------------------------------------------------------------------------------------------*/
/**
 * Root Tabs recipe: `display: block` when horizontal, `display: flex` when
 * vertical (so the list and panels are laid out on the same row).
 */
export const tabsStyles = css({
  variants: {
    orientation: {
      horizontal: {
        display: "block",
      },
      vertical: {
        display: "flex",
      },
    },
  },
});
export type TabsVariants = VariantProps<typeof tabsStyles>;
/* -------------------------------------------------------------------------------------------------
* TabList
* -----------------------------------------------------------------------------------------------*/
/**
 * TabList recipe. The `variant` choices only set border style/color here;
 * which edge actually gets a border width is decided per orientation in the
 * compound variants below.
 */
export const tabListStyles = css({
  display: "flex",
  color: "$neutral11",
  fontWeight: "$normal",
  variants: {
    variant: {
      underline: {
        borderWidth: 0,
        borderStyle: "solid",
        borderColor: "$neutral7",
      },
      outline: {
        borderStyle: "solid",
        borderColor: "$neutral7",
      },
      cards: {
        borderStyle: "solid",
        borderColor: "$neutral7",
      },
      pills: {
        gap: "$1_5",
      },
    },
    alignment: {
      start: {
        justifyContent: "flex-start",
      },
      end: {
        justifyContent: "flex-end",
      },
      center: {
        justifyContent: "center",
      },
      apart: {
        justifyContent: "space-between",
      },
    },
    orientation: {
      horizontal: {
        flexDirection: "row",
      },
      vertical: {
        flexDirection: "column",
      },
    },
  },
  compoundVariants: [
    /* -------------------------------------------------------------------------------------------------
     * Variant - underline + orientation
     * -----------------------------------------------------------------------------------------------*/
    {
      variant: "underline",
      orientation: "horizontal",
      css: {
        borderBottomWidth: "1px",
      },
    },
    {
      variant: "underline",
      orientation: "vertical",
      css: {
        borderInlineEndWidth: "1px",
      },
    },
    /* -------------------------------------------------------------------------------------------------
     * Variant - outline + orientation
     * -----------------------------------------------------------------------------------------------*/
    // The -1px margin pulls the list over the adjacent edge so a selected
    // tab's own border can visually replace the list border.
    {
      variant: "outline",
      orientation: "horizontal",
      css: {
        mb: "-1px",
        borderBottomWidth: "1px",
      },
    },
    {
      variant: "outline",
      orientation: "vertical",
      css: {
        marginInlineEnd: "-1px",
        borderInlineEndWidth: "1px",
      },
    },
    /* -------------------------------------------------------------------------------------------------
     * Variant - cards + orientation
     * -----------------------------------------------------------------------------------------------*/
    {
      variant: "cards",
      orientation: "horizontal",
      css: {
        mb: "-1px",
        borderBottomWidth: "1px",
      },
    },
    {
      variant: "cards",
      orientation: "vertical",
      css: {
        marginInlineEnd: "-1px",
        borderInlineEndWidth: "1px",
      },
    },
  ],
});
export type TabListVariants = VariantProps<typeof tabListStyles>;
/* -------------------------------------------------------------------------------------------------
* Tab
* -----------------------------------------------------------------------------------------------*/
/**
 * Builds a style object that applies `color` to a tab only while it is
 * selected (`aria-selected="true"`).
 */
function createSelectedColorVariant(color: string): SystemStyleObject {
  const selectedSelector = "&[aria-selected='true']";
  return { [selectedSelector]: { color } };
}
/**
 * Builds the selected-state styles for the "pills" variant: text color and
 * background while selected, plus a darker background on hover.
 */
function createPillsAndColorVariant(config: {
  color: string;
  bgColor: string;
  bgColorHover: string;
}): SystemStyleObject {
  const { color, bgColor, bgColorHover } = config;
  return {
    "&[aria-selected='true']": {
      color,
      backgroundColor: bgColor,
    },
    "&[aria-selected='true']:hover": {
      backgroundColor: bgColorHover,
    },
  };
}
/**
 * Recipe for a single tab trigger.
 *
 * Base styles reset the native button appearance and define focus/disabled
 * states. The selected-state coloring is produced per (variant, colorScheme)
 * pair via the two helpers above, and the border/margin geometry per
 * (variant, orientation) pair, all in `compoundVariants`.
 */
export const tabStyles = css({
  appearance: "none",
  display: "flex",
  alignItems: "center",
  justifyContent: "center",
  outline: "none",
  border: "$none",
  backgroundColor: "transparent",
  px: "$4",
  color: "inherit",
  fontWeight: "inherit",
  cursor: "pointer",
  transitionProperty:
    "background-color, border-color, color, fill, stroke, opacity, box-shadow, transform",
  transitionDuration: "250ms",
  "&:focus": {
    // Raise the focused tab so its focus ring isn't clipped by siblings.
    zIndex: 1,
    outline: "none",
    boxShadow: "$outline",
  },
  "&:disabled": {
    opacity: 0.4,
    cursor: "not-allowed",
  },
  variants: {
    variant: {
      underline: {
        borderWidth: 0,
        borderStyle: "solid",
        borderColor: "transparent",
        "&[aria-selected='true']": {
          borderColor: "currentColor",
        },
        "&:active": {
          backgroundColor: "$neutral4",
        },
      },
      outline: {
        borderWidth: "1px",
        borderStyle: "solid",
        borderColor: "transparent",
        "&[aria-selected='true']": {
          borderColor: "inherit",
        },
      },
      cards: {
        borderWidth: "1px",
        borderStyle: "solid",
        borderColor: "inherit",
        backgroundColor: "$neutral3",
        "&[aria-selected='true']": {
          borderColor: "inherit",
          backgroundColor: "$loContrast",
        },
      },
      pills: {
        borderRadius: "$sm",
        "&:hover": {
          backgroundColor: "$neutral3",
        },
        "&:hover:disabled": {
          backgroundColor: "transparent",
        },
      },
    },
    colorScheme: {
      // Empty on purpose: each scheme only has an effect through the
      // compoundVariants below.
      primary: {},
      accent: {},
      neutral: {},
      success: {},
      info: {},
      warning: {},
      danger: {},
    },
    size: {
      sm: {
        py: "$1",
        fontSize: "$sm",
      },
      md: {
        py: "$2",
        fontSize: "$base",
      },
      lg: {
        py: "$3",
        fontSize: "$lg",
      },
    },
    orientation: {
      horizontal: {},
      vertical: {},
    },
    fitted: {
      true: {
        flex: 1,
      },
    },
  },
  compoundVariants: [
    /* -------------------------------------------------------------------------------------------------
     * Variant - underline + colorScheme
     * -----------------------------------------------------------------------------------------------*/
    {
      variant: "underline",
      colorScheme: "primary",
      css: createSelectedColorVariant("$primary11"),
    },
    {
      variant: "underline",
      colorScheme: "accent",
      css: createSelectedColorVariant("$accent11"),
    },
    {
      variant: "underline",
      colorScheme: "neutral",
      css: createSelectedColorVariant("$neutral12"),
    },
    {
      variant: "underline",
      colorScheme: "success",
      css: createSelectedColorVariant("$success11"),
    },
    {
      variant: "underline",
      colorScheme: "info",
      css: createSelectedColorVariant("$info11"),
    },
    {
      variant: "underline",
      colorScheme: "warning",
      css: createSelectedColorVariant("$warning11"),
    },
    {
      variant: "underline",
      colorScheme: "danger",
      css: createSelectedColorVariant("$danger11"),
    },
    /* -------------------------------------------------------------------------------------------------
     * Variant - outline + colorScheme
     * -----------------------------------------------------------------------------------------------*/
    {
      variant: "outline",
      colorScheme: "primary",
      css: createSelectedColorVariant("$primary11"),
    },
    {
      variant: "outline",
      colorScheme: "accent",
      css: createSelectedColorVariant("$accent11"),
    },
    {
      variant: "outline",
      colorScheme: "neutral",
      css: createSelectedColorVariant("$neutral12"),
    },
    {
      variant: "outline",
      colorScheme: "success",
      css: createSelectedColorVariant("$success11"),
    },
    {
      variant: "outline",
      colorScheme: "info",
      css: createSelectedColorVariant("$info11"),
    },
    {
      variant: "outline",
      colorScheme: "warning",
      css: createSelectedColorVariant("$warning11"),
    },
    {
      variant: "outline",
      colorScheme: "danger",
      css: createSelectedColorVariant("$danger11"),
    },
    /* -------------------------------------------------------------------------------------------------
     * Variant - cards + colorScheme
     * -----------------------------------------------------------------------------------------------*/
    {
      variant: "cards",
      colorScheme: "primary",
      css: createSelectedColorVariant("$primary11"),
    },
    {
      variant: "cards",
      colorScheme: "accent",
      css: createSelectedColorVariant("$accent11"),
    },
    {
      variant: "cards",
      colorScheme: "neutral",
      css: createSelectedColorVariant("$neutral12"),
    },
    {
      variant: "cards",
      colorScheme: "success",
      css: createSelectedColorVariant("$success11"),
    },
    {
      variant: "cards",
      colorScheme: "info",
      css: createSelectedColorVariant("$info11"),
    },
    {
      variant: "cards",
      colorScheme: "warning",
      css: createSelectedColorVariant("$warning11"),
    },
    {
      variant: "cards",
      colorScheme: "danger",
      css: createSelectedColorVariant("$danger11"),
    },
    /* -------------------------------------------------------------------------------------------------
     * Variant - pills + colorScheme
     * -----------------------------------------------------------------------------------------------*/
    {
      variant: "pills",
      colorScheme: "primary",
      css: createPillsAndColorVariant({
        color: "$primary11",
        bgColor: "$primary3",
        bgColorHover: "$primary4",
      }),
    },
    {
      variant: "pills",
      colorScheme: "accent",
      css: createPillsAndColorVariant({
        color: "$accent11",
        bgColor: "$accent3",
        bgColorHover: "$accent4",
      }),
    },
    {
      variant: "pills",
      colorScheme: "neutral",
      css: createPillsAndColorVariant({
        color: "$neutral12",
        bgColor: "$neutral3",
        bgColorHover: "$neutral4",
      }),
    },
    {
      variant: "pills",
      colorScheme: "success",
      css: createPillsAndColorVariant({
        color: "$success11",
        bgColor: "$success3",
        bgColorHover: "$success4",
      }),
    },
    {
      variant: "pills",
      colorScheme: "info",
      css: createPillsAndColorVariant({
        color: "$info11",
        bgColor: "$info3",
        bgColorHover: "$info4",
      }),
    },
    {
      variant: "pills",
      colorScheme: "warning",
      css: createPillsAndColorVariant({
        color: "$warning11",
        bgColor: "$warning3",
        bgColorHover: "$warning4",
      }),
    },
    {
      variant: "pills",
      colorScheme: "danger",
      css: createPillsAndColorVariant({
        color: "$danger11",
        bgColor: "$danger3",
        bgColorHover: "$danger4",
      }),
    },
    /* -------------------------------------------------------------------------------------------------
     * Variant - underline + orientation
     * -----------------------------------------------------------------------------------------------*/
    {
      variant: "underline",
      orientation: "horizontal",
      css: {
        borderBottomWidth: "2px",
        marginBottom: "-1px",
      },
    },
    {
      variant: "underline",
      orientation: "vertical",
      css: {
        borderInlineEndWidth: "2px",
        marginInlineEnd: "-1px",
      },
    },
    /* -------------------------------------------------------------------------------------------------
     * Variant - outline + orientation
     * -----------------------------------------------------------------------------------------------*/
    {
      variant: "outline",
      orientation: "horizontal",
      css: {
        mb: "-1px",
        borderTopRadius: "$sm",
        "&[aria-selected='true']": {
          borderBottomColor: "$loContrast",
        },
      },
    },
    {
      variant: "outline",
      orientation: "vertical",
      css: {
        marginInlineEnd: "-1px",
        // NOTE(review): these use explicitly scoped tokens ($radii$sm,
        // $colors$loContrast) while the horizontal branch above uses the
        // shorthand forms ($sm, $loContrast) — presumably equivalent after
        // token resolution; confirm before normalizing.
        borderStartRadius: "$radii$sm",
        "&[aria-selected='true']": {
          borderInlineEndColor: "$colors$loContrast",
        },
      },
    },
    /* -------------------------------------------------------------------------------------------------
     * Variant - cards + orientation
     * -----------------------------------------------------------------------------------------------*/
    {
      variant: "cards",
      orientation: "horizontal",
      css: {
        mb: "-1px",
        borderBottomWidth: "1px",
        "&:not(:last-of-type)": {
          marginInlineEnd: "-1px",
        },
        "&[aria-selected='true']": {
          borderTopColor: "currentColor",
          borderBottomColor: "transparent",
        },
      },
    },
    {
      variant: "cards",
      orientation: "vertical",
      css: {
        marginInlineEnd: "-1px",
        borderInlineEndWidth: "1px",
        "&:not(:last-of-type)": {
          mb: "-1px",
        },
        "&[aria-selected='true']": {
          borderInlineStartColor: "currentColor",
          borderInlineEndColor: "transparent",
        },
      },
    },
  ],
});
export type TabVariants = VariantProps<typeof tabStyles>;
/* -------------------------------------------------------------------------------------------------
* TabPanel
* -----------------------------------------------------------------------------------------------*/
/** TabPanel recipe: suppress the focus outline and apply default padding. */
export const tabPanelStyles = css({
  outline: "none",
  padding: "$4",
});
import { updateSituation } from 'Actions/actions'
import Aide from 'Components/conversation/Aide'
import RuleInput from 'Components/conversation/RuleInput'
import { Condition } from 'Components/EngineValue'
import PageHeader from 'Components/PageHeader'
import PreviousSimulationBanner from 'Components/PreviousSimulationBanner'
import 'Components/TargetSelection.css'
import { FromTop } from 'Components/ui/animate'
import Warning from 'Components/ui/WarningBlock'
import useSimulationConfig from 'Components/utils/useSimulationConfig'
import { useCallback } from 'react'
import { Trans } from 'react-i18next'
import { useDispatch, useSelector } from 'react-redux'
import { RootState } from 'Reducers/rootReducer'
import { situationSelector } from 'Selectors/simulationSelectors'
import styled from 'styled-components'
import { TrackPage } from '../../../ATInternetTracking'
import { CompanySection } from '../Home'
import simulationConfig from './config.yaml'
import { ExplicationsResultatFiscal } from './ExplicationResultatFiscal'
import { SimpleField, SubSection } from './Fields'
import ResultatsSimples from './RésultatSimple'
import ResultatsParFormulaire from './RésultatsParFormulaire'
import illustration from './undraw_fill_in_mie5.svg'
/**
 * Nous avons proposé une nouvelle vision des résultats, plus complète, avec une proposition d'aide pour
 * l'ensemble des cases liées aux cotisations sociales.
 *
 * Hors de propos pour 2021, étant donné que cela prendrait beaucoup de temps à valider par la DGFiP.
 * En attendant, on propose la version "simple" (mais moins utile).
 *
 * Le but est de faire valider la version plus complète pour la déclaration de revenu 2021.
 */
// Feature flag: enabled when "next" appears anywhere in the URL query string
// (plain substring match, not a parsed query parameter).
const FEATURE_FLAG_RESULTATS_COMPLETS =
  document.location.search.includes('next')
/**
 * Assistant page for the 2020 income declaration of self-employed workers.
 * Renders the intro/warning block, the imposition questions, then the form
 * sections (company, activity, personal situation, exonerations,
 * international) and finally either the simple results or — behind the
 * FEATURE_FLAG_RESULTATS_COMPLETS flag — the per-form results.
 */
export default function AideDéclarationIndépendant() {
  useSimulationConfig(simulationConfig)
  const company = useSelector(
    (state: RootState) => state.inFranceApp.existingCompany
  )
  const situation = useSelector(situationSelector)
  return (
    <>
      <Trans i18nKey="aide-déclaration-indépendant.description">
        <PageHeader picture={illustration}>
          <p className="ui__ lead">
            Cet outil est une aide à la déclaration de revenus à destination des{' '}
            <strong>travailleurs indépendants</strong>. Il vous permet de
            connaître le montant des charges sociales déductibles.
          </p>
          <p className="ui__ notice">
            Vous restez entièrement responsable d'éventuelles omissions ou
            inexactitudes dans votre déclaration.
          </p>
        </PageHeader>
        <Warning localStorageKey="aide-déclaration-indépendant.warning">
          <h3>Cet outil vous concerne si vous êtes dans le cas suivant :</h3>
          <ul>
            <li>
              vous cotisez au régime général des travailleurs indépendants
            </li>
          </ul>
          <h3>
            Il ne vous concerne pas si vous êtes dans un des cas suivants :
          </h3>
          <ul>
            <li>
              vous exercez une activité libérale relevant d’un régime de
              retraite des professions libérales en comptabilité d'engagement
            </li>
            <li>votre entreprise est domiciliée dans les DOM</li>
          </ul>
        </Warning>
        <PreviousSimulationBanner />
        <h2>Imposition</h2>
        <p className="ui__ notice">
          Ces quelques questions permettent de déterminer le type de déclaration
          à remplir, ainsi que les modalités de calcul des cotisations sociales.
        </p>
      </Trans>
      {/* Analytics: "commence" once the user has answered something, "accueil" otherwise. */}
      {Object.keys(situation).length ? (
        <TrackPage name="commence" />
      ) : (
        <TrackPage name="accueil" />
      )}
      <ImpositionSection />
      <FromTop>
        <FormBlock>
          <Condition expression="aide déclaration revenu indépendant 2020 . comptabilité . engagement">
            <Trans i18nKey="aide-déclaration-indépendant.entreprise.titre">
              <h2>Entreprise et activité</h2>
            </Trans>
            <div>
              {!company && (
                <p className="ui__ notice">
                  <Trans i18nKey="aide-déclaration-indépendant.entreprise.description">
                    <strong>Facultatif : </strong>Vous pouvez renseigner votre
                    entreprise pour pré-remplir le formulaire
                  </Trans>
                </p>
              )}
              <CompanySection company={company} />
            </div>
            <SimpleField
              dottedName="entreprise . date de création"
              showSuggestions={false}
            />
            {/* Guard: the assistant only applies to companies active in 2020. */}
            <Condition expression="entreprise . date de création > 31/12/2020">
              <small
                css={`
                  color: #ff2d96;
                `}
              >
                Cette aide à la déclaration concerne uniquement les entreprises
                déjà en activité en 2020
              </small>
            </Condition>
            <SubSection dottedName="aide déclaration revenu indépendant 2020 . nature de l'activité" />
            {/* PLNR */}
            <SimpleField dottedName="entreprise . activité . débit de tabac" />
            <SimpleField dottedName="dirigeant . indépendant . cotisations et contributions . déduction tabac" />
            <SimpleField dottedName="dirigeant . indépendant . PL . régime général . taux spécifique retraite complémentaire" />
            <h2>
              <Trans>Situation personnelle</Trans>
            </h2>
            <SimpleField dottedName="situation personnelle . RSA" />
            <Condition expression="entreprise . imposition . IR . micro-fiscal = non">
              <SubSection dottedName="dirigeant . indépendant . IJSS" />
            </Condition>
            <SubSection dottedName="dirigeant . indépendant . conjoint collaborateur" />
            <h2>
              <Trans>Exonérations</Trans>
            </h2>
            <SimpleField dottedName="aide déclaration revenu indépendant 2020 . ACRE" />
            <SimpleField dottedName="établissement . ZFU" />
            <SubSection hideTitle dottedName="entreprise . effectif . seuil" />
            <SubSection
              dottedName="dirigeant . indépendant . cotisations et contributions . exonérations"
              hideTitle
            />
            {FEATURE_FLAG_RESULTATS_COMPLETS && (
              <SubSection dottedName="dirigeant . indépendant . cotisations facultatives" />
            )}
            <h2>
              <Trans>International</Trans>
            </h2>
            <SimpleField dottedName="situation personnelle . domiciliation fiscale à l'étranger" />
            <Condition expression="entreprise . imposition . IR . micro-fiscal = non">
              <SubSection
                dottedName="dirigeant . indépendant . revenus étrangers"
                hideTitle
              />
            </Condition>
            <SubSection dottedName="aide déclaration revenu indépendant 2020 . réduction covid" />
          </Condition>
          <Condition expression="aide déclaration revenu indépendant 2020 . cotisations payées">
            <SubSection dottedName="aide déclaration revenu indépendant 2020 . cotisations payées" />
            <SimpleField dottedName="aide déclaration revenu indépendant 2020 . nature de l'activité" />
            <SimpleField dottedName="dirigeant . indépendant . conjoint collaborateur" />
            <SubSection dottedName="dirigeant . indépendant . cotisations facultatives" />
            {/* We can't use a subsection here cause revenu étrangers is not missing when CSG is replaced */}
            <h3>
              <Trans>Revenus étranger</Trans>
            </h3>
            <SimpleField dottedName="dirigeant . indépendant . revenus étrangers" />
            <Condition expression="dirigeant . indépendant . revenus étrangers">
              <SimpleField dottedName="dirigeant . indépendant . revenus étrangers . montant" />
            </Condition>
          </Condition>
          <Condition expression="aide déclaration revenu indépendant 2020 . cotisations payées version simple">
            <SimpleField dottedName="aide déclaration revenu indépendant 2020 . cotisations payées version simple . cotisations sociales" />
            <SimpleField dottedName="aide déclaration revenu indépendant 2020 . cotisations payées version simple . CSG déductible et CFP" />
          </Condition>
        </FormBlock>
      </FromTop>
      {/* Behind the "next" flag: richer per-form results; otherwise the validated simple results. */}
      {FEATURE_FLAG_RESULTATS_COMPLETS ? (
        <>
          <SubSection dottedName="aide déclaration revenu indépendant 2020 . régime d'imposition" />
          <Condition
            expression={{
              'une de ces conditions': [
                "aide déclaration revenu indépendant 2020 . régime d'imposition . réel",
                "aide déclaration revenu indépendant 2020 . régime d'imposition . déclaration contrôlée",
                'entreprise . imposition . IR . micro-fiscal',
              ],
            }}
          >
            <TrackPage name="simulation terminée" />
            <ResultatsParFormulaire />
          </Condition>
        </>
      ) : (
        <ResultatsSimples />
      )}
      <Aide />
    </>
  )
}
/**
 * Form section asking for the company's tax regime (IR / IS) and the
 * corresponding 2020 income figures.
 *
 * - Under IR with micro-fiscal: asks for the gross revenue, split by activity
 *   category (vente/restauration/hébergement, service BIC, service BNC).
 * - Under IR without micro-fiscal: asks for the 2020 "résultat fiscal".
 * - Under IS: asks for the director's net remuneration.
 *
 * All answers are written to the global simulation situation via Redux.
 */
function ImpositionSection() {
	const dispatch = useDispatch()
	const situation = useSelector(situationSelector)
	// Change handler shared by the RuleInputs below. RuleInput's onChange
	// passes (value, dottedName) in this order; updateSituation expects
	// (dottedName, value), hence the swap.
	const setSituation = useCallback(
		(value, dottedName) => {
			dispatch(updateSituation(dottedName, value))
		},
		[dispatch]
	)
	return (
		<>
			<SimpleField dottedName="entreprise . imposition" />
			{/* The rest of the section only appears once a regime is chosen. */}
			{situation['entreprise . imposition'] != null && (
				<>
					{/* <WhenApplicable dottedName="aide déclaration revenu indépendant 2020 . comptabilité"> */}
					<SimpleField dottedName="aide déclaration revenu indépendant 2020 . comptabilité" />
					{/* </WhenApplicable> */}
					<Condition
						expression={
							FEATURE_FLAG_RESULTATS_COMPLETS
								? 'oui'
								: 'aide déclaration revenu indépendant 2020 . cotisations payées version simple = non'
						}
					>
						{/* Keying on the regime restarts the appear animation when it changes. */}
						<FromTop key={situation['entreprise . imposition']}>
							<Condition expression="entreprise . imposition . IR">
								<SimpleField dottedName="entreprise . imposition . IR . micro-fiscal" />
								<Condition expression="entreprise . imposition . IR . micro-fiscal">
									<h2>
										Quel est votre chiffre d'affaires hors taxes en 2020 ?
									</h2>
									<p className="ui__ notice">
										Indiquez le montant hors taxes de votre chiffre d’affaires
										ou de vos recettes bruts (avant déduction de l’abattement
										forfaitaire pour frais et charges) et avant déduction des
										exonérations fiscales dont vous avez bénéficié
									</p>
									<SimpleField dottedName="entreprise . chiffre d'affaires . vente restauration hébergement" />
									<SimpleField dottedName="entreprise . chiffre d'affaires . service BIC" />
									<SimpleField dottedName="entreprise . chiffre d'affaires . service BNC" />
								</Condition>
								<Condition expression="entreprise . imposition . IR . micro-fiscal = non">
									<h2>
										Quel est votre résultat fiscal en 2020 ?<br />
										<small>
											Charges sociales et exonérations fiscales non incluses{' '}
											<ExplicationsResultatFiscal />
										</small>
									</h2>
									<p className="ui__ notice">
										Le résultat fiscal correspond aux produits moins les
										charges. Il peut être positif (bénéfice) ou négatif
										(déficit).
									</p>
									<BigInput>
										<RuleInput
											dottedName="dirigeant . rémunération . totale"
											onChange={setSituation}
											autoFocus
										/>
									</BigInput>
								</Condition>
							</Condition>
							<Condition expression="entreprise . imposition . IS">
								<h2>
									Quel est le montant net de votre rémunération en 2020 ?
									<br />
									<small>Sans tenir compte des charges sociales</small>
								</h2>
								<BigInput>
									<RuleInput
										dottedName="dirigeant . rémunération . nette"
										onChange={setSituation}
										autoFocus
									/>
								</BigInput>
							</Condition>
						</FromTop>
					</Condition>
				</>
			)}
		</>
	)
}
// Spacing wrapper for an individual question of the form.
// NOTE: no CSS comments are added inside the template literals below — their
// content is the runtime style string passed to styled-components.
export const Question = styled.div`
	margin-top: 1em;
`
// Enlarged input wrapper used for the main monetary amounts.
const BigInput = styled.div`
	font-size: 130%;
`
// Overall layout/typography of the declaration form.
const FormBlock = styled.section`
	max-width: 500px;
	padding: 0;
	h3 {
		margin-top: 2rem;
	}
	h2 {
		border-top: 1px solid var(--lighterColor);
		padding-top: 2rem;
	}
	select,
	input[type='text'] {
		font-size: 1.05em;
		padding: 5px 10px;
	}
	ul {
		padding: 0;
		margin: 0;
	}
`
import { expect } from 'chai';
import { Observable, NEVER, of, ObjectUnsubscribedError, EMPTY } from 'rxjs';
import { windowToggle, tap, mergeMap } from 'rxjs/operators';
import { TestScheduler } from 'rxjs/testing';
import { observableMatcher } from '../helpers/observableMatcher';
/** @test {windowToggle} */
describe('windowToggle', () => {
let rxTestScheduler: TestScheduler;
beforeEach(() => {
rxTestScheduler = new TestScheduler(observableMatcher);
});
  // Basic behavior: each notification on e2 opens a window; the observable
  // returned by the closing selector (e3) closes it on its first emission.
  it('should emit windows governed by openings and closings', () => {
    rxTestScheduler.run(({ hot, cold, expectObservable, expectSubscriptions }) => {
      const e2 = cold(' ----w--------w--------w--|');
      const e2subs = ' ^------------------------!';
      const e3 = cold(' -----x ');
      // -----x
      // -----x
      const e3subs = [
        ' ----^----! ',
        ' -------------^----! ',
        ' ----------------------^--!',
      ];
      const e1 = hot(' --1--2--^-a--b--c--d--e--f--g--h-|');
      const e1subs = ' ^------------------------!';
      const expected = ' ----x--------y--------z--|';
      const x = cold(' -b--c| ');
      const y = cold(' -e--f| ');
      const z = cold(' -h-|');
      const values = { x, y, z };
      const result = e1.pipe(windowToggle(e2, () => e3));
      expectObservable(result).toBe(expected, values);
      expectSubscriptions(e1.subscriptions).toBe(e1subs);
      expectSubscriptions(e2.subscriptions).toBe(e2subs);
      expectSubscriptions(e3.subscriptions).toBe(e3subs);
    });
  });
  // The closing selector receives the value emitted by the opening notifier
  // ('x' here), which is asserted inside the selector itself.
  it('should emit windows that are opened by an observable from the first argument and closed by an observable returned by the function in the second argument', () => {
    rxTestScheduler.run(({ hot, cold, expectObservable, expectSubscriptions }) => {
      const e2 = cold(' --------x-------x-------x--|');
      const e2subs = ' ^--------------------------!';
      const e3 = cold(' ----------(x|) ');
      // ----------(x|)
      // ----------(x|)
      const e3subs = [
        ' --------^---------! ',
        ' ----------------^---------! ',
        ' ------------------------^--!',
      ];
      const e1 = hot('--1--2--^--a--b--c--d--e--f--g--h--|');
      const e1subs = ' ^--------------------------!';
      const expected = ' --------x-------y-------z--|';
      const x = cold(' -c--d--e--(f|) ');
      const y = cold(' --f--g--h-| ');
      const z = cold(' ---|');
      const values = { x, y, z };
      const source = e1.pipe(
        windowToggle(e2, (value: string) => {
          expect(value).to.equal('x');
          return e3;
        })
      );
      expectObservable(source).toBe(expected, values);
      expectSubscriptions(e1.subscriptions).toBe(e1subs);
      expectSubscriptions(e2.subscriptions).toBe(e2subs);
      expectSubscriptions(e3.subscriptions).toBe(e3subs);
    });
  });
  // Each opening gets its own (different) cold closing notifier, selected by
  // the incrementing index i.
  it('should emit windows using varying cold closings', () => {
    rxTestScheduler.run(({ hot, cold, expectObservable, expectSubscriptions }) => {
      const e2 = cold(' --x-----------y--------z---| ');
      const e2subs = ' ^--------------------------! ';
      const close = [
        cold(' ---------------s--| '),
        cold(' ----(s|) '),
        cold(' ---------------(s|)'),
      ];
      const closeSubs = [
        ' --^--------------! ',
        ' --------------^---! ',
        ' -----------------------^-----------! ',
      ];
      const e1 = hot('--a--^---b---c---d---e---f---g---h------| ');
      const e1subs = ' ^----------------------------------! ';
      const expected = ' --x-----------y--------z-----------| ';
      const x = cold(' --b---c---d---e| ');
      const y = cold(' --e-| ');
      const z = cold(' -g---h------| ');
      const values = { x, y, z };
      let i = 0;
      const result = e1.pipe(windowToggle(e2, () => close[i++]));
      expectObservable(result).toBe(expected, values);
      expectSubscriptions(e1.subscriptions).toBe(e1subs);
      expectSubscriptions(e2.subscriptions).toBe(e2subs);
      expectSubscriptions(close[0].subscriptions).toBe(closeSubs[0]);
      expectSubscriptions(close[1].subscriptions).toBe(closeSubs[1]);
      expectSubscriptions(close[2].subscriptions).toBe(closeSubs[2]);
    });
  });
  // Same as above but with hot closing notifiers: only emissions after the
  // window opens (i.e. after subscription) count towards closing it.
  it('should emit windows using varying hot closings', () => {
    rxTestScheduler.run(({ hot, cold, expectObservable, expectSubscriptions }) => {
      const e2 = cold(' --x-----------y--------z---| ');
      const e2subs = ' ^--------------------------! ';
      const closings = [
        hot(' -1--^----------------s-| '),
        hot(' -----3----4-------(s|) '),
        hot(' -------3----4-------5----------------s|'),
      ];
      const closingSubs = [
        ' --^--------------! ',
        ' --------------^---! ',
        ' -----------------------^-----------! ',
      ];
      const e1 = hot('--a--^---b---c---d---e---f---g---h------| ');
      const e1subs = ' ^----------------------------------! ';
      const expected = ' --x-----------y--------z-----------| ';
      const x = cold(' --b---c---d---e| ');
      const y = cold(' --e-| ');
      const z = cold(' -g---h------| ');
      const values = { x, y, z };
      let i = 0;
      const result = e1.pipe(windowToggle(e2, () => closings[i++]));
      expectObservable(result).toBe(expected, values);
      expectSubscriptions(e1.subscriptions).toBe(e1subs);
      expectSubscriptions(e2.subscriptions).toBe(e2subs);
      expectSubscriptions(closings[0].subscriptions).toBe(closingSubs[0]);
      expectSubscriptions(closings[1].subscriptions).toBe(closingSubs[1]);
      expectSubscriptions(closings[2].subscriptions).toBe(closingSubs[2]);
    });
  });
  // A closing notifier that completes without emitting never closes its
  // window; the window then runs until the source completes.
  it('should emit windows using varying empty delayed closings', () => {
    rxTestScheduler.run(({ hot, cold, expectObservable, expectSubscriptions }) => {
      const e2 = cold(' --x-----------y--------z---| ');
      const e2subs = ' ^--------------------------! ';
      const close = [
        cold(' ---------------| '),
        cold(' ----| '),
        cold(' ---------------|'),
      ];
      const e1 = hot('--a--^---b---c---d---e---f---g---h------| ');
      const e1subs = ' ^----------------------------------! ';
      const expected = ' --x-----------y--------z-----------| ';
      const x = cold(' --b---c---d---e---f---g---h------| ');
      const y = cold(' --e---f---g---h------| ');
      const z = cold(' -g---h------| ');
      const values = { x, y, z };
      let i = 0;
      const result = e1.pipe(windowToggle(e2, () => close[i++]));
      expectObservable(result).toBe(expected, values);
      expectSubscriptions(e1.subscriptions).toBe(e1subs);
      expectSubscriptions(e2.subscriptions).toBe(e2subs);
    });
  });
  // Unsubscribing the outer result early must unsubscribe the source, the
  // openings notifier, and any active closing notifiers; the third closing is
  // never subscribed at all.
  it('should emit windows using varying cold closings, outer unsubscribed early', () => {
    rxTestScheduler.run(({ hot, cold, expectObservable, expectSubscriptions }) => {
      const e2 = cold(' --x-----------y--------z---| ');
      const e2subs = ' ^----------------! ';
      const close = [
        cold(' -------------s---| '),
        cold(' -----(s|) '),
        cold(' ---------------(s|)'),
      ];
      const closeSubs = [
        ' --^------------! ',
        ' --------------^--! ',
      ];
      const e1 = hot('--a--^---b---c---d---e---f---g---h------| ');
      const e1subs = ' ^----------------! ';
      const expected = ' --x-----------y--- ';
      const x = cold(' --b---c---d--| ');
      const y = cold(' --e- ');
      const unsub = ' -----------------! ';
      const values = { x, y };
      let i = 0;
      const result = e1.pipe(windowToggle(e2, () => close[i++]));
      expectObservable(result, unsub).toBe(expected, values);
      expectSubscriptions(e1.subscriptions).toBe(e1subs);
      expectSubscriptions(e2.subscriptions).toBe(e2subs);
      expectSubscriptions(close[0].subscriptions).toBe(closeSubs[0]);
      expectSubscriptions(close[1].subscriptions).toBe(closeSubs[1]);
      expectSubscriptions(close[2].subscriptions).toBe([]);
    });
  });
  // Sandwiching windowToggle between mergeMaps ensures teardown still
  // propagates through the whole operator chain on explicit unsubscribe.
  it('should not break unsubscription chains when result is unsubscribed explicitly', () => {
    rxTestScheduler.run(({ hot, cold, expectObservable, expectSubscriptions }) => {
      const e2 = cold(' --x-----------y--------z---| ');
      const e2subs = ' ^--------------! ';
      const close = [
        cold(' ---------------s--| '),
        cold(' ----(s|) '),
        cold(' ---------------(s|)'),
      ];
      const closeSubs = [
        ' --^------------! ',
        ' --------------^! ',
      ];
      const e1 = hot('--a--^---b---c---d---e---f---g---h------| ');
      const e1subs = ' ^--------------! ';
      const expected = ' --x-----------y- ';
      const x = cold(' --b---c---d--- ');
      const y = cold(' -- ');
      const unsub = ' ---------------! ';
      const values = { x, y };
      let i = 0;
      const result = e1.pipe(
        mergeMap((x) => of(x)),
        windowToggle(e2, () => close[i++]),
        mergeMap((x) => of(x))
      );
      expectObservable(result, unsub).toBe(expected, values);
      expectSubscriptions(e1.subscriptions).toBe(e1subs);
      expectSubscriptions(e2.subscriptions).toBe(e2subs);
      expectSubscriptions(close[0].subscriptions).toBe(closeSubs[0]);
      expectSubscriptions(close[1].subscriptions).toBe(closeSubs[1]);
    });
  });
  // After the outer subscription is torn down, the inner window Subject must
  // be unsubscribed too: subscribing to it late should throw.
  it('should dispose window Subjects if the outer is unsubscribed early', () => {
    rxTestScheduler.run(({ hot, cold, expectObservable, expectSubscriptions, time }) => {
      const open = cold(' o-------------------------|');
      const e1 = hot(' --a--b--c--d--e--f--g--h--|');
      const e1subs = ' ^--------! ';
      const expected = ' x--------- ';
      const x = cold(' --a--b--c- ');
      const unsub = ' ---------! ';
      const late = time(' ---------------| ');
      const values = { x };
      // Captured via tap below; assigned before the scheduled check runs.
      let window: Observable<string>;
      const result = e1.pipe(
        windowToggle(open, () => NEVER),
        tap((w) => {
          window = w;
        })
      );
      expectObservable(result, unsub).toBe(expected, values);
      expectSubscriptions(e1.subscriptions).toBe(e1subs);
      rxTestScheduler.schedule(() => {
        expect(() => {
          window.subscribe();
        }).to.throw(ObjectUnsubscribedError);
      }, late);
    });
  });
  // An exception thrown synchronously by the closing selector errors both the
  // currently open window and the outer result.
  it('should propagate error thrown from closingSelector', () => {
    rxTestScheduler.run(({ hot, cold, expectObservable, expectSubscriptions }) => {
      const e2 = cold(' --x-----------y--------z---| ');
      const e2subs = ' ^-------------! ';
      const close = [
        cold(' ---------------s--| '),
        cold(' ----(s|) '),
        cold(' ---------------(s|)'),
      ];
      const e1 = hot('--a--^---b---c---d---e---f---g---h------| ');
      const e1subs = ' ^-------------! ';
      const expected = ' --x-----------#---- ';
      const x = cold(' --b---c---d-# ');
      const values = { x: x };
      let i = 0;
      const result = e1.pipe(
        windowToggle(e2, () => {
          // Throw on the second opening (i === 1) only.
          if (i === 1) {
            throw 'error';
          }
          return close[i++];
        })
      );
      expectObservable(result).toBe(expected, values);
      expectSubscriptions(e1.subscriptions).toBe(e1subs);
      expectSubscriptions(e2.subscriptions).toBe(e2subs);
    });
  });
  // A closing notifier that errors immediately propagates the error to the
  // just-opened window (y) and to the outer result in the same frame.
  it('should propagate error emitted from a closing', () => {
    rxTestScheduler.run(({ hot, cold, expectObservable, expectSubscriptions }) => {
      const e2 = cold(' --x-----------y--------z---| ');
      const e2subs = ' ^-------------! ';
      // prettier-ignore
      const close = [
        cold('          ---------------s--| '),
        cold('                        # ')
      ];
      const e1 = hot('--a--^---b---c---d---e---f---g---h------|');
      const e1subs = ' ^-------------! ';
      const expected = ' --x-----------(y#) ';
      const x = cold(' --b---c---d-# ');
      const y = cold(' # ');
      const values = { x, y };
      let i = 0;
      const result = e1.pipe(windowToggle(e2, () => close[i++]));
      expectObservable(result).toBe(expected, values);
      expectSubscriptions(e1.subscriptions).toBe(e1subs);
      expectSubscriptions(e2.subscriptions).toBe(e2subs);
    });
  });
  // Same as above but the closing notifier errors a few frames after the
  // window opened.
  it('should propagate error emitted late from a closing', () => {
    rxTestScheduler.run(({ hot, cold, expectObservable, expectSubscriptions }) => {
      const e2 = cold(' --x-----------y--------z---| ');
      const e2subs = ' ^------------------! ';
      // prettier-ignore
      const close = [
        cold('          ---------------s--| '),
        cold('                        -----# ')
      ];
      const e1 = hot('--a--^---b---c---d---e---f---g---h------|');
      const e1subs = ' ^------------------! ';
      const expected = ' --x-----------y----# ';
      const x = cold(' --b---c---d---e| ');
      const y = cold(' --e--# ');
      const values = { x, y };
      let i = 0;
      const result = e1.pipe(windowToggle(e2, () => close[i++]));
      expectObservable(result).toBe(expected, values);
      expectSubscriptions(e1.subscriptions).toBe(e1subs);
      expectSubscriptions(e2.subscriptions).toBe(e2subs);
    });
  });
  // An error from the source itself is forwarded to open windows and the
  // outer result.
  it('should handle errors', () => {
    rxTestScheduler.run(({ hot, cold, expectObservable, expectSubscriptions }) => {
      const e2 = cold(' --x-----------y--------z---| ');
      const e2subs = ' ^------------------! ';
      // prettier-ignore
      const close = [
        cold('          ---------------s--| '),
        cold('                        -------s| ')
      ];
      const e1 = hot('--a--^---b---c---d---e--# ');
      const e1subs = ' ^------------------! ';
      const expected = ' --x-----------y----# ';
      const x = cold(' --b---c---d---e| ');
      const y = cold(' --e--# ');
      const values = { x, y };
      let i = 0;
      const result = e1.pipe(windowToggle(e2, () => close[i++]));
      expectObservable(result).toBe(expected, values);
      expectSubscriptions(e1.subscriptions).toBe(e1subs);
      expectSubscriptions(e2.subscriptions).toBe(e2subs);
    });
  });
  // An empty source completes immediately; both source and openings are
  // subscribed and unsubscribed in the same frame.
  it('should handle empty source', () => {
    rxTestScheduler.run(({ cold, expectObservable, expectSubscriptions }) => {
      const e2 = cold('--o-----|');
      const e2subs = ' (^!)';
      const e3 = cold(' -----c--|');
      const e1 = cold(' |');
      const e1subs = ' (^!)';
      const expected = ' |';
      const result = e1.pipe(windowToggle(e2, () => e3));
      expectObservable(result).toBe(expected);
      expectSubscriptions(e1.subscriptions).toBe(e1subs);
      expectSubscriptions(e2.subscriptions).toBe(e2subs);
    });
  });
  // A source that errors immediately propagates the error right away.
  it('should handle throw', () => {
    rxTestScheduler.run(({ cold, expectObservable, expectSubscriptions }) => {
      const e2 = cold(' --o-----|');
      const e2subs = ' (^!)';
      const e3 = cold(' -----c--|');
      const e1 = cold(' #');
      const e1subs = ' (^!)';
      const expected = '#';
      const result = e1.pipe(windowToggle(e2, () => e3));
      expectObservable(result).toBe(expected);
      expectSubscriptions(e1.subscriptions).toBe(e1subs);
      expectSubscriptions(e2.subscriptions).toBe(e2subs);
    });
  });
  // A never-emitting source still produces (empty) windows for each opening;
  // the run is terminated by the explicit unsub marble.
  it('should handle never', () => {
    rxTestScheduler.run(({ cold, hot, expectObservable, expectSubscriptions }) => {
      const e2 = cold(' --o-----o------o-----o---o-----| ');
      const e2subs = ' ^------------------------------! ';
      const e3 = cold(' --c-| ');
      const e1 = hot(' - ');
      const e1subs = ' ^-------------------------------------------!';
      const expected = '--u-----v------x-----y---z-------------------';
      const u = cold(' --| ');
      const v = cold(' --| ');
      const x = cold(' --| ');
      const y = cold(' --| ');
      const z = cold(' --| ');
      const unsub = ' --------------------------------------------!';
      const values = { u: u, v: v, x, y, z };
      const result = e1.pipe(windowToggle(e2, () => e3));
      expectObservable(result, unsub).toBe(expected, values);
      expectSubscriptions(e1.subscriptions).toBe(e1subs);
      expectSubscriptions(e2.subscriptions).toBe(e2subs);
    });
  });
  // If openings never fire, no window is ever emitted; the result simply
  // mirrors the source's completion.
  it('should handle a never opening Observable', () => {
    rxTestScheduler.run(({ cold, hot, expectObservable, expectSubscriptions }) => {
      const e2 = cold(' - ');
      const e2subs = ' ^----------------------------------!';
      const e3 = cold(' --c-| ');
      const e1 = hot('--a--^---b---c---d---e---f---g---h------|');
      const e1subs = ' ^----------------------------------!';
      const expected = ' -----------------------------------|';
      const result = e1.pipe(windowToggle(e2, () => e3));
      expectObservable(result).toBe(expected);
      expectSubscriptions(e1.subscriptions).toBe(e1subs);
      expectSubscriptions(e2.subscriptions).toBe(e2subs);
    });
  });
  // Never-closing notifiers mean every window stays open until the source
  // completes, so windows can overlap.
  it('should handle a never closing Observable', () => {
    rxTestScheduler.run(({ cold, hot, expectObservable, expectSubscriptions }) => {
      const e2 = cold(' ---o---------------o-----------| ');
      const e2subs = ' ^------------------------------! ';
      const e3 = cold(' - ');
      // -
      const e1 = hot('--a--^---b---c---d---e---f---g---h------|');
      const e1subs = ' ^----------------------------------!';
      const expected = ' ---x---------------y---------------|';
      const x = cold(' -b---c---d---e---f---g---h------|');
      const y = cold(' -f---g---h------|');
      const values = { x, y };
      const result = e1.pipe(windowToggle(e2, () => e3));
      expectObservable(result).toBe(expected, values);
      expectSubscriptions(e1.subscriptions).toBe(e1subs);
      expectSubscriptions(e2.subscriptions).toBe(e2subs);
    });
  });
it('should handle opening Observable that just throws', () => {
rxTestScheduler.run(({ cold, hot, expectObservable, expectSubscriptions }) => {
const e2 = cold(' # ');
const e2subs = ' (^!) ';
const e3 = cold(' --c-| ');
const subs = ' (^!) ';
const e1 = hot('--a--^---b---c---d---e---f---g---h------|');
const e1subs = ' (^!) ';
const expected = ' # ';
const result = e1.pipe(windowToggle(e2, () => e3));
expectObservable(result).toBe(expected);
expectSubscriptions(e1.subscriptions).toBe(subs);
expectSubscriptions(e1.subscriptions).toBe(e1subs);
expectSubscriptions(e2.subscriptions).toBe(e2subs);
});
});
  // EMPTY as the closing notifier completes without emitting, so (like the
  // "never closing" case) windows stay open until the source completes.
  it('should handle empty closing observable', () => {
    rxTestScheduler.run(({ cold, hot, expectObservable, expectSubscriptions }) => {
      const e2 = cold(' ---o---------------o-----------| ');
      const e2subs = ' ^------------------------------! ';
      const e3 = EMPTY;
      const e1 = hot('--a--^---b---c---d---e---f---g---h------|');
      const e1subs = ' ^----------------------------------!';
      const expected = ' ---x---------------y---------------|';
      const x = cold(' -b---c---d---e---f---g---h------|');
      const y = cold(' -f---g---h------|');
      const values = { x, y };
      const result = e1.pipe(windowToggle(e2, () => e3));
      expectObservable(result).toBe(expected, values);
      expectSubscriptions(e1.subscriptions).toBe(e1subs);
      expectSubscriptions(e2.subscriptions).toBe(e2subs);
    });
  });
}); | the_stack |
import * as pulumi from "@pulumi/pulumi";
import { input as inputs, output as outputs } from "../types";
import * as utilities from "../utilities";
/**
* A Workflow Template is a reusable workflow configuration. It defines a graph of jobs with information on where to run those jobs.
*
* ## Example Usage
*
* ```typescript
* import * as pulumi from "@pulumi/pulumi";
* import * as gcp from "@pulumi/gcp";
*
* const template = new gcp.dataproc.WorkflowTemplate("template", {
* jobs: [
* {
* sparkJob: {
* mainClass: "SomeClass",
* },
* stepId: "someJob",
* },
* {
* prerequisiteStepIds: ["someJob"],
* prestoJob: {
* queryFileUri: "someuri",
* },
* stepId: "otherJob",
* },
* ],
* location: "us-central1",
* placement: {
* managedCluster: {
* clusterName: "my-cluster",
* config: {
* gceClusterConfig: {
* tags: [
* "foo",
* "bar",
* ],
* zone: "us-central1-a",
* },
* masterConfig: {
* diskConfig: {
* bootDiskSizeGb: 15,
* bootDiskType: "pd-ssd",
* },
* machineType: "n1-standard-1",
* numInstances: 1,
* },
* secondaryWorkerConfig: {
* numInstances: 2,
* },
* softwareConfig: {
* imageVersion: "1.3.7-deb9",
* },
* workerConfig: {
* diskConfig: {
* bootDiskSizeGb: 10,
* numLocalSsds: 2,
* },
* machineType: "n1-standard-2",
* numInstances: 3,
* },
* },
* },
* },
* });
* ```
*
* ## Import
*
 * WorkflowTemplate can be imported using any of these accepted formats:
*
* ```sh
* $ pulumi import gcp:dataproc/workflowTemplate:WorkflowTemplate default projects/{{project}}/locations/{{location}}/workflowTemplates/{{name}}
* ```
*
* ```sh
* $ pulumi import gcp:dataproc/workflowTemplate:WorkflowTemplate default {{project}}/{{location}}/{{name}}
* ```
*
* ```sh
* $ pulumi import gcp:dataproc/workflowTemplate:WorkflowTemplate default {{location}}/{{name}}
* ```
*/
export class WorkflowTemplate extends pulumi.CustomResource {
    /**
     * Get an existing WorkflowTemplate resource's state with the given name, ID, and optional extra
     * properties used to qualify the lookup.
     *
     * @param name The _unique_ name of the resulting resource.
     * @param id The _unique_ provider ID of the resource to lookup.
     * @param state Any extra arguments used during the lookup.
     * @param opts Optional settings to control the behavior of the CustomResource.
     */
    public static get(name: string, id: pulumi.Input<pulumi.ID>, state?: WorkflowTemplateState, opts?: pulumi.CustomResourceOptions): WorkflowTemplate {
        // Passing an id in opts routes the constructor into its "lookup existing
        // resource" branch rather than creating a new one.
        return new WorkflowTemplate(name, <any>state, { ...opts, id: id });
    }

    /** @internal */
    public static readonly __pulumiType = 'gcp:dataproc/workflowTemplate:WorkflowTemplate';

    /**
     * Returns true if the given object is an instance of WorkflowTemplate. This is designed to work even
     * when multiple copies of the Pulumi SDK have been loaded into the same process.
     */
    public static isInstance(obj: any): obj is WorkflowTemplate {
        if (obj === undefined || obj === null) {
            return false;
        }
        // Duck-typed check on the type token instead of `instanceof`, so it
        // survives multiple SDK copies in one process.
        return obj['__pulumiType'] === WorkflowTemplate.__pulumiType;
    }

    /**
     * Output only. The time template was created.
     */
    public /*out*/ readonly createTime!: pulumi.Output<string>;
    /**
     * (Beta only) Optional. Timeout duration for the DAG of jobs. You can use "s", "m", "h", and "d" suffixes for second, minute, hour, and day duration values, respectively. The timeout duration must be from 10 minutes ("10m") to 24 hours ("24h" or "1d"). The timer begins when the first job is submitted. If the workflow is running at the end of the timeout period, any remaining jobs are cancelled, the workflow is ended, and if the workflow was running on a (/dataproc/docs/concepts/workflows/using-workflows#configuring_or_selecting_a_cluster), the cluster is deleted.
     */
    public readonly dagTimeout!: pulumi.Output<string | undefined>;
    /**
     * Required. The Directed Acyclic Graph of Jobs to submit.
     */
    public readonly jobs!: pulumi.Output<outputs.dataproc.WorkflowTemplateJob[]>;
    /**
     * Optional. The labels to associate with this cluster. Label keys must be between 1 and 63 characters long. No more than 32 labels can be associated with a given cluster.
     * NOTE(review): the generated doc originally cited a PCRE regular expression that was truncated to "{0,63}" — consult the upstream Dataproc API docs for the exact key/value pattern.
     */
    public readonly labels!: pulumi.Output<{[key: string]: string} | undefined>;
    /**
     * The location for the resource
     */
    public readonly location!: pulumi.Output<string>;
    /**
     * The name of the workflow template.
     * NOTE(review): the generated doc here originally described template *parameter* names (copy-paste from `parameters`); this field is the template's own resource name.
     */
    public readonly name!: pulumi.Output<string>;
    /**
     * Optional. Template parameters whose values are substituted into the template. Values for parameters must be provided when the template is instantiated.
     */
    public readonly parameters!: pulumi.Output<outputs.dataproc.WorkflowTemplateParameter[] | undefined>;
    /**
     * Required. WorkflowTemplate scheduling information.
     */
    public readonly placement!: pulumi.Output<outputs.dataproc.WorkflowTemplatePlacement>;
    /**
     * The project for the resource
     */
    public readonly project!: pulumi.Output<string>;
    /**
     * Output only. The time template was last updated.
     */
    public /*out*/ readonly updateTime!: pulumi.Output<string>;
    /**
     * Optional. Used to perform a consistent read-modify-write. This field should be left blank for a `CreateWorkflowTemplate` request. It is required for an `UpdateWorkflowTemplate` request, and must match the current server version. A typical update template flow would fetch the current template with a `GetWorkflowTemplate` request, which will return the current template with the `version` field filled in with the current server version. The user updates other fields in the template, then returns it as part of the `UpdateWorkflowTemplate` request.
     *
     * @deprecated version is not useful as a configurable field, and will be removed in the future.
     */
    public readonly version!: pulumi.Output<number>;

    /**
     * Create a WorkflowTemplate resource with the given unique name, arguments, and options.
     *
     * @param name The _unique_ name of the resource.
     * @param args The arguments to use to populate this resource's properties.
     * @param opts A bag of options that control this resource's behavior.
     */
    constructor(name: string, args: WorkflowTemplateArgs, opts?: pulumi.CustomResourceOptions)
    constructor(name: string, argsOrState?: WorkflowTemplateArgs | WorkflowTemplateState, opts?: pulumi.CustomResourceOptions) {
        // NOTE: this local intentionally shadows the module-level `inputs`
        // import from "../types" (standard Pulumi codegen pattern).
        let inputs: pulumi.Inputs = {};
        opts = opts || {};
        if (opts.id) {
            // Lookup path (see `get` above): argsOrState carries prior state.
            const state = argsOrState as WorkflowTemplateState | undefined;
            inputs["createTime"] = state ? state.createTime : undefined;
            inputs["dagTimeout"] = state ? state.dagTimeout : undefined;
            inputs["jobs"] = state ? state.jobs : undefined;
            inputs["labels"] = state ? state.labels : undefined;
            inputs["location"] = state ? state.location : undefined;
            inputs["name"] = state ? state.name : undefined;
            inputs["parameters"] = state ? state.parameters : undefined;
            inputs["placement"] = state ? state.placement : undefined;
            inputs["project"] = state ? state.project : undefined;
            inputs["updateTime"] = state ? state.updateTime : undefined;
            inputs["version"] = state ? state.version : undefined;
        } else {
            // Creation path: validate required args unless rehydrating by URN.
            const args = argsOrState as WorkflowTemplateArgs | undefined;
            if ((!args || args.jobs === undefined) && !opts.urn) {
                throw new Error("Missing required property 'jobs'");
            }
            if ((!args || args.location === undefined) && !opts.urn) {
                throw new Error("Missing required property 'location'");
            }
            if ((!args || args.placement === undefined) && !opts.urn) {
                throw new Error("Missing required property 'placement'");
            }
            inputs["dagTimeout"] = args ? args.dagTimeout : undefined;
            inputs["jobs"] = args ? args.jobs : undefined;
            inputs["labels"] = args ? args.labels : undefined;
            inputs["location"] = args ? args.location : undefined;
            inputs["name"] = args ? args.name : undefined;
            inputs["parameters"] = args ? args.parameters : undefined;
            inputs["placement"] = args ? args.placement : undefined;
            inputs["project"] = args ? args.project : undefined;
            inputs["version"] = args ? args.version : undefined;
            // Output-only properties are never set from user input.
            inputs["createTime"] = undefined /*out*/;
            inputs["updateTime"] = undefined /*out*/;
        }
        if (!opts.version) {
            // Pin the provider plugin version to the SDK's own version.
            opts = pulumi.mergeOptions(opts, { version: utilities.getVersion()});
        }
        super(WorkflowTemplate.__pulumiType, name, inputs, opts);
    }
}
/**
 * Input properties used for looking up and filtering WorkflowTemplate resources.
 */
export interface WorkflowTemplateState {
    /**
     * Output only. The time template was created.
     */
    createTime?: pulumi.Input<string>;
    /**
     * (Beta only) Optional. Timeout duration for the DAG of jobs. You can use "s", "m", "h", and "d" suffixes for second, minute, hour, and day duration values, respectively. The timeout duration must be from 10 minutes ("10m") to 24 hours ("24h" or "1d"). The timer begins when the first job is submitted. If the workflow is running at the end of the timeout period, any remaining jobs are cancelled, the workflow is ended, and if the workflow was running on a (/dataproc/docs/concepts/workflows/using-workflows#configuring_or_selecting_a_cluster), the cluster is deleted.
     */
    dagTimeout?: pulumi.Input<string>;
    /**
     * Required. The Directed Acyclic Graph of Jobs to submit.
     */
    jobs?: pulumi.Input<pulumi.Input<inputs.dataproc.WorkflowTemplateJob>[]>;
    /**
     * Optional. The labels to associate with this cluster. Label keys must be between 1 and 63 characters long. No more than 32 labels can be associated with a given cluster.
     * NOTE(review): the generated doc originally cited a PCRE regular expression that was truncated to "{0,63}" — consult the upstream Dataproc API docs for the exact key/value pattern.
     */
    labels?: pulumi.Input<{[key: string]: pulumi.Input<string>}>;
    /**
     * The location for the resource
     */
    location?: pulumi.Input<string>;
    /**
     * The name of the workflow template.
     * NOTE(review): the generated doc here originally described template *parameter* names (copy-paste from `parameters`); this field is the template's own resource name.
     */
    name?: pulumi.Input<string>;
    /**
     * Optional. Template parameters whose values are substituted into the template. Values for parameters must be provided when the template is instantiated.
     */
    parameters?: pulumi.Input<pulumi.Input<inputs.dataproc.WorkflowTemplateParameter>[]>;
    /**
     * Required. WorkflowTemplate scheduling information.
     */
    placement?: pulumi.Input<inputs.dataproc.WorkflowTemplatePlacement>;
    /**
     * The project for the resource
     */
    project?: pulumi.Input<string>;
    /**
     * Output only. The time template was last updated.
     */
    updateTime?: pulumi.Input<string>;
    /**
     * Optional. Used to perform a consistent read-modify-write. This field should be left blank for a `CreateWorkflowTemplate` request. It is required for an `UpdateWorkflowTemplate` request, and must match the current server version. A typical update template flow would fetch the current template with a `GetWorkflowTemplate` request, which will return the current template with the `version` field filled in with the current server version. The user updates other fields in the template, then returns it as part of the `UpdateWorkflowTemplate` request.
     *
     * @deprecated version is not useful as a configurable field, and will be removed in the future.
     */
    version?: pulumi.Input<number>;
}
/**
 * The set of arguments for constructing a WorkflowTemplate resource.
 */
export interface WorkflowTemplateArgs {
/**
 * (Beta only) Optional. Timeout duration for the DAG of jobs. You can use "s", "m", "h", and "d" suffixes for second, minute, hour, and day duration values, respectively. The timeout duration must be from 10 minutes ("10m") to 24 hours ("24h" or "1d"). The timer begins when the first job is submitted. If the workflow is running at the end of the timeout period, any remaining jobs are cancelled, the workflow is ended, and if the workflow was running on a managed cluster (/dataproc/docs/concepts/workflows/using-workflows#configuring_or_selecting_a_cluster), the cluster is deleted.
 */
dagTimeout?: pulumi.Input<string>;
/**
 * Required. The Directed Acyclic Graph of Jobs to submit.
 */
jobs: pulumi.Input<pulumi.Input<inputs.dataproc.WorkflowTemplateJob>[]>;
/**
 * Optional. The labels to associate with this cluster. Label keys must be between 1 and 63 characters long, and must conform to the following PCRE regular expression: `[\p{Ll}\p{Lo}\p{N}_-]{0,63}`. No more than 32 labels can be associated with a given cluster.
 */
labels?: pulumi.Input<{[key: string]: pulumi.Input<string>}>;
/**
 * The location for the resource
 */
location: pulumi.Input<string>;
/**
 * The name of the workflow template. (The previous description was copy-pasted from the WorkflowTemplateParameter name field.)
 */
name?: pulumi.Input<string>;
/**
 * Optional. Template parameters whose values are substituted into the template. Values for parameters must be provided when the template is instantiated.
 */
parameters?: pulumi.Input<pulumi.Input<inputs.dataproc.WorkflowTemplateParameter>[]>;
/**
 * Required. WorkflowTemplate scheduling information.
 */
placement: pulumi.Input<inputs.dataproc.WorkflowTemplatePlacement>;
/**
 * The project for the resource
 */
project?: pulumi.Input<string>;
/**
 * Optional. Used to perform a consistent read-modify-write. This field should be left blank for a `CreateWorkflowTemplate` request. It is required for an `UpdateWorkflowTemplate` request, and must match the current server version. A typical update template flow would fetch the current template with a `GetWorkflowTemplate` request, which will return the current template with the `version` field filled in with the current server version. The user updates other fields in the template, then returns it as part of the `UpdateWorkflowTemplate` request.
 *
 * @deprecated version is not useful as a configurable field, and will be removed in the future.
 */
version?: pulumi.Input<number>;
}
import {
BitcoinProtocol,
BitcoinProtocolOptions,
IAirGapTransaction,
ProtocolSymbols,
SignedBitcoinSegwitTransaction,
UnsignedTransaction
} from '../..'
import { mnemonicToSeed } from '../../dependencies/src/bip39-2.5.0/index'
import axios from '../../dependencies/src/axios-0.19.0/index'
import { RawBitcoinSegwitTransaction, RawBitcoinTransaction } from '../../serializer-v3/types'
import BigNumber from '../../dependencies/src/bignumber.js-9.0.0/bignumber'
import * as bs58check from '../../dependencies/src/bs58check-2.1.2'
import * as bitcoinJS from 'bitcoinjs-lib'
import { UTXOResponse } from './BitcoinProtocol'
import { MainProtocolSymbols } from '../../utils/ProtocolSymbols'
import { BitcoinSegwitAddress } from './BitcoinSegwitAddress'
import { AirGapTransactionWarning } from '../../interfaces/IAirGapTransaction'
const DUST_AMOUNT = 50
/**
 * Recursively walks an object or array in place and replaces every Buffer
 * value it finds with that buffer's hex-string representation.
 * Mutates `obj` and returns it (so the call can be chained).
 */
function eachRecursive(obj: Object | Object[]) {
  for (const key in obj) {
    const value = obj[key]
    if (Buffer.isBuffer(value)) {
      obj[key] = value.toString('hex')
    }
    // Re-read: a Buffer was just turned into a string, which we must not recurse into.
    if (obj[key] !== null && typeof obj[key] === 'object') {
      obj[key] = eachRecursive(obj[key])
    }
  }
  return obj
}
// https://github.com/satoshilabs/slips/blob/master/slip-0132.md
/**
 * Wraps a base58check-encoded extended public key and re-encodes it with the
 * SLIP-0132 version prefixes (`xpub` / `ypub` / `zpub`).
 */
class ExtendedPublicKey {
  // Key material without the 4-byte version prefix.
  private readonly rawKey: Buffer

  constructor(extendedPublicKey: string) {
    const decoded = bs58check.decode(extendedPublicKey)
    this.rawKey = decoded.slice(4)
  }

  toXpub() {
    return this.addPrefix('0488b21e')
  }

  toYPub() {
    return this.addPrefix('049d7cb2')
  }

  toZPub() {
    return this.addPrefix('04b24746')
  }

  /** Prepends the given hex version prefix and base58check-encodes the result. */
  private addPrefix(prefix: string) {
    const prefixBytes = Buffer.from(prefix, 'hex')
    return bs58check.encode(Buffer.concat([prefixBytes, this.rawKey]))
  }
}
export class BitcoinSegwitProtocol extends BitcoinProtocol {
public name = 'Bitcoin (Segwit)'
public identifier: ProtocolSymbols = MainProtocolSymbols.BTC_SEGWIT
public standardDerivationPath: string = `m/84'/0'/0'`
public addressPlaceholder: string = 'bc1...'
constructor(options: BitcoinProtocolOptions = new BitcoinProtocolOptions()) {
super(options)
}
public async getPublicKeyFromHexSecret(secret: string, derivationPath: string): Promise<string> {
const bitcoinNode = bitcoinJS.bip32.fromSeed(Buffer.from(secret, 'hex'), this.options.network.extras.network)
const neutered = bitcoinNode.derivePath(derivationPath).neutered()
const zpub = new ExtendedPublicKey(neutered.toBase58()).toZPub()
return zpub
}
public async getPrivateKeyFromHexSecret(secret: string, derivationPath: string): Promise<Buffer> {
if (!derivationPath) {
return bitcoinJS.bip32.fromSeed(Buffer.from(secret, 'hex'), this.options.network.extras.network) as any
}
const bitcoinNode = bitcoinJS.bip32.fromSeed(Buffer.from(secret, 'hex'), this.options.network.extras.network)
const privateKey = bitcoinNode.derivePath(derivationPath).privateKey
if (!privateKey) {
throw new Error('No privatekey!')
}
return privateKey
}
public async getExtendedPrivateKeyFromMnemonic(mnemonic: string, derivationPath: string, password?: string): Promise<string> {
const secret = mnemonicToSeed(mnemonic, password)
return this.getExtendedPrivateKeyFromHexSecret(secret, derivationPath)
}
public async getExtendedPrivateKeyFromHexSecret(secret: string, derivationPath: string): Promise<string> {
const bitcoinNode = bitcoinJS.bip32.fromSeed(Buffer.from(secret, 'hex'), this.options.network.extras.network)
return bitcoinNode.derivePath(derivationPath).toBase58()
}
public async getAddressFromPublicKey(publicKey: string): Promise<BitcoinSegwitAddress> {
// broadcaster knows this (both broadcaster and signer)
return BitcoinSegwitAddress.fromAddress(bitcoinJS.bip32.fromBase58(publicKey, this.options.network.extras.network).toBase58())
}
public async getAddressesFromPublicKey(publicKey: string): Promise<BitcoinSegwitAddress[]> {
return [await this.getAddressFromPublicKey(publicKey)]
}
public async getAddressFromExtendedPublicKey(
extendedPublicKey: string,
visibilityDerivationIndex: number,
addressDerivationIndex: number
): Promise<BitcoinSegwitAddress> {
const xpub = new ExtendedPublicKey(extendedPublicKey).toXpub()
// broadcaster knows this (both broadcaster and signer)
const keyPair = bitcoinJS.bip32
.fromBase58(xpub, this.options.network.extras.network)
.derive(visibilityDerivationIndex)
.derive(addressDerivationIndex)
const obj = bitcoinJS.payments.p2wpkh({ pubkey: keyPair.publicKey })
const { address } = obj
if (!address) {
throw new Error('could not generate address')
}
return BitcoinSegwitAddress.fromAddress(address)
}
public getAddressesFromExtendedPublicKey(
extendedPublicKey: string,
visibilityDerivationIndex: number,
addressCount: number,
offset: number
): Promise<BitcoinSegwitAddress[]> {
// broadcaster knows this (both broadcaster and signer)
const node = bitcoinJS.bip32.fromBase58(new ExtendedPublicKey(extendedPublicKey).toXpub(), this.options.network.extras.network)
const generatorArray = Array.from(new Array(addressCount), (x, i) => i + offset)
return Promise.all(
generatorArray.map((x) => {
const keyPair = node.derive(visibilityDerivationIndex).derive(x)
const { address } = bitcoinJS.payments.p2wpkh({ pubkey: keyPair.publicKey })
if (!address) {
throw new Error('could not generate address')
}
return BitcoinSegwitAddress.fromAddress(address)
})
)
}
public async getTransactionDetails(unsignedTx: UnsignedTransaction): Promise<IAirGapTransaction[]> {
// out of public information (both broadcaster and signer)
const transaction = unsignedTx.transaction as RawBitcoinSegwitTransaction
const decodedPSBT = bitcoinJS.Psbt.fromHex(transaction.psbt)
let feeCalculator = new BigNumber(0)
for (const txIn of decodedPSBT.data.inputs) {
feeCalculator = feeCalculator.plus(new BigNumber(txIn.witnessUtxo?.value ?? 0))
}
for (const txOut of decodedPSBT.txOutputs) {
feeCalculator = feeCalculator.minus(new BigNumber(txOut.value))
}
const warnings: AirGapTransactionWarning[] = []
const clonedPSBT = decodedPSBT.clone()
eachRecursive(clonedPSBT) // All buffers to hex string
// Amount is tricky for BTC. Full amount of inputs is always transferred, but usually a (big) part of the amount is sent back to the sender via change address. So it's not easy to determine what the amount is that the user intended to send.
const amount: BigNumber = (() => {
// If tx has only one output, this is the amount
if (decodedPSBT.txOutputs.length === 1) {
return new BigNumber(decodedPSBT.txOutputs[0].value)
}
// If we can match one output to an exact amount that we add to the PSBT, this is our amount.
{
const unknownKeyVals = decodedPSBT.data.globalMap.unknownKeyVals
if (unknownKeyVals) {
const amountArray = unknownKeyVals.filter((kv) => kv.key.equals(Buffer.from('amount')))
if (amountArray.length > 0) {
return new BigNumber(amountArray[0].value.toString()) // Buffer to number
}
}
}
// If tx has an output and we added a derivation path in the PSBT (in the wallet, prepareTransaction), we know that it is a change address and we ignore it.
let accumulated = new BigNumber(0)
let useAccumulated = false
decodedPSBT.data.outputs.forEach((outputKeyValues, index) => {
if (outputKeyValues.unknownKeyVals) {
const derivationPaths = outputKeyValues.unknownKeyVals
.filter((kv) => kv.key.equals(Buffer.from('dp')))
.map((kv) => kv.value.toString())
if (derivationPaths.length > 0) {
// If one of the outputs has the derivation in the custom key/value map, we can use this to determine the amount.
useAccumulated = true
return
}
}
const output = decodedPSBT.txOutputs[index]
accumulated = accumulated.plus(output.value)
})
if (useAccumulated) {
return accumulated
}
// If we cannot match anything above, we need to assume that the whole amount is being sent and the user has to check the outputs.
return decodedPSBT.txOutputs
.map((obj) => new BigNumber(obj.value))
.reduce((accumulator, currentValue) => accumulator.plus(currentValue))
})()
return [
{
from: decodedPSBT.data.inputs.map(
(obj) =>
obj.bip32Derivation
?.map(
(el) =>
bitcoinJS.payments.p2wpkh({
pubkey: el.pubkey,
network: bitcoinJS.networks.bitcoin
}).address
)
.join(' ') ?? 'INVALID'
),
to: decodedPSBT.txOutputs.map((obj) => {
return obj.address || `Script: ${obj.script.toString('hex')}` || 'unknown'
}),
amount: amount.toString(10),
fee: feeCalculator.toString(10),
protocolIdentifier: this.identifier,
network: this.options.network,
isInbound: false,
transactionDetails: {
// This is some unstructured data about the PSBT. This is shown in the UI as a JSON for advanced users.
inputTx: eachRecursive(clonedPSBT.txInputs),
outputTx: eachRecursive(clonedPSBT.txOutputs),
inputData: clonedPSBT.data.inputs,
outputData: clonedPSBT.data.outputs,
PSBTVersion: clonedPSBT.version,
PSBTLocktime: clonedPSBT.locktime,
PSBTGlobalMap: clonedPSBT.data.globalMap,
rawPSBT: unsignedTx.transaction
},
warnings
}
]
}
public async getTransactionDetailsFromSigned(signedTx: SignedBitcoinSegwitTransaction): Promise<IAirGapTransaction[]> {
return this.getTransactionDetails({ publicKey: '', transaction: { psbt: signedTx.transaction } })
}
public async prepareTransactionFromExtendedPublicKey(
extendedPublicKey: string,
offset: number,
recipients: string[],
values: string[],
fee: string,
extras: {
masterFingerprint: string
replaceByFee: boolean
}
): Promise<any> {
if (!extras.masterFingerprint) {
throw new Error('MasterFingerprint not set!')
}
const wrappedValues: BigNumber[] = values.map((value: string) => new BigNumber(value))
const wrappedFee: BigNumber = new BigNumber(fee)
const transaction: RawBitcoinTransaction = {
ins: [],
outs: []
}
if (recipients.length !== wrappedValues.length) {
throw new Error('recipients do not match values')
}
const { data: utxos }: { data: UTXOResponse[] } = await axios.get<UTXOResponse[]>(
`${this.options.network.extras.indexerApi}/api/v2/utxo/${extendedPublicKey}?confirmed=true`,
{
responseType: 'json'
}
)
if (utxos.length <= 0) {
throw new Error('not enough balance') // no transactions found on those addresses, probably won't find anything in the next ones
}
const totalRequiredBalance: BigNumber = wrappedValues
.reduce((accumulator: BigNumber, currentValue: BigNumber) => accumulator.plus(currentValue))
.plus(wrappedFee)
let valueAccumulator: BigNumber = new BigNumber(0)
const getPathIndexes = (path: string): [number, number] => {
const result = path
.split('/')
.slice(-2)
.map((item) => parseInt(item))
.filter((item) => !isNaN(item))
if (result.length !== 2) {
throw new Error('Unexpected path format')
}
return [result[0], result[1]]
}
for (const utxo of utxos) {
valueAccumulator = valueAccumulator.plus(utxo.value)
const indexes: [number, number] = getPathIndexes(utxo.path)
const derivedAddress: string = (await this.getAddressFromExtendedPublicKey(extendedPublicKey, indexes[0], indexes[1])).getValue()
if (derivedAddress === utxo.address) {
transaction.ins.push({
txId: utxo.txid,
value: new BigNumber(utxo.value).toString(10),
vout: utxo.vout,
address: utxo.address,
derivationPath: utxo.path
})
} else {
throw new Error('Invalid address returned from API')
}
if (valueAccumulator.isGreaterThanOrEqualTo(totalRequiredBalance)) {
break
}
}
if (valueAccumulator.isLessThan(totalRequiredBalance)) {
throw new Error('not enough balance 2')
}
for (let i = 0; i < recipients.length; i++) {
transaction.outs.push({
recipient: recipients[i],
isChange: false,
value: wrappedValues[i].toString(10)
})
valueAccumulator = valueAccumulator.minus(wrappedValues[i])
}
const lastUsedInternalAddress: number = Math.max(
-1,
...utxos
.map((utxo: UTXOResponse) => getPathIndexes(utxo.path))
.filter((indexes: [number, number]) => indexes[0] === 1)
.map((indexes: [number, number]) => indexes[1])
)
// If the change is considered dust, the transaction will fail.
// Dust is a variable value around 300-600 satoshis, depending on the configuration.
// We set a low fee here to not block any transactions, but it might still fail due to "dust".
const changeValue: BigNumber = valueAccumulator.minus(wrappedFee)
if (changeValue.isGreaterThan(new BigNumber(DUST_AMOUNT))) {
const changeAddressIndex: number = lastUsedInternalAddress + 1
const derivedAddress: string = (await this.getAddressFromExtendedPublicKey(extendedPublicKey, 1, changeAddressIndex)).getValue()
transaction.outs.push({
recipient: derivedAddress,
isChange: true,
value: changeValue.toString(10),
derivationPath: `1/${changeAddressIndex}`
})
}
const psbt = new bitcoinJS.Psbt()
// We add the total amount of the transaction to the global map. This can be used to show the info in the "from-to" component after the transaction was signed.
psbt.addUnknownKeyValToGlobal({
key: Buffer.from('amount'),
value: Buffer.from(
wrappedValues.reduce((accumulator: BigNumber, currentValue: BigNumber) => accumulator.plus(currentValue)).toString()
)
})
const keyPair = bitcoinJS.bip32.fromBase58(new ExtendedPublicKey(extendedPublicKey).toXpub())
const replaceByFee: boolean = extras.replaceByFee ? true : false
transaction.ins.forEach((tx) => {
const indexes: [number, number] = getPathIndexes(tx.derivationPath!)
const childNode = keyPair.derivePath(indexes.join('/'))
const p2wpkh = bitcoinJS.payments.p2wpkh({ pubkey: childNode.publicKey, network: this.options.network.extras.network })
const p2shOutput = p2wpkh.output
if (!p2shOutput) {
throw new Error('no p2shOutput')
}
psbt.addInput({
hash: tx.txId,
index: tx.vout,
sequence: replaceByFee ? 0xfffffffd : undefined, // Needs to be at least 2 below max int value to be RBF
witnessUtxo: {
script: p2shOutput,
value: parseInt(tx.value)
},
bip32Derivation: [
{
masterFingerprint: Buffer.from(extras.masterFingerprint, 'hex'),
pubkey: childNode.publicKey,
path: tx.derivationPath!
}
]
})
})
transaction.outs.forEach((out, index) => {
psbt.addOutput({ address: out.recipient, value: parseInt(out.value) })
if (out.derivationPath) {
// We add the derivation path of our change address to the key value map of the PSBT. This will allow us to later "filter" out this address when displaying the transaction info.
psbt.addUnknownKeyValToOutput(index, {
key: Buffer.from('dp'),
value: Buffer.from(out.derivationPath, 'utf8')
})
}
})
const tx: RawBitcoinSegwitTransaction = {
psbt: psbt.toHex()
}
return tx
}
public async signWithExtendedPrivateKey(extendedPrivateKey: string, transaction: any /* RawBitcoinSegwitTransaction */): Promise<string> {
const rawBitcoinSegwitTx: RawBitcoinSegwitTransaction = transaction
const bip32PK = bitcoinJS.bip32.fromBase58(extendedPrivateKey)
const decodedPSBT = bitcoinJS.Psbt.fromHex(rawBitcoinSegwitTx.psbt)
decodedPSBT.data.inputs.forEach((input, index) => {
input.bip32Derivation?.forEach((deriv) => {
try {
// This uses the same logic to find child key as the "findWalletByFingerprintDerivationPathAndProtocolIdentifier" method in the Vault
const cutoffFrom = deriv.path.lastIndexOf("'") || deriv.path.lastIndexOf('h')
const childPath = deriv.path.substr(cutoffFrom + 2)
decodedPSBT.signInput(index, bip32PK.derivePath(childPath))
console.log(`Signed input ${index} with path ${deriv.path}`)
} catch (e) {
console.log(`Error signing input ${index}`, e)
}
})
})
return decodedPSBT.toHex()
}
public async broadcastTransaction(rawTransaction: string): Promise<string> {
const hexTransaction = bitcoinJS.Psbt.fromHex(rawTransaction).finalizeAllInputs().extractTransaction().toHex()
return super.broadcastTransaction(hexTransaction)
}
} | the_stack |
import { Spreadsheet } from '../base/index';
import { applyMerge, activeCellMergedRange, MergeArgs } from '../../workbook/common/index';
import { ICellRenderer, hiddenMerge, dialog, locale, CellRenderArgs, focus } from '../common/index';
import { checkPrevMerge, checkMerge, DialogBeforeOpenEventArgs } from '../common/index';
import { Dialog } from '../services/index';
import { CellModel, getCell, SheetModel, isHiddenCol, isHiddenRow } from '../../workbook/index';
import { L10n } from '@syncfusion/ej2-base';
import { BeforeOpenEventArgs } from '@syncfusion/ej2-popups';
/**
 * The `Merge` module is used to merge the range of cells.
 */
export class Merge {
    // Host spreadsheet instance; set to null on destroy.
    private parent: Spreadsheet;
    /**
     * Constructor for the Spreadsheet merge module.
     *
     * @param {Spreadsheet} parent - Specify the spreadsheet.
     * @private
     */
    constructor(parent: Spreadsheet) {
        this.parent = parent;
        this.addEventListener();
    }
    // Handler for the `applyMerge` event. When `showDialog` is set, only a modal
    // alert (locale constant 'PasteMergeAlert') is shown and no merge is applied;
    // otherwise the cell renderer refreshes the cell at (rowIdx, colIdx) so the
    // merged state is (re)drawn.
    private merge(args: { rowIdx?: number, colIdx?: number, showDialog?: boolean, lastCell?: boolean, element?: Element }): void {
        if (args.showDialog) {
            (this.parent.serviceLocator.getService(dialog) as Dialog).show({
                target: this.parent.element,
                height: 180, width: 400, isModal: true, showCloseIcon: true,
                content: (this.parent.serviceLocator.getService(locale) as L10n).getConstant('PasteMergeAlert'),
                beforeOpen: (args: BeforeOpenEventArgs): void => {
                    const dlgArgs: DialogBeforeOpenEventArgs = {
                        dialogName: 'MergeDialog',
                        element: args.element, target: args.target, cancel: args.cancel
                    };
                    // Allow consumers to cancel the dialog via `dialogBeforeOpen`.
                    this.parent.trigger('dialogBeforeOpen', dlgArgs);
                    if (dlgArgs.cancel) {
                        args.cancel = true;
                    }
                    focus(this.parent.element);
                }
            });
            return;
        }
        (this.parent.serviceLocator.getService('cell') as ICellRenderer).refresh(args.rowIdx, args.colIdx, args.lastCell, args.element);
    }
    // Handler for the `hiddenMerge` event — presumably raised while hiding/showing
    // rows (`model` === 'row') or columns. Re-computes the row/col span of the
    // merged cell anchored at the active-cell merged range and either patches the
    // visible td's spans or triggers a full re-merge of the anchor cell.
    // NOTE(review): `isHiddenCol` is used in the guard below even when
    // `args.model` is 'row' — verify whether that is intentional.
    private hideHandler(args: { rowIdx: number, colIdx: number, model: string, start: number, end: number, isEnd?: boolean,
        hide: boolean }): void {
        const sheet: SheetModel = this.parent.getActiveSheet();
        // Resolve the full merged range containing (rowIdx, colIdx).
        const mergeArgs: MergeArgs = { range: [args.rowIdx, args.colIdx, args.rowIdx, args.colIdx] };
        this.parent.notify(activeCellMergedRange, mergeArgs); mergeArgs.range = mergeArgs.range as number[];
        const cell: CellModel = getCell(mergeArgs.range[0], mergeArgs.range[1], sheet) || {};
        const startIdx: number = args.model === 'row' ? mergeArgs.range[0] : mergeArgs.range[1];
        const endIdx: number = startIdx + ((cell[`${args.model}Span`] || 1) - 1);
        if ((!args.isEnd && (args.start === startIdx || isHiddenCol(sheet, startIdx))) || (args.isEnd && (args.start > startIdx &&
            !isHiddenCol(sheet, startIdx)))) { return; }
        if (cell[`${args.model}Span`] > 1 && endIdx >= args.start) {
            if (args.model === 'row' ? isHiddenRow(sheet, startIdx) : isHiddenCol(sheet, startIdx)) {
                // Anchor of the merged range is hidden: patch the first visible cell
                // after `args.end` so it carries the remaining col/row span.
                if (args.end < endIdx && (endIdx - args.end > 1 || cell.rowSpan > 1)) {
                    const cellEle: HTMLTableCellElement = this.parent.getCell(args.rowIdx, args.end + 1) as HTMLTableCellElement;
                    if (cellEle) {
                        if (endIdx - args.end > 1) { cellEle.colSpan = endIdx - args.end; }
                        cellEle.style.display = '';
                    }
                    if (cell.rowSpan > 1) {
                        // Shrink the row span by however many of its rows are hidden.
                        const rowSpan: number = cell.rowSpan - this.parent.hiddenCount(args.rowIdx, args.rowIdx + (cell.rowSpan - 1));
                        if (rowSpan > 1) { cellEle.rowSpan = rowSpan; }
                    }
                }
            } else {
                // Anchor is visible again: re-render the merge on the anchor cell.
                this.merge({ rowIdx: mergeArgs.range[0], colIdx: mergeArgs.range[1] });
            }
        }
    }
    // Handler for the `checkPrevMerge` event. While rendering cell
    // (args.rowIdx, args.colIdx), if the previous row (args.isRow) or previous
    // column is hidden and a merged cell anchored in that hidden row/column spans
    // into the current one, restore the td's rowSpan/colSpan (reduced by the
    // number of hidden rows/columns inside the span) and make the td visible.
    private checkPrevMerge(args: CellRenderArgs): void {
        let cell: CellModel; const sheet: SheetModel = this.parent.getActiveSheet(); let mergeArgs: MergeArgs; let mergeCount: number;
        if (args.isRow) {
            if (args.rowIdx - 1 > -1 && isHiddenRow(sheet, args.rowIdx - 1)) {
                cell = getCell(args.rowIdx - 1, args.colIdx, sheet) || {};
                if ((cell.rowSpan !== undefined || cell.colSpan !== undefined) && (cell.colSpan === undefined || cell.colSpan > 1 ||
                    (args.colIdx - 1 > -1 && isHiddenCol(sheet, args.colIdx - 1)))) {
                    // Resolve the merged range that the hidden previous-row cell belongs to.
                    mergeArgs = { range: [args.rowIdx - 1, args.colIdx, args.rowIdx - 1, args.colIdx] };
                    this.parent.notify(activeCellMergedRange, mergeArgs); mergeArgs.range = mergeArgs.range as number[];
                    if (isHiddenRow(sheet, mergeArgs.range[0]) && mergeArgs.range[2] >= args.rowIdx) {
                        cell = getCell(mergeArgs.range[0], mergeArgs.range[1], sheet) || {};
                        if (cell.rowSpan > 1) {
                            // Remaining visible rows of the merged range from the current row down.
                            mergeCount = (mergeArgs.range[2] - args.rowIdx) + 1 - this.parent.hiddenCount(
                                args.rowIdx, args.rowIdx + (mergeArgs.range[2] - args.rowIdx));
                            if (mergeCount > 1) {
                                args.td.rowSpan = mergeCount; args.td.style.display = '';
                            }
                            if (cell.colSpan > 1 && !(args.colIdx - 1 > -1 && isHiddenCol(sheet, args.colIdx - 1))) {
                                mergeCount = cell.colSpan - this.parent.hiddenCount(
                                    args.colIdx, args.colIdx + (cell.colSpan - 1), 'columns');
                                if (mergeCount > 1) { args.td.colSpan = mergeCount; args.td.style.display = ''; }
                            }
                        }
                    }
                }
            }
            return;
        }
        // Column variant: mirror of the row branch above.
        if (args.colIdx - 1 > -1 && isHiddenCol(sheet, args.colIdx - 1)) {
            cell = getCell(args.rowIdx, args.colIdx - 1, sheet) || {};
            if ((cell.colSpan !== undefined || cell.rowSpan !== undefined) && (cell.rowSpan === undefined || cell.rowSpan > 1 ||
                (args.rowIdx - 1 > -1 && isHiddenRow(sheet, args.rowIdx - 1)))) {
                mergeArgs = { range: [args.rowIdx, args.colIdx - 1, args.rowIdx, args.colIdx - 1] };
                this.parent.notify(activeCellMergedRange, mergeArgs); mergeArgs.range = mergeArgs.range as number[];
                if (isHiddenCol(sheet, mergeArgs.range[1]) && mergeArgs.range[3] >= args.colIdx) {
                    cell = getCell(mergeArgs.range[0], mergeArgs.range[1], sheet) || {};
                    if (cell.colSpan > 1) {
                        mergeCount = (mergeArgs.range[3] - args.colIdx) + 1 - this.parent.hiddenCount(
                            args.colIdx, args.colIdx + (cell.colSpan - 1), 'columns');
                        if (mergeCount > 1) {
                            args.td.colSpan = mergeCount; args.td.style.display = '';
                        }
                        if (cell.rowSpan > 1 && !(args.rowIdx - 1 > -1 && isHiddenRow(sheet, args.rowIdx - 1))) {
                            mergeCount = cell.rowSpan - this.parent.hiddenCount(args.rowIdx, args.rowIdx + (cell.rowSpan - 1));
                            if (mergeCount > 1) { args.td.rowSpan = mergeCount; args.td.style.display = ''; }
                        }
                    }
                }
            }
        }
    }
    // Handler for the `checkMerge` event. While rendering cell
    // (args.rowIdx, args.colIdx), if the cell is part of a merged range whose
    // anchor is visible (e.g. the cell before it is hidden), re-applies the merge
    // on the anchor and sets the td's row/col span to the visible extent.
    // Also flags `args.insideFreezePane` when the merged range starts inside the
    // frozen rows/columns.
    private checkMerge(args: CellRenderArgs): void {
        const sheet: SheetModel = this.parent.getActiveSheet();
        let cell: CellModel = getCell(args.rowIdx, args.colIdx, sheet) || {};
        if (args.isRow) {
            if (cell.colSpan === undefined || isHiddenCol(sheet, args.colIdx - 1)) {
                const mergeArgs: MergeArgs = { range: [args.rowIdx, args.colIdx, args.rowIdx, args.colIdx] };
                mergeArgs.range = mergeArgs.range as number[];
                this.parent.notify(activeCellMergedRange, mergeArgs);
                if ((isHiddenCol(sheet, args.colIdx - 1) && !isHiddenCol(sheet, mergeArgs.range[1])) || (args.isFreezePane &&
                    mergeArgs.range[0] < this.parent.frozenRowCount(sheet))) {
                    args.insideFreezePane = mergeArgs.range[0] < this.parent.frozenRowCount(sheet); return;
                }
                cell = getCell(mergeArgs.range[0], mergeArgs.range[1], sheet);
                // Visible rows of the merged range starting at the current row.
                const mergeCount: number = (mergeArgs.range[2] - args.rowIdx) + 1 -
                    this.parent.hiddenCount(args.rowIdx, mergeArgs.range[2]);
                if (mergeCount > 1) {
                    this.merge({ rowIdx: mergeArgs.range[0], colIdx: mergeArgs.range[1], element: args.td });
                    args.td.rowSpan = mergeCount;
                    args.td.style.display = '';
                }
            }
        } else {
            // Column variant: mirror of the row branch above.
            if (cell.rowSpan === undefined || isHiddenRow(sheet, args.rowIdx - 1)) {
                const mergeArgs: MergeArgs = { range: [args.rowIdx, args.colIdx, args.rowIdx, args.colIdx] };
                mergeArgs.range = mergeArgs.range as number[];
                this.parent.notify(activeCellMergedRange, mergeArgs);
                if ((isHiddenRow(sheet, args.rowIdx - 1) && !isHiddenRow(sheet, mergeArgs.range[0])) || (args.isFreezePane &&
                    mergeArgs.range[1] < this.parent.frozenColCount(sheet))) {
                    args.insideFreezePane = mergeArgs.range[1] < this.parent.frozenColCount(sheet); return;
                }
                cell = getCell(mergeArgs.range[0], mergeArgs.range[1], sheet);
                const mergeCount: number = (mergeArgs.range[3] - args.colIdx) + 1 - this.parent.hiddenCount(
                    args.colIdx, mergeArgs.range[3], 'columns');
                if (mergeCount > 1) {
                    this.merge({ rowIdx: mergeArgs.range[0], colIdx: mergeArgs.range[1], element: args.td });
                    args.td.colSpan = mergeCount;
                    args.td.style.display = '';
                }
            }
        }
    }
    // Subscribes the module's handlers to the spreadsheet event bus.
    private addEventListener(): void {
        this.parent.on(applyMerge, this.merge, this);
        this.parent.on(hiddenMerge, this.hideHandler, this);
        this.parent.on(checkPrevMerge, this.checkPrevMerge, this);
        this.parent.on(checkMerge, this.checkMerge, this);
    }
    /**
     * Destroy merge module.
     *
     * @returns {void} - Destroy merge module.
     */
    public destroy(): void {
        this.removeEventListener();
        this.parent = null;
    }
    // Unsubscribes the handlers (skipped when the parent is already destroyed).
    private removeEventListener(): void {
        if (!this.parent.isDestroyed) {
            this.parent.off(applyMerge, this.merge);
            this.parent.off(hiddenMerge, this.hideHandler);
            this.parent.off(checkPrevMerge, this.checkPrevMerge);
            this.parent.off(checkMerge, this.checkMerge);
        }
    }
    /**
     * Get the merge module name.
     *
     * @returns {string} - Get the merge module name.
     */
    public getModuleName(): string {
        return 'merge';
    }
}
export const templates = {
// .editorconfig
'.editorconfig': `
[*]
indent_size = 2
tab_width = 2
`,
// .gitignore
'.gitignore': `
# Cache and log files
.DS_Store
yarn-error.log
# Dependencies
node_modules
# Build files
dist
# Generated files
src/options.ts
src/schema.ts
`,
// .prettierignore
'.prettierignore': `
package.json
`,
// .prettierrc
'.prettierrc': `
{
"printWidth": 120,
"singleQuote": true,
"proseWrap": "always"
}
`,
// LICENSE
LICENSE: `
MIT License
Copyright (c) {{year}} Florian Körner
{{#if contributor}}
Copyright (c) {{year}} {{{contributor}}}
{{/if}}
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
`,
// readme.md
'README.md': `
<h1 align="center"><img src="./tests/svg/0.svg" width="124" /> <br />{{title}}</h1>
<p align="center">
<strong>Avatar Style for <a href="https://dicebear.com/">DiceBear Avatars</a>.</strong><br />
{{#if source}}
<a href="{{source}}">{{title}}</a>
{{else}}
{{title}}
{{/if}}
{{#if creator}}
by {{creator}}
{{/if}}
{{#if licenseName}}
licensed under
{{#if licenseUrl}}
<a href="{{licenseUrl}}">{{licenseName}}</a>.
{{else}}
{{licenseName}}
{{/if}}
{{/if}}
</p>
{{#unless isMitLicensed}}
<p align="center">
While our code is MIT licensed, the design is licensed under
{{#if licenseUrl}}
<a href="{{licenseUrl}}">{{licenseName}}</a>.
{{else}}
{{licenseName}}.
{{/if}}
See <a href="https://dicebear.com/licenses">license overview</a> for more information.
</p>
{{/unless}}
----
## Usage
Install the DiceBear package and this Avatar styles with the following command.
\`\`\`
npm install @dicebear/avatars {{packageName}} --save
\`\`\`
Now you are ready to create your first Avatar.
\`\`\`js
import { createAvatar } from '@dicebear/avatars';
import * as style from '{{packageName}}';
let svg = createAvatar(style, {
seed: 'custom-seed',
// ... and other options
});
\`\`\`
## Options
All [options from DiceBear](https://avatars.dicebear.com/docs/options) and additionally the following:
{{#each properties}}
### {{@key}}
type: \`{{this.type}}\`
{{#if this.items.enum}}
allowed: {{#each this.items.enum}}{{#if @key}}, {{/if}}\`{{this}}\`{{/each}}
{{/if}}
{{#if this.items.[0].enum}}
allowed: {{#each this.items.enum}}{{#if @key}}, {{/if}}\`{{this}}\`{{/each}} or an \`hex color\`
{{/if}}
{{#if this.default.[0]}}
default: \`[{{#each this.default}}{{#if @key}}, {{/if}}'{{this}}'{{/each}}]\`
{{else}}
default: \`{{this.default}}\`
{{/if}}
{{/each}}
## Build this package
\`\`\`
npm run build
\`\`\`
## Test this package
\`\`\`
npm run test
\`\`\`
`,
// jest.config.js
'jest.config.js': `
module.exports = {
preset: 'ts-jest',
testEnvironment: 'node',
};
`,
// package.json
'package.json': `
{
"name": "{{{packageName}}}",
"version": "{{{packageVersion}}}",
"description": "Avatar style for DiceBear",
"keywords": ["dicebear"],
"bugs": {
"url": "https://github.com/dicebear/dicebear/issues"
},
"repository": {
"type": "git",
"url": "git+https://github.com/dicebear/dicebear.git"
},
"license": "MIT",
"source": "src/index.ts",
"main": "dist/index.js",
"module": "dist/index.es.js",
"browser": "dist/index.umd.js",
"types": "dist/index.d.ts",
"files": [
"LICENSE",
"README.md",
"dist"
],
"scripts": {
"test": "jest",
"prepublishOnly": "npm run build",
"prebuild": "shx rm -rf dist",
"build": "dicebear-project build {{umdName}}"
},
"dependencies": {},
"devDependencies": {
"@dicebear/avatars": "^4.7.4",
"@tsconfig/recommended": "^1.0.0",
"@types/jest": "^26.0.22",
"@types/node": "^10.11.6",
"dicebear-project": "^4.7.4",
"jest": "^26.6.3",
"shx": "^0.3.3",
"ts-jest": "^26.5.4",
"typescript": "^4.2.3",
"utility-types": "^3.10.0"
},
"peerDependencies": {
"@dicebear/avatars": "^4.6.0"
},
"publishConfig": {
"access": "public"
}
}
`,
// tsconfig.json
'tsconfig.json': `
{
"extends": "@tsconfig/recommended/tsconfig.json",
"compilerOptions": {
"declaration": true,
"outDir": "./dist",
},
"include": ["./src/**/*.ts"]
}
`,
// tests/create.test.ts
'tests/create.test.ts': `
import * as avatars from '@dicebear/avatars';
import { StyleOptions } from '@dicebear/avatars';
import * as style from '../dist';
import * as fs from 'fs';
import * as path from 'path';
const data: Array<StyleOptions<style.Options>> = [
{ seed: 'John Doe' },
{ seed: 'Jane Doe' },
{ seed: 'Florian' },
{ seed: 'Aneka' },
{ seed: 'Felix' },
];
data.forEach((options, key) => {
test(\`Create avatar #$\{key}\`, async () => {
const svgComponent = path.resolve(__dirname, 'svg', \`$\{key}.svg\`);
if (false === fs.existsSync(svgComponent)) {
if (false === fs.existsSync(path.dirname(svgComponent))) {
fs.mkdirSync(path.dirname(svgComponent));
}
fs.writeFileSync(svgComponent, avatars.createAvatar(style, options), { encoding: 'utf-8' });
}
const svg = fs.readFileSync(svgComponent, { encoding: 'utf-8' });
expect(avatars.createAvatar(style, options)).toEqual(svg);
expect(new avatars.default(style.default, options).create(options.seed)).toEqual(svg);
});
});
`,
// src/index.ts
'src/index.ts': `
/*!
* {{{title}}} ({{packageName}})
*
* Code licensed under MIT License.
* Copyright (c) {{year}} Florian Körner
{{#if contributor}}
* Copyright (c) {{year}} {{{contributor}}}
{{/if}}
*
* Design "{{{title}}}"{{#if creator}} by {{{creator}}}{{/if}}{{#if licenseName}} licensed under {{{licenseName}}}{{/if}}.
{{#if source}}
* Source: {{{source}}}
{{/if}}
{{#if licenseUrl}}
* License: {{{licenseUrl}}}
{{/if}}
*/
import { utils } from '@dicebear/avatars';
import { style } from './core';
let { create, meta, schema } = style;
export { create, meta, schema };
export { Options } from './options';
/** @deprecated will be removed in Version 5.0 */
export default utils.style.createLegacyWrapper(style);
`,
// src/core.ts
'src/core.ts': `
import type { Style } from '@dicebear/avatars';
import type { Options } from './options';
import type { ComponentPickCollection, ColorPickCollection } from './static-types';
import { schema } from './schema';
import { pickComponent } from './utils/pickComponent';
import { pickColor } from './utils/pickColor';
import { onPreCreate } from './hooks/onPreCreate';
import { onPostCreate } from './hooks/onPostCreate';
export const style: Style<Options> = {
meta: {
title: '{{{title}}}',
{{#if creator}}
creator: '{{{creator}}}',
{{/if}}
{{#if contributor}}
contributor: '{{{contributor}}}',
{{/if}}
{{#if source}}
source: '{{{source}}}',
{{/if}}
{{#if licenseName}}
license: {
name: '{{{licenseName}}}',
{{#if licenseUrl}}
url: '{{{licenseUrl}}}',
{{/if}}
},
{{/if}}
},
schema,
create: ({ prng, options }) => {
onPreCreate({ prng, options });
{{#each components}}
const {{@key}}Component = pickComponent(prng, '{{@key}}', options.{{@key}});
{{/each}}
const components: ComponentPickCollection = {
{{#each components}}
{{#isNull this.settings.probability.length}}
'{{@key}}': {{@key}}Component,
{{else}}
'{{@key}}': prng.bool(options.{{@key}}Probability) ? {{@key}}Component : undefined,
{{/isNull}}
{{/each}}
}
const colors: ColorPickCollection = {
{{#each colors}}
{{#if this.isUsedByComponents}}
'{{@key}}': pickColor(prng, '{{@key}}', options.{{@key}}Color ?? []),
{{/if}}
{{/each}}
}
{{#if backgroundColorGroupName}}
const backgroundColor = typeof options.backgroundColor === 'string' ? [options.backgroundColor] : options.backgroundColor;
options.backgroundColor = pickColor(prng, '{{backgroundColorGroupName}}', backgroundColor ?? []).value;
{{/if}}
onPostCreate({ prng, options, components, colors });
return {
attributes: {
viewBox: '0 0 {{size}} {{size}}',
fill: 'none',
},
body: {{{body}}},
};
},
};
`,
// src/static-types.ts
'src/static-types.ts': `
export type ColorGroup = Record<string, ColorGroupItem>;
export type ColorGroupCollection = Record<string, ColorGroup>;
export type ColorGroupItem = string;
export type ColorPickCollection = Record<string, ColorPick>;
export type ColorPick = {
name: string;
value: ColorGroupItem;
};
export type ComponentGroup = Record<string, ComponentGroupItem>;
export type ComponentGroupCollection = Record<string, ComponentGroup>;
export type ComponentGroupItem = (components: ComponentPickCollection, colors: ColorPickCollection) => string;
export type ComponentPickCollection = Record<string, ComponentPick>;
export type ComponentPick =
| {
name: string;
value: ComponentGroupItem;
}
| undefined;
`,
// src/colors/index.ts
'src/colors/index.ts': `
{{#each colors}}
{{#if this.isUsedByComponents}}
export { {{@key}} } from './{{@key}}';
{{else}}
{{#isEqual @key ../backgroundColorGroupName}}
export { {{@key}} } from './{{@key}}';
{{/isEqual}}
{{/if}}
{{/each}}
`,
// src/colors/{{name}}.ts
'src/colors/{{name}}.ts': `
import type { ColorGroup } from "../static-types";
export const {{name}}: ColorGroup = {
{{#each colors}}
'{{@key}}': 'rgba({{this.value.r}}, {{this.value.g}}, {{this.value.b}}, {{this.value.a}})',
{{/each}}
}
`,
// src/components/index.ts
'src/components/index.ts': `
{{#each components}}
export { {{@key}} } from './{{@key}}';
{{/each}}
`,
// src/components/{{name}}.ts
'src/components/{{name}}.ts': `
import type { ComponentGroup, ComponentPickCollection, ColorPickCollection } from '../static-types';
export const {{name}}: ComponentGroup = {
{{#each components}}
'{{@key}}': (components: ComponentPickCollection, colors: ColorPickCollection) => {{{this}}},
{{/each}}
}
`,
// src/utils/pickColor.ts
'src/utils/pickColor.ts': `
import type { Prng } from '@dicebear/avatars';
import type { ColorGroupCollection, ColorPick } from '../static-types';
import * as colors from '../colors';
export function pickColor(prng: Prng, group: string, values: string[]): ColorPick {
const colorCollection: ColorGroupCollection = colors;
if (values.length === 0) {
values.push('transparent');
}
const key = prng.pick(values);
return {
name: key,
value: colorCollection[group][key] ?? key,
};
}
`,
// src/utils/pickComponent.ts
'src/utils/pickComponent.ts': `
import type { Prng } from '@dicebear/avatars';
import type { ComponentGroupCollection, ComponentPick } from '../static-types';
import * as components from '../components';
export function pickComponent(prng: Prng, group: string, values: string[] = []): ComponentPick {
const componentCollection: ComponentGroupCollection = components;
const key = prng.pick(values);
if (componentCollection[group][key]) {
return {
name: key,
value: componentCollection[group][key],
};
} else {
return undefined;
}
}
`,
// src/hooks/onPreCreate.ts
'src/hooks/onPreCreate.ts': `
import { Prng, StyleOptions } from "@dicebear/avatars";
import { Options } from "../options";
type Props = { prng: Prng, options: StyleOptions<Options> }
export function onPreCreate({ prng, options }: Props) {
{{#if content}}
{{{content}}}
{{else}}
// Write your modifications here
{{/if}}
}
`,
// src/hooks/onPostCreate.ts
'src/hooks/onPostCreate.ts': `
import { Prng, StyleOptions } from "@dicebear/avatars";
import { Options } from "../options";
import { ColorPickCollection, ComponentPickCollection } from "../static-types";
type Props = { prng: Prng, options: StyleOptions<Options>, components: ComponentPickCollection, colors: ColorPickCollection }
export function onPostCreate({ prng, options, components, colors }: Props) {
{{#if content}}
{{{content}}}
{{else}}
// Write your modifications here
{{/if}}
}
`,
}; | the_stack |
import React, { useState } from 'react';
import { Components, registerComponent } from '../../lib/vulcan-lib';
import { useUpdateCurrentUser } from '../hooks/useUpdateCurrentUser';
import { Link } from '../../lib/reactRouterWrapper';
import NoSSR from 'react-no-ssr';
import Headroom from '../../lib/react-headroom'
import { useTheme } from '../themes/useTheme';
import Toolbar from '@material-ui/core/Toolbar';
import IconButton from '@material-ui/core/IconButton';
import MenuIcon from '@material-ui/icons/Menu';
import TocIcon from '@material-ui/icons/Toc';
import { useCurrentUser } from '../common/withUser';
import withErrorBoundary from '../common/withErrorBoundary';
import classNames from 'classnames';
import { AnalyticsContext, useTracking } from '../../lib/analyticsEvents';
import { forumTypeSetting, PublicInstanceSetting } from '../../lib/instanceSettings';
// Full site title shown in the header on wide viewports (e.g. "LESSWRONG").
const forumHeaderTitleSetting = new PublicInstanceSetting<string>('forumSettings.headerTitle', "LESSWRONG", "warning")
// Abbreviated title shown in the header on narrow/mobile viewports (e.g. "LW").
const forumShortTitleSetting = new PublicInstanceSetting<string>('forumSettings.shortForumTitle', "LW", "warning")
// JSS styles for the Header component. The `root` heights must stay in sync
// with the `height` prop passed to the Headroom component in the render below.
const styles = (theme: ThemeType): JssStyles => ({
  // The visible app bar element itself.
  appBar: {
    boxShadow: theme.palette.boxShadow.appBar,
    color: theme.palette.header.text,
    backgroundColor: theme.palette.header.background,
    position: "static",
    width: "100%",
    display: "flex",
    zIndex: 1100,
    boxSizing: "border-box",
    flexShrink: 0,
    flexDirection: "column",
  },
  root: {
    // This height (including the breakpoint at xs/600px) is set by Headroom, and this wrapper (which surrounds
    // Headroom and top-pads the page) has to match.
    height: 64,
    [theme.breakpoints.down('xs')]: {
      height: 56,
    },
    flexGrow: 1,
    "@media print": {
      display: "none"
    }
  },
  titleSubtitleContainer: {
    display: 'flex',
    alignItems: 'center'
  },
  title: {
    flex: 1,
    position: "relative",
    top: 3,
    paddingRight: theme.spacing.unit,
    color: theme.palette.text.secondary,
  },
  titleLink: {
    color: theme.palette.header.text,
    fontSize: 19,
    '&:hover, &:focus, &:active': {
      textDecoration: 'none',
      opacity: 0.7,
    },
    display: 'flex',
    alignItems: 'center',
  },
  menuButton: {
    marginLeft: -theme.spacing.unit,
    marginRight: theme.spacing.unit,
  },
  siteLogo: {
    marginLeft: -theme.spacing.unit * 1.5,
  },
  // Responsive visibility helpers: hide an element above/below a breakpoint.
  hideLgUp: {
    [theme.breakpoints.up('lg')]: {
      display:"none"
    }
  },
  hideMdDown: {
    [theme.breakpoints.down('md')]: {
      display:"none"
    }
  },
  hideSmDown: {
    [theme.breakpoints.down('sm')]: {
      display: "none",
    },
  },
  hideMdUp: {
    [theme.breakpoints.up('md')]: {
      display: "none",
    },
  },
  rightHeaderItems: {
    marginRight: -theme.spacing.unit,
    display: "flex",
  },
  // Prevent rearranging of mobile header when search loads after SSR
  searchSSRStandin: {
    minWidth: 48
  },
  headroom: {
    // Styles for header scrolling, provided by react-headroom
    // https://github.com/KyleAMathews/react-headroom
    "& .headroom": {
      top: 0,
      left: 0,
      right: 0,
      zIndex: 1300,
    },
    "& .headroom--unfixed": {
      position: "relative",
      transform: "translateY(0)",
    },
    "& .headroom--scrolled": {
      transition: "transform 200ms ease-in-out",
    },
    "& .headroom--unpinned": {
      position: "fixed",
      transform: "translateY(-100%)",
    },
    "& .headroom--pinned": {
      position: "fixed",
      transform: "translateY(0%)",
    },
  },
  // Applied while the search bar is open: overrides Headroom's unpinned state
  // so the header stays visible regardless of scroll direction.
  headroomPinnedOpen: {
    "& .headroom--unpinned": {
      transform: "none !important",
    },
    "& .headroom--unfixed": {
      position: "fixed !important",
    },
  },
});
/**
 * Site header: navigation menu button, site title/logo, search bar, user menu
 * and notifications. Wrapped in a Headroom component so it hides when
 * scrolling down and reappears when scrolling up.
 *
 * Props:
 *  - standaloneNavigationPresent: whether a free-floating nav sidebar exists;
 *    changes what the menu button toggles.
 *  - toggleStandaloneNavigation: opens/closes that standalone sidebar.
 *  - toc: table of contents for the current page (if any); when present, the
 *    mobile menu button shows a ToC icon instead of the hamburger.
 *  - searchResultsArea: ref to the DOM element search results render into.
 */
const Header = ({standaloneNavigationPresent, toggleStandaloneNavigation, toc, searchResultsArea, classes}: {
  standaloneNavigationPresent: boolean,
  toggleStandaloneNavigation: ()=>void,
  toc: any,
  searchResultsArea: React.RefObject<HTMLDivElement>,
  classes: ClassesType,
}) => {
  const [navigationOpen, setNavigationOpenState] = useState(false);
  const [notificationOpen, setNotificationOpen] = useState(false);
  // Whether the notifications drawer has ever been opened (lets its contents
  // stay mounted after the first open).
  const [notificationHasOpened, setNotificationHasOpened] = useState(false);
  const [searchOpen, setSearchOpenState] = useState(false);
  // True while the header is "unfixed" (page scrolled to the top), per
  // Headroom's onUnfix/onUnpin callbacks below.
  const [unFixed, setUnFixed] = useState(true);
  const currentUser = useCurrentUser();
  const { captureEvent } = useTracking()
  const updateCurrentUser = useUpdateCurrentUser();
  // NOTE(review): `theme` is not referenced below — useTheme() may be called
  // only for its context subscription; confirm before removing.
  const theme = useTheme();
  // Open/close the navigation drawer, recording an analytics event.
  const setNavigationOpen = (open: boolean) => {
    setNavigationOpenState(open);
    captureEvent("navigationBarToggle", {open: open})
  }
  // Open/close the notifications drawer. Opening also marks notifications as
  // checked for the current user. No-op when logged out.
  const handleSetNotificationDrawerOpen = (isOpen: boolean): void => {
    if (!currentUser) return;
    if (isOpen) {
      void updateCurrentUser({lastNotificationsCheck: new Date()});
      setNotificationOpen(true);
      setNotificationHasOpened(true);
    } else {
      setNotificationOpen(false);
    }
  }
  const handleNotificationToggle = () => {
    if (!currentUser) return;
    const { lastNotificationsCheck } = currentUser
    captureEvent("notificationsIconToggle", {open: !notificationOpen, previousCheck: lastNotificationsCheck})
    handleSetNotificationDrawerOpen(!notificationOpen);
  }
  // We do two things when the search is open:
  //  1) Pin the header open with the Headroom component
  //  2) Hide the username on mobile so users with long usernames can still
  //     enter search queries
  // Called by SearchBar.
  const setSearchOpen = (isOpen: boolean) => {
    if (isOpen) { captureEvent("searchToggle", {"open": isOpen}) }
    setSearchOpenState(isOpen);
  }
  const renderNavigationMenuButton = () => {
    // The navigation menu button either toggles a free floating sidebar, opens
    // a drawer with site navigation, or a drawer with table of contents. (This
    // is structured a little oddly because the hideSmDown/hideMdUp filters
    // cause a misalignment if they're in the wrong part of the tree.)
    return <React.Fragment>
      {toc?.sections
        ? <>
          <div className={classes.hideSmDown}>
            <IconButton
              className={classNames(
                classes.menuButton,
                {[classes.hideLgUp]: standaloneNavigationPresent && unFixed}
              )}
              color="inherit"
              aria-label="Menu"
              onClick={()=>setNavigationOpen(true)}
            >
              <MenuIcon />
            </IconButton>
          </div>
          <div className={classes.hideMdUp}>
            <IconButton
              className={classNames(
                classes.menuButton,
                {[classes.hideLgUp]: standaloneNavigationPresent && unFixed}
              )}
              color="inherit"
              aria-label="Menu"
              onClick={()=>setNavigationOpen(true)}
            >
              <TocIcon />
            </IconButton>
          </div>
        </>
        : <IconButton
            className={classNames(
              classes.menuButton,
              {[classes.hideLgUp]: standaloneNavigationPresent && unFixed}
            )}
            color="inherit"
            aria-label="Menu"
            onClick={()=>setNavigationOpen(true)}
          >
            <MenuIcon/>
          </IconButton>
      }
      {standaloneNavigationPresent && unFixed && <IconButton
        className={classNames(
          classes.menuButton,
          classes.hideMdDown
        )}
        color="inherit"
        aria-label="Menu"
        onClick={toggleStandaloneNavigation}
      >
        <MenuIcon />
      </IconButton>}
    </React.Fragment>
  }
  const hasLogo = forumTypeSetting.get() === 'EAForum'
  const {
    SearchBar, UsersMenu, UsersAccountMenu, NotificationsMenuButton, NavigationDrawer,
    NotificationsMenu, KarmaChangeNotifier, HeaderSubtitle, Typography
  } = Components;
  return (
    <AnalyticsContext pageSectionContext="header">
      <div className={classes.root}>
        <Headroom
          disableInlineStyles
          downTolerance={10} upTolerance={10}
          height={64}
          className={classNames(
            classes.headroom,
            { [classes.headroomPinnedOpen]: searchOpen }
          )}
          onUnfix={() => setUnFixed(true)}
          onUnpin={() => setUnFixed(false)}
        >
          <header className={classes.appBar}>
            <Toolbar disableGutters={forumTypeSetting.get() === 'EAForum'}>
              {renderNavigationMenuButton()}
              <Typography className={classes.title} variant="title">
                {/* Full title on desktop, short title on mobile */}
                <div className={classes.hideSmDown}>
                  <div className={classes.titleSubtitleContainer}>
                    <Link to="/" className={classes.titleLink}>
                      {hasLogo && <div className={classes.siteLogo}><Components.SiteLogo/></div>}
                      {forumHeaderTitleSetting.get()}
                    </Link>
                    <HeaderSubtitle />
                  </div>
                </div>
                <div className={classes.hideMdUp}>
                  <Link to="/" className={classes.titleLink}>
                    {hasLogo && <div className={classes.siteLogo}><Components.SiteLogo/></div>}
                    {forumShortTitleSetting.get()}
                  </Link>
                </div>
              </Typography>
              <div className={classes.rightHeaderItems}>
                <NoSSR onSSR={<div className={classes.searchSSRStandin} />} >
                  <SearchBar onSetIsActive={setSearchOpen} searchResultsArea={searchResultsArea} />
                </NoSSR>
                {currentUser && <div className={searchOpen ? classes.hideMdDown : undefined}>
                  <AnalyticsContext pageSectionContext="usersMenu">
                    <UsersMenu />
                  </AnalyticsContext>
                </div>}
                {!currentUser && <UsersAccountMenu />}
                {currentUser && <KarmaChangeNotifier documentId={currentUser._id}/>}
                {currentUser && <NotificationsMenuButton
                  toggle={handleNotificationToggle}
                  open={notificationOpen}
                  currentUser={currentUser}
                />}
              </div>
            </Toolbar>
          </header>
          <NavigationDrawer
            open={navigationOpen}
            handleOpen={() => setNavigationOpen(true)}
            handleClose={() => setNavigationOpen(false)}
            toc={toc}
          />
        </Headroom>
        {currentUser && <NotificationsMenu
          open={notificationOpen}
          hasOpened={notificationHasOpened}
          setIsOpen={handleSetNotificationDrawerOpen}
        />}
      </div>
    </AnalyticsContext>
  )
}
// Register with vulcan-lib so the component is available as Components.Header,
// with the JSS styles injected and an error boundary wrapped around it.
const HeaderComponent = registerComponent('Header', Header, {
  styles,
  hocs: [withErrorBoundary]
});
// Add the registered component to the global ComponentTypes map used to
// type-check Components.* lookups. Type-only declaration; erased at runtime.
declare global {
  interface ComponentTypes {
    Header: typeof HeaderComponent
  }
}
import { TestHelper } from './TestHelper';
describe('noStringBasedSetTimeoutRule', (): void => {
const RULE_NAME: string = 'no-string-based-set-timeout';
it('should not throw error - case 1', (): void => {
const inputFile: string = 'test-data/NoStringBasedSetTimeout/NoStringBasedSetTimeoutTestInput-error.ts';
TestHelper.assertViolationsWithTypeChecker(RULE_NAME, inputFile, []);
});
it('should not throw error - case 2', (): void => {
const inputFile: string = 'test-data/NoStringBasedSetTimeout/NoStringBasedSetTimeoutTestInput-case2.ts';
TestHelper.assertViolationsWithTypeChecker(RULE_NAME, inputFile, []);
});
it('should pass when function parameter is Function', (): void => {
const inputFile: string = 'test-data/NoStringBasedSetTimeout/NoStringBasedSetTimeoutTestInput-functionParamFunction.ts';
TestHelper.assertViolationsWithTypeChecker(RULE_NAME, inputFile, []);
});
it('should support type inference on shadowed variables', (): void => {
const inputFile: string = 'test-data/NoStringBasedSetTimeout/NoStringBasedSetTimeoutTestInput-shadowedVariables.ts';
TestHelper.assertViolationsWithTypeChecker(RULE_NAME, inputFile, [
{
failure: 'Forbidden setTimeout string parameter: moduleProp1',
name: inputFile,
ruleName: 'no-string-based-set-timeout',
startPosition: { character: 9, line: 17 }
},
{
failure: 'Forbidden setTimeout string parameter: globalFunction1',
name: inputFile,
ruleName: 'no-string-based-set-timeout',
startPosition: { character: 9, line: 19 }
},
{
failure: 'Forbidden setTimeout string parameter: globalProp2',
name: inputFile,
ruleName: 'no-string-based-set-timeout',
startPosition: { character: 9, line: 20 }
},
{
failure: 'Forbidden setTimeout string parameter: globalProp1',
name: inputFile,
ruleName: 'no-string-based-set-timeout',
startPosition: { character: 13, line: 25 }
},
{
failure: 'Forbidden setTimeout string parameter: moduleProp1',
name: inputFile,
ruleName: 'no-string-based-set-timeout',
startPosition: { character: 13, line: 26 }
},
{
failure: 'Forbidden setTimeout string parameter: globalProp1',
name: inputFile,
ruleName: 'no-string-based-set-timeout',
startPosition: { character: 21, line: 35 }
},
{
failure: 'Forbidden setTimeout string parameter: globalProp3',
name: inputFile,
ruleName: 'no-string-based-set-timeout',
startPosition: { character: 21, line: 37 }
},
{
failure: 'Forbidden setTimeout string parameter: globalFunction1',
name: inputFile,
ruleName: 'no-string-based-set-timeout',
startPosition: { character: 21, line: 38 }
},
{
failure: 'Forbidden setTimeout string parameter: moduleProp1',
name: inputFile,
ruleName: 'no-string-based-set-timeout',
startPosition: { character: 21, line: 40 }
},
{
failure: 'Forbidden setTimeout string parameter: moduleProp2',
name: inputFile,
ruleName: 'no-string-based-set-timeout',
startPosition: { character: 21, line: 41 }
},
{
failure: 'Forbidden setTimeout string parameter: moduleProp1',
name: inputFile,
ruleName: 'no-string-based-set-timeout',
startPosition: { character: 13, line: 48 }
}
]);
});
it('should not throw error - case 3', (): void => {
const inputFile: string = 'test-data/NoStringBasedSetTimeout/NoStringBasedSetTimeoutTestInput-error3.ts';
TestHelper.assertViolationsWithTypeChecker(RULE_NAME, inputFile, []);
});
it('should not throw error - case 4', (): void => {
const inputFile: string = 'test-data/NoStringBasedSetTimeout/NoStringBasedSetTimeoutTestInput-error4.ts';
TestHelper.assertViolationsWithTypeChecker(RULE_NAME, inputFile, [
{
failure: 'Forbidden setTimeout string parameter: this.onAnimationEnd()',
name: 'test-data/NoStringBasedSetTimeout/NoStringBasedSetTimeoutTestInput-error4.ts',
ruleName: 'no-string-based-set-timeout',
startPosition: { line: 10, character: 13 }
}
]);
});
it('should not throw error - case 5', (): void => {
const inputFile: string = 'test-data/NoStringBasedSetTimeout/NoStringBasedSetTimeoutTestInput-error5.ts';
TestHelper.assertViolationsWithTypeChecker(RULE_NAME, inputFile, []);
});
it('should not produce violations', (): void => {
const inputFile: string = 'test-data/NoStringBasedSetTimeout/NoStringBasedSetTimeoutTestInput-case3.ts';
TestHelper.assertViolationsWithTypeChecker(RULE_NAME, inputFile, []);
});
it('should produce violations for string literals', (): void => {
const inputFile: string = 'test-data/NoStringBasedSetTimeout/NoStringBasedSetTimeoutFailingTestInput-string-literals.ts';
TestHelper.assertViolationsWithTypeChecker(RULE_NAME, inputFile, [
{
ruleName: 'no-string-based-set-timeout',
failure: 'Forbidden setTimeout string parameter: "var x = \'should fail\'"',
name: 'test-data/NoStringBasedSetTimeout/NoStringBasedSetTimeoutFailingTestInput-string-literals.ts',
startPosition: { line: 2, character: 1 }
},
{
ruleName: 'no-string-based-set-timeout',
failure: 'Forbidden setTimeout string parameter: "var x = \'should fail\'"',
name: 'test-data/NoStringBasedSetTimeout/NoStringBasedSetTimeoutFailingTestInput-string-literals.ts',
startPosition: { line: 3, character: 1 }
},
{
ruleName: 'no-string-based-set-timeout',
failure: 'Forbidden setTimeout string parameter: "var x = \'should fail\'"',
name: 'test-data/NoStringBasedSetTimeout/NoStringBasedSetTimeoutFailingTestInput-string-literals.ts',
startPosition: { line: 4, character: 1 }
}
]);
});
it('should produce violations for string variables', (): void => {
const inputFile: string = 'test-data/NoStringBasedSetTimeout/NoStringBasedSetTimeoutFailingTestInput-string-variables.ts';
TestHelper.assertViolationsWithTypeChecker(RULE_NAME, inputFile, [
{
ruleName: 'no-string-based-set-timeout',
failure: 'Forbidden setTimeout string parameter: typedStringVariable',
name: 'test-data/NoStringBasedSetTimeout/NoStringBasedSetTimeoutFailingTestInput-string-variables.ts',
startPosition: { line: 4, character: 1 }
},
{
ruleName: 'no-string-based-set-timeout',
failure: 'Forbidden setTimeout string parameter: typedStringVariable',
name: 'test-data/NoStringBasedSetTimeout/NoStringBasedSetTimeoutFailingTestInput-string-variables.ts',
startPosition: { line: 5, character: 1 }
},
{
ruleName: 'no-string-based-set-timeout',
failure: 'Forbidden setTimeout string parameter: typedStringVariable',
name: 'test-data/NoStringBasedSetTimeout/NoStringBasedSetTimeoutFailingTestInput-string-variables.ts',
startPosition: {
line: 6,
character: 1
}
}
]);
});
it('should produce violations for any variables', (): void => {
const inputFile: string = 'test-data/NoStringBasedSetTimeout/NoStringBasedSetTimeoutFailingTestInput-any-variables.ts';
TestHelper.assertViolationsWithTypeChecker(RULE_NAME, inputFile, [
{
ruleName: 'no-string-based-set-timeout',
name: 'test-data/NoStringBasedSetTimeout/NoStringBasedSetTimeoutFailingTestInput-any-variables.ts',
failure: 'Forbidden setTimeout string parameter: anyVariable',
startPosition: { line: 4, character: 1 }
},
{
ruleName: 'no-string-based-set-timeout',
name: 'test-data/NoStringBasedSetTimeout/NoStringBasedSetTimeoutFailingTestInput-any-variables.ts',
failure: 'Forbidden setTimeout string parameter: anyVariable',
startPosition: { line: 5, character: 1 }
},
{
ruleName: 'no-string-based-set-timeout',
failure: 'Forbidden setTimeout string parameter: anyVariable',
name: 'test-data/NoStringBasedSetTimeout/NoStringBasedSetTimeoutFailingTestInput-any-variables.ts',
startPosition: { line: 6, character: 1 }
}
]);
});
it('should produce violations for any functions', (): void => {
const inputFile: string = 'test-data/NoStringBasedSetTimeout/NoStringBasedSetTimeoutFailingTestInput-any-functions.ts';
TestHelper.assertViolationsWithTypeChecker(RULE_NAME, inputFile, [
{
failure: 'Forbidden setTimeout string parameter: untypedCreateFunction()',
name: 'test-data/NoStringBasedSetTimeout/NoStringBasedSetTimeoutFailingTestInput-any-functions.ts',
ruleName: 'no-string-based-set-timeout',
startPosition: { line: 4, character: 1 }
},
{
failure: 'Forbidden setTimeout string parameter: untypedCreateFunction()',
name: 'test-data/NoStringBasedSetTimeout/NoStringBasedSetTimeoutFailingTestInput-any-functions.ts',
ruleName: 'no-string-based-set-timeout',
startPosition: { line: 5, character: 1 }
},
{
failure: 'Forbidden setTimeout string parameter: untypedCreateFunction()',
name: 'test-data/NoStringBasedSetTimeout/NoStringBasedSetTimeoutFailingTestInput-any-functions.ts',
ruleName: 'no-string-based-set-timeout',
startPosition: { line: 6, character: 1 }
}
]);
});
it('should produce violations for string functions', (): void => {
const inputFile: string = 'test-data/NoStringBasedSetTimeout/NoStringBasedSetTimeoutFailingTestInput-string-functions.ts';
TestHelper.assertViolationsWithTypeChecker(RULE_NAME, inputFile, [
{
failure: 'Forbidden setTimeout string parameter: stringFunction()',
name: 'test-data/NoStringBasedSetTimeout/NoStringBasedSetTimeoutFailingTestInput-string-functions.ts',
ruleName: 'no-string-based-set-timeout',
startPosition: { line: 6, character: 1 }
},
{
failure: 'Forbidden setTimeout string parameter: stringFunction()',
name: 'test-data/NoStringBasedSetTimeout/NoStringBasedSetTimeoutFailingTestInput-string-functions.ts',
ruleName: 'no-string-based-set-timeout',
startPosition: { line: 7, character: 1 }
},
{
failure: 'Forbidden setTimeout string parameter: stringFunction()',
name: 'test-data/NoStringBasedSetTimeout/NoStringBasedSetTimeoutFailingTestInput-string-functions.ts',
ruleName: 'no-string-based-set-timeout',
startPosition: { line: 8, character: 1 }
}
]);
});
it('should produce violations for parameters', (): void => {
const inputFile: string = 'test-data/NoStringBasedSetTimeout/NoStringBasedSetTimeoutFailingTestInput-parameters.ts';
TestHelper.assertViolationsWithTypeChecker(RULE_NAME, inputFile, [
{
failure: 'Forbidden setTimeout string parameter: stringArg',
name: inputFile,
ruleName: 'no-string-based-set-timeout',
startPosition: { line: 3, character: 5 }
},
{
failure: 'Forbidden setTimeout string parameter: anyArg',
name: inputFile,
ruleName: 'no-string-based-set-timeout',
startPosition: { line: 6, character: 5 }
}
]);
});
it('should not produce violations what used to be a false positive case', (): void => {
const inputFile: string = 'test-data/NoStringBasedSetTimeout/NoStringBasedSetTimeoutTestInput-formerFalsePositive.ts';
TestHelper.assertViolationsWithTypeChecker(RULE_NAME, inputFile, []);
});
it('should not fail within a constructor', (): void => {
const inputFile: string = 'test-data/NoStringBasedSetTimeout/NoStringBasedSetTimeoutFailingTestInput-constructor.ts';
TestHelper.assertViolationsWithTypeChecker(RULE_NAME, inputFile, [
{
failure: 'Forbidden setTimeout string parameter: arg1',
name: inputFile,
ruleName: 'no-string-based-set-timeout',
startPosition: { character: 9, line: 3 }
}
]);
});
it('should create violations on template strings', (): void => {
const inputFile: string = 'test-data/NoStringBasedSetTimeout/NoStringBasedSetTimeoutFailingTestInput-template-strings.ts';
TestHelper.assertViolationsWithTypeChecker(RULE_NAME, inputFile, [
{
// tslint:disable-next-line:no-invalid-template-strings
failure: 'Forbidden setTimeout string parameter: `${data}`',
name: inputFile,
ruleName: 'no-string-based-set-timeout',
startPosition: { character: 1, line: 2 }
}
]);
});
it('should pass all Issue #46 usages', (): void => {
const inputFile: string = 'test-data/NoStringBasedSetTimeout/NoStringBasedSetTimeoutTestInput-issue46.ts';
TestHelper.assertViolationsWithTypeChecker(RULE_NAME, inputFile, [
{
failure: "Forbidden setTimeout string parameter: 'alert(' + alertNum + ')'",
name: inputFile,
ruleName: 'no-string-based-set-timeout',
startPosition: { character: 9, line: 13 }
},
{
failure: "Forbidden setTimeout string parameter: 'alert(' + alertNum + ')'",
name: inputFile,
ruleName: 'no-string-based-set-timeout',
startPosition: { character: 9, line: 14 }
},
{
// tslint:disable-next-line:no-invalid-template-strings
failure: 'Forbidden setTimeout string parameter: `alert(${alertNum})`',
name: inputFile,
ruleName: 'no-string-based-set-timeout',
startPosition: { character: 9, line: 17 }
},
{
// tslint:disable-next-line:no-invalid-template-strings
failure: 'Forbidden setTimeout string parameter: `alert(${alertNum})`',
name: inputFile,
ruleName: 'no-string-based-set-timeout',
startPosition: { character: 9, line: 18 }
}
]);
});
it('should produce violation for string variable - case 4', (): void => {
const inputFile: string = 'test-data/NoStringBasedSetTimeout/NoStringBasedSetTimeoutTestInput-case4.ts';
TestHelper.assertViolationsWithTypeChecker(RULE_NAME, inputFile, [
{
ruleName: 'no-string-based-set-timeout',
failure: 'Forbidden setTimeout string parameter: typedStringVariable',
name: 'test-data/NoStringBasedSetTimeout/NoStringBasedSetTimeoutTestInput-case4.ts',
startPosition: { line: 9, character: 5 }
}
]);
});
}); | the_stack |
import * as fs from 'fs-extra';
import * as schema from './api-schema';
import {
ApiDefinitions, asArray, ExtendedApi, getTitle, readJsonDefs, hasChangeEvent, plainType, isInterfaceReference,
isUnion, isTuple, isMap, isIndexedMap, isCallback, getEventTypeName, getJSXChildElementType, supportsJsx,
getJSXContentType
} from './common';
import {join, parse, resolve} from 'path';
// A hyperlink target plus tooltip text, as rendered for type references.
type Link = {href?: string, title?: string};
// Maps a type name to the link it should render as.
type TypeLinks = {[type: string]: Link};
// Schema events augmented with their map key as a `name` property.
type NamedEvents = Array<schema.Event & {name: string}>;
// Source/target/snippet paths and the tabris version being documented.
type Config = {sourcePath: string, targetPath: string, snippetsPath: string, version: string};
type Member = schema.Method | schema.Method[] | schema.Property;
// One entry in the generated table of contents.
type TocEntry = {title: string, url: string, priority?: boolean};
// Characters used when assembling inline HTML for the rendered markdown.
const HTML_LT = '<';
const HTML_GT = '>';
const HTML_QUOT = '"';
const HTML_PIPE = '|';
// Name of the synthetic interface documented for property-change events.
const CHANGE_EVENT_INTERFACE = 'PropertyChangedEvent';
/** Joins the given fragments and wraps the result in quote characters. */
const quot = (...fragments) => [HTML_QUOT, ...fragments, HTML_QUOT].join('');
/** Joins the given fragments and wraps the result in angle brackets. */
const tag = (...fragments) => [HTML_LT, ...fragments, HTML_GT].join('');
/** Renders the given fragments inside a non-wrapping <code> element. */
const code = (...fragments) => ['<code style="white-space: nowrap">', ...fragments, '</code>'].join('');
/** Returns a run of `count` space characters. */
const nbsp = (count: number) => Array.from({length: count}, () => ' ').join('');
/**
 * Joins alternatives with a pipe separator. When an indent is given and there
 * is more than one alternative, each one goes on its own indented line.
 */
const pipeOr = (arr: string[], indent = null) => {
  if (indent !== null && arr.length > 1) {
    const separator = '<br/>' + nbsp(indent) + HTML_PIPE + ' ';
    return nbsp(indent) + arr.join(separator);
  }
  return arr.join(' ' + HTML_PIPE + ' ');
};
/** Tooltip text for a link pointing to MDN. */
const mdnTitle = type => 'View ' + quot(type) + ' on MDN';
const MSG_PROVISIONAL = '**Note:** this API is provisional and may change in a future release.';
// Links for built-in language/runtime types appearing in API signatures.
// Most entries point to MDN; "Selector"/"SelectorString" link to the tabris
// selector docs, and a few ("any", "this") carry a tooltip title only.
const LANG_TYPE_LINKS: TypeLinks = {
  Function: {
    href: 'https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Function',
    title: mdnTitle('Function')
  },
  Object: {
    href: 'https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object',
    title: mdnTitle('Object')
  },
  object: {
    href: 'https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object',
    title: mdnTitle('Object')
  },
  string: {
    href: 'https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#string_type',
    title: mdnTitle('string')
  },
  boolean: {
    href: 'https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#boolean_type',
    title: mdnTitle('boolean')
  },
  number: {
    href: 'https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#number_type',
    title: mdnTitle('number')
  },
  null: {
    href: 'https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#null_type',
    title: mdnTitle('null')
  },
  undefined: {
    href: 'https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#undefined_type',
    title: mdnTitle('undefined')
  },
  Promise: {
    href: 'https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise',
    title: mdnTitle('Promise')
  },
  IterableIterator: {
    href: 'https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols#the_iterator_protocol',
    title: mdnTitle('IterableIterator')
  },
  Array: {
    href: 'https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array',
    title: mdnTitle('Array')
  },
  ArrayBuffer: {
    href: 'https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer',
    title: mdnTitle('ArrayBuffer')
  },
  TypedArray: {
    href: 'https://developer.mozilla.org/en-US/docs/Web/JavaScript/Typed_arrays',
    title: mdnTitle('typed arrays')
  },
  Worker: {
    href: 'https://developer.mozilla.org/en-US/docs/Web/API/Worker',
    title: mdnTitle('Worker')
  },
  Selector: {
    href: '../selector.md',
    title: 'More about selectors'
  },
  SelectorString: {
    href: '../selector.md',
    title: 'More about selectors'
  },
  any: {
    title: 'Literally any JavaScript value'
  },
  this: {
    href: '#',
    title: 'This object'
  }
};
// Base URL of the Tabris.js online playground, used to build "Run in Playground" links.
const PLAYGROUND = 'https://playground.tabris.com/';
// GitHub "tree" URL prefixes; a branch or tag name ("master" or "v<version>") is appended.
const GITHUB_TREE = 'https://github.com/eclipsesource/tabris-js/tree/';
const DECORATORS_TREE = 'https://github.com/eclipsesource/tabris-decorators/tree/';
// Maps example-file extensions to the human-readable heading used in "Examples" sections.
const FILE_TYPES = {
  js: 'JavaScript',
  jsx: 'JSX',
  ts: 'TypeScript',
  tsx: 'TypeScript/JSX',
  console: 'Interactive Console'
};
// Hand-maintained extra table-of-contents entries, merged per category with the
// generated API pages when the toc.yml index is rendered (see renderIndex()).
const TOC_INCLUDE: {[cat: string]: TocEntry[]} = {
  core: [],
  service: [
    {
      title: 'Worker',
      url: 'w3c-api.html#worker'
    }
  ],
  widget: [
    {
      title: 'Overview',
      url: 'api/widget-overview.html',
      // "priority" entries sort before everything else (see compareTitles()).
      priority: true
    }
  ],
  popup: [],
  net: [
    {
      title: 'XMLHttpRequest',
      url: 'w3c-api.html#xmlhttprequest'
    },
    {
      title: 'WebSocket',
      url: 'w3c-api.html#websocket'
    }
  ],
  data: []
};
/**
 * Public entry point of the documentation build.
 * Constructs a generator for the given configuration and renders all output.
 * @param options source/target paths, snippet location and version
 */
exports.generateDoc = function generateDoc(options: Config) {
  new DocumentationGenerator(options).generate();
};
/**
 * Drives the whole documentation build: reads the JSON API definitions and
 * snippet files, copies static resources, and renders the widget overview
 * page, one markdown page per API type, and the table of contents.
 * Note that the constructor already performs I/O (reading snippets/defs and
 * copying resources); generate() must be called afterwards to write the pages.
 */
class DocumentationGenerator {
  public readonly targetPath: string;
  public readonly sourcePath: string;
  public readonly apiSourcePath: string;
  public readonly apiTargetPath: string;
  public readonly defs: ApiDefinitions;
  // Snippet file name (e.g. "button.jsx") -> path with forward slashes.
  public readonly snippets: {[name: string]: string} = {};
  // Seeded with the built-in language/MDN links; links for the API types
  // themselves are added in preProcessDefinitions().
  public readonly typeLinks: TypeLinks = Object.assign({}, LANG_TYPE_LINKS);
  public readonly version: string;
  constructor(options: Config) {
    // All options are mandatory; fail fast with the offending config attached.
    if (
      !options ||
      !options.snippetsPath ||
      !options.sourcePath ||
      !options.targetPath ||
      !options.version
    ) {
      throw new Error('Missing Option in ' + JSON.stringify(options));
    }
    this.version = options.version;
    this.targetPath = options.targetPath;
    this.sourcePath = options.sourcePath;
    this.apiSourcePath = join(options.sourcePath, 'api');
    this.apiTargetPath = join(options.targetPath, 'api');
    this.readSnippets(options.snippetsPath);
    this.defs = readJsonDefs(this.apiSourcePath);
    this.preProcessDefinitions();
    this.copyResources();
  }
  /** Renders all output files (overview page, API pages, toc.yml). */
  public generate() {
    this.renderOverview();
    this.renderAPI();
    this.renderIndex();
  }
  /**
   * Builds the GitHub URL for a snippet/example link. Pre-release versions
   * (version string contains "-") point at "master", releases at the
   * "v<version>" tag.
   */
  public renderGithubUrl(link: schema.Snippet) {
    const preRelease = this.version.indexOf('-') !== -1;
    if (link.repo === 'tabris-decorators') {
      return [
        DECORATORS_TREE,
        preRelease ? 'master' : 'v' + this.version,
        '/examples/',
        link.snippet
      ].join('');
    } else {
      return [
        GITHUB_TREE,
        preRelease ? 'master' : 'v' + this.version,
        // JSON files live in the schema folder, everything else under snippets.
        link.snippet.endsWith('json') ? '/schema/' : '/snippets/',
        link.snippet
      ].join('');
    }
  }
  /**
   * Copies everything except the api sources (but keeping api images) to the
   * target folder, then substitutes ${doc:...} variables in all copied
   * markdown files.
   */
  private copyResources() {
    fs.copySync(this.sourcePath, this.targetPath, {
      // Exclude files inside an "api" folder unless they are inside an "img" folder.
      filter: (path) => !/[\\/]api[\\/].+\./.test(path) || /[\\/]img[\\/].+\./.test(path)
    });
    getFiles(this.targetPath)
      .filter(path => /\.md$/.test(path))
      .forEach(path => fs.writeFileSync(
        path,
        this.replaceVariables(fs.readFileSync(path).toString(), '.')
      ));
  }
  /**
   * Registers type links for every definition and its related types, and
   * attaches snippets to each type and its members (properties, methods,
   * events, statics). Warns when a type ends up with no links at all.
   */
  private preProcessDefinitions() {
    Object.keys(this.defs).forEach(title => {
      if (this.defs[title].type) {
        this.addTypeLink(
          this.defs[title].type,
          `${parse(this.defs[title].file).name}.md`,
          title + (this.defs[title].object ? ' Object' : ' Class') + ' Reference'
        );
      }
      for (const related in (this.defs[title].relatedTypes || {})) {
        this.addTypeLink(
          related,
          `${parse(this.defs[title].file).name}.md#${this.defs[title].relatedTypes[related]}`,
          title + (this.defs[title].object ? ' Object' : ' Class') + ' Type'
        );
      }
      Object.keys(this.defs[title].properties || {}).forEach(property => this.addSnippets(
        `${title}-${property}`,
        this.defs[title].properties[property]
      ));
      // Methods may be overloaded (array of signatures); snippets attach per signature.
      Object.keys(this.defs[title].methods || {}).forEach(method =>
        asArray(this.defs[title].methods[method]).forEach(signature => this.addSnippets(
          `${title}-${method}`,
          signature
        )
      ));
      Object.keys(this.defs[title].events || {}).forEach(event => this.addSnippets(
        `${title}-${event}`,
        this.defs[title].events[event]
      ));
      Object.keys((this.defs[title].statics || {}).properties || {}).forEach(property => this.addSnippets(
        `${title}-${property}`,
        this.defs[title].statics.properties[property]
      ));
      Object.keys((this.defs[title].statics || {}).methods || {}).forEach(method =>
        asArray(this.defs[title].statics.methods[method]).forEach(signature => this.addSnippets(
          `${title}-${method}`,
          signature
        )
      ));
      this.addSnippets(title, this.defs[title]);
      // addSnippets() above guarantees the links array exists at this point.
      if (this.defs[title].links.length === 0) {
        console.warn('No links or snippets given for ' + title);
      }
    });
  }
  /** Indexes all .js/.jsx/.ts/.tsx files in the snippets folder by file name. */
  private readSnippets(path: string) {
    fs.readdirSync(path).forEach(snippet => {
      if (snippet.endsWith('.js') || snippet.endsWith('.jsx') || snippet.endsWith('.ts') || snippet.endsWith('.tsx')) {
        // Normalize Windows separators so the path is usable in URLs/markdown.
        this.snippets[snippet] = join(path, snippet).replace(/\\/g, '/');
      }
    });
  }
  /** Writes the generated widget overview page. */
  private renderOverview() {
    fs.writeFileSync(join(this.apiTargetPath, 'widget-overview.md'), new OverviewRenderer(this).render());
  }
  /**
   * Renders one markdown page per non-interface definition. Rendering errors
   * are re-thrown with the type name prepended for easier diagnosis.
   */
  private renderAPI() {
    Object.keys(this.defs).forEach(title => {
      try {
        if (!this.defs[title].interface) {
          const targetFile = join(this.apiTargetPath, parse(this.defs[title].file).name + '.md');
          fs.writeFileSync(
            targetFile,
            this.replaceVariables(new DocumentRenderer(this, title).render(), './api')
          );
        }
      } catch (ex) {
        throw new Error(title + ': ' + ex.message + '\n' + ex.stack);
      }
    });
  }
  /**
   * Substitutes all ${doc:...} variables in the given markdown content.
   * Supported variables: "moduleversion", "snippetsUrl", "examplesUrl",
   * any registered type link (optionally suffixed with "Url" for a bare URL),
   * snippet file names (-> tabris repo URL) and plain lowercase names
   * (-> tabris-decorators example URL). Throws on anything unrecognized.
   * @param cwd the directory the content will live in, relative to targetPath
   */
  private replaceVariables(contents: string, cwd: '.' | './api'): string {
    const apiDir = cwd === '.' ? './api/' : './';
    return contents.replace(/\$\{doc:[a-zA-Z.-]+\}/g, subString => {
      // Strip the "${doc:" prefix and "}" suffix.
      const varName = subString.slice(6, -1);
      if (varName === 'moduleversion') {
        return this.version;
      }
      if (varName === 'snippetsUrl') {
        // slice(0, -1) drops the trailing slash of the empty-snippet URL.
        return this.renderGithubUrl({repo: 'tabris', snippet: ''}).slice(0, -1);
      }
      if (varName === 'examplesUrl') {
        return this.renderGithubUrl({repo: 'tabris-decorators', snippet: ''}).slice(0, -1);
      }
      if (this.typeLinks[varName]) {
        const href = this.typeLinks[varName].href;
        const link = href.startsWith('http') ? href : apiDir + href;
        return '[`' + varName + '`](' + link + ')';
      }
      if (varName.endsWith('Url') && this.typeLinks[varName.slice(0, -3)]) {
        const href = this.typeLinks[varName.slice(0, -3)].href;
        return href.startsWith('http') ? href : apiDir + href;
      }
      // NOTE(review): "json?" also matches the extension ".jso" since the "n"
      // is optional — presumably "\.json$" was intended; verify before changing.
      if (/^[a-z-]+\.[jt]sx?$/.test(varName) || /^[a-z]+\.json?$/.test(varName)) {
        return this.renderGithubUrl({repo: 'tabris', snippet: varName});
      }
      if (/^[a-z-]+$/.test(varName)) {
        return this.renderGithubUrl({repo: 'tabris-decorators', snippet: varName});
      }
      throw new Error('No replacement for ' + subString);
    });
  }
  /**
   * Fills the <%= category %> placeholders in toc.yml with sorted entry lists
   * built from the definitions plus the TOC_INCLUDE extras.
   */
  private renderIndex() {
    const tocFile = join(this.targetPath, 'toc.yml');
    const toTocEntry = (def: ExtendedApi) => ({title: getTitle(def), url: `api/${parse(def.file).name}.html`});
    const render = ({title, url}: TocEntry) => `  - title: ${title}\n    url: ${url}`;
    const content = {};
    ['core', 'service', 'widget', 'popup', 'net', 'data'].forEach(category => {
      content[category] = Object.keys(this.defs)
        .map(key => this.defs[key])
        .filter(def => def.category === category)
        .filter(def => !def.interface)
        .map(toTocEntry)
        .concat(TOC_INCLUDE[category])
        .sort(compareTitles)
        .map(render)
        .join('\n');
    });
    console.log('render TOC:');
    const text = fs.readFileSync(tocFile, 'utf-8').replace(/<%=\s*(\S+)\s*%>/g, (_, word) => content[word]);
    console.log(text);
    fs.writeFileSync(tocFile, text);
  }
  /** Registers a link target for a type name; duplicate registrations are a build error. */
  private addTypeLink(type: string, href: string, title: string) {
    if (this.typeLinks[type]) {
      throw new Error('Duplicate type ' + type);
    }
    this.typeLinks[type] = {href, title};
  }
  /**
   * Validates the explicitly listed tabris-repo snippets of the target
   * (throwing on unknown snippet names) and auto-adds any snippet file whose
   * name starts with "<title>." or "<title>-" (case-insensitive) that is not
   * already linked.
   */
  private addSnippets(title: string, target: {links?: schema.Links}) {
    // Ensure the links array exists so callers can rely on it afterwards.
    const links = target.links = target.links || [];
    const snippets = links
      .filter(isSnippet)
      .filter(snippet => snippet.repo !== 'tabris-decorators')
      .map(entry => {
        if (!this.snippets[entry.snippet]) {
          throw new Error(title + ' - No such snippet: ' + entry.snippet);
        }
        return entry.snippet;
      });
    Object.keys(this.snippets).forEach(snippet => {
      const autoInclude = snippet.startsWith(title.toLowerCase() + '.')
        || snippet.startsWith(title.toLowerCase() + '-');
      if (autoInclude && snippets.indexOf(snippet) === -1) {
        console.warn('Auto-add snippet ' + snippet + ' to ' + title);
        links.push({snippet});
      }
    });
  }
}
/**
 * Renders the "Widget Overview" markdown page: an intro followed by one
 * section per widget that has preview images, each linking to its API page.
 */
class OverviewRenderer {
  private defs: ApiDefinitions;
  constructor(docGenerator: DocumentationGenerator) {
    this.defs = docGenerator.defs;
  }
  /** Produces the full page including front matter, heading and intro text. */
  public render() {
    const parts = [
      '---\n---',
      '# Widget Overview\n',
      'To build a comprehensive user experience, different UI components can be combined. The following'
        + ' list shows all available widgets for Android and iOS.\n',
      this.renderWidgets()
    ];
    return parts.filter(part => !!part).join('\n');
  }
  /** One section per widget definition; widgets without images are skipped. */
  private renderWidgets() {
    const sections: string[] = [];
    for (const key of Object.keys(this.defs)) {
      const def = this.defs[key];
      if (!def.isWidget) {
        continue;
      }
      const images = renderImages(def);
      if (!images) {
        continue;
      }
      const link = `${parse(def.file).name}.html`;
      const lines = [
        `## [${def.type}](${link})\n`,
        `${def.description}\n`,
        `<div><a style="text-decoration: none" href="${link}">${images}</a></div>\n`
      ];
      sections.push(lines.filter(line => !!line).join('\n'));
    }
    return sections.join('\n');
  }
}
class DocumentRenderer {
private readonly def: ExtendedApi;
private readonly gen: DocumentationGenerator;
constructor(docGenerator: DocumentationGenerator, private readonly title: string) {
this.gen = docGenerator;
this.def = this.gen.defs[this.title];
}
public render() {
let heading = this.title;
if (this.def.namespace === 'global' && this.def.object) {
heading = `Global object "${this.def.object}"`;
} else if (this.def.object) {
heading = `Object "${this.def.object}"`;
} else if (this.def.isWidget || this.def.type) {
heading = `Class "${this.def.type}"`;
}
return [
'---\n---',
'# ' + heading + '\n',
this.renderExtends(),
this.renderDescription(),
renderImages(this.def),
this.renderSummary(),
this.renderInfoFile(),
this.renderExamples(),
this.renderLinks(this.def.links),
this.renderConstructor(),
this.renderMembers(),
this.renderInfoFile('.post')
].filter(value => !!value).join('\n');
}
private renderDescription() {
return this.def.description ? this.def.description + '\n\n' : '';
}
private renderInfoFile(postfix = '') {
const infoFile = join(this.gen.apiSourcePath, parse(this.def.file).name + postfix + '.md');
if (!fs.existsSync(infoFile)) {
return '';
}
return fs.readFileSync(infoFile, 'utf-8') + '\n';
}
private renderExamples() {
const exampleFiles = ['js', 'jsx', 'ts', 'tsx', 'console']
.map(lang => join(this.gen.apiSourcePath, parse(this.def.file).name + '.' + lang))
.filter(file => fs.existsSync(file));
if (!exampleFiles.length) {
return '';
}
return '## Examples\n' + exampleFiles.map(
file => this.renderExamplesFile(file)
).join('\n');
}
private renderExamplesFile(path: string) {
const content = fs.readFileSync(path, 'utf-8');
const segments = content.split(/^\/\/ (.+:)\n\n/gm);
const result: string[] = ['### ' + FILE_TYPES[fileExt(path)], ''];
if (segments.length % 2 === 1) {
if (segments[0].trim()) {
segments.unshift('');
} else {
segments.shift();
}
}
for (let i = 0; i < segments.length; i += 2) {
result.push(
segments[i],
'```' + (fileExt(path) === 'console' ? '' : fileExt(path)),
segments[i + 1].trim(),
'```',
''
);
}
return result.join('\n') + '\n';
}
private renderExtends() {
if (!this.def.type) {
return '';
}
const hierarchy = [];
let currentType = this.def.type;
while (currentType) {
hierarchy.unshift(this.renderTypeRef(currentType));
if (currentType === 'Object') {
currentType = null;
} else if (this.gen.defs[currentType]) {
currentType = plainType(this.gen.defs[currentType].extends) || 'Object';
} else {
throw new Error('Could not find super type for ' + currentType);
}
}
return hierarchy.join(' > ') + '\n';
}
private renderSummary() {
const result = [];
if (this.def.type) {
result.push('Type: | ');
result.push(code(
this.def.type,
this.def.generics ? this.renderGenericsDef(this.def.generics) : '',
this.def.generics && this.def.generics.length > 1 ? '<br/>extends ' : ' extends ',
this.renderTypeRef(this.def.extends || 'Object')
));
result.push('\n');
if (this.def.generics) {
result.push(
'Generics: | <span id="generics">',
this.renderGenericDefSummary(), '</span>\n'
);
}
result.push(
'Constructor: | ',
this.def.constructor.access || 'public', '\n',
'Singleton:', ' | ',
this.def.object ? '`' + this.def.object + '`' : 'No', '\n'
);
if (this.def.module) {
result.push(
'**Module:** |',
this.renderHtmlLink(
'**' + this.def.module + '**',
{href: 'https://www.npmjs.com/package/' + this.def.module}
),
'\n'
);
}
if (this.def.namespace !== false) {
result.push(
'Namespace: |',
this.renderHtmlLink(this.def.namespace || 'tabris', {href: '../modules.md#startup'}), '\n'
);
}
const subclasses = Object.keys(this.gen.defs)
.filter(def => plainType(this.gen.defs[def].extends) === this.def.type);
result.push('Direct subclasses: | ');
if (subclasses.length) {
result.push(subclasses.map(name => code(this.renderTypeRef(name))).join(', '), '\n');
} else {
result.push('None\n');
}
result.push(this.renderJSXSummary());
result.push('\n');
}
return result.join('');
}
private renderGenericDefSummary() {
const result: string[] = [];
this.def.generics.forEach(param => {
result.push(param.name, ': ');
if (param.description) {
result.push('*', param.description);
if (param.extends) {
result.push(' Must be a subclass of ', code(this.renderTypeRef(param.extends)));
result.push(' and defaults to ', code(this.renderTypeRef(param.default)), '.');
} else {
result.push(' Defaults to ', code(this.renderTypeRef(param.default)), '.');
}
result.push('*');
} else {
result.push(code(this.renderTypeRef(param.extends || param.default)));
}
result.push('<br/>');
});
return result.join('');
}
private renderJSXSummary() {
const result = [];
result.push('JSX Support: | ');
if (supportsJsx(this.def)) {
result.push('Element: ', code(this.renderJSXLink(this.def.type)), '<br/>');
result.push('Parent Elements: ');
result.push(this.renderParentElements());
result.push('<br/>');
result.push('Child Elements: ', this.renderChildElements(), '<br/>');
const contentProperties = Object.keys(this.def.properties || {})
.filter(prop => this.def.properties[prop].jsxContentProperty);
if (contentProperties.length) {
result.push('Element content sets: ');
result.push(contentProperties.map(property =>
`[${code(property)}](#${property.toLowerCase()})`
).join(', '));
}
} else {
result.push('No\n');
}
return result.join('');
}
private renderChildElements() {
if (!this.def.jsxChildren.length) {
return '*Not Supported*';
}
if (this.def.jsxChildren[0] === 'Widget') {
return '*Any standalone widget element*';
}
return this.def.jsxChildren.map(
childType => code(this.renderJSXLink(childType))
).join(', ');
}
private resolveGenerics(type: schema.TypeReference): schema.TypeReference {
if (!type) {
return type;
}
if (typeof type === 'string') {
const match = (this.def.generics || []).find(param => param.name === type);
if (match) {
return match.extends;
}
return type;
} else if (isInterfaceReference(type)) {
return {interface: type.interface, generics: type.generics.map(subType => this.resolveGenerics(subType))};
} else if (isUnion(type)) {
return {union: type.union.map(subType => this.resolveGenerics(subType))};
}
throw new Error('Can not resolve generics for ' + JSON.stringify(type));
}
private renderParentElements() {
if (this.def.jsxParents && this.def.jsxParents.length) {
// Only specific parents are allowed
return this.def.jsxParents
.map(type => code(this.renderJSXLink(type)))
.join(', ');
} else if (this.def.isWidget) {
// Any Composite without a explicit ChildType parameter
return Object.keys(this.gen.defs)
.filter(key => key === 'Composite' || this.gen.defs[key].extends === 'Composite')
.map(type => code(this.renderJSXLink(type)))
.join(', ');
}
return '*Not supported*';
}
private renderMembers() {
return [
this.renderAllMethods(),
this.renderAllProperties(),
this.renderAllEvents()
].filter(value => !!value).join('\n') + '\n';
}
private renderConstructor() {
if (!this.def.constructor || this.def.constructor.access !== 'public') {
return '';
}
const result = [
'## Constructor\n\n',
`### new ${this.def.type}${this.renderSignature(this.def.constructor.parameters)}\n\n`
];
if (this.def.constructor.parameters && this.def.constructor.parameters.length) {
result.push(this.renderMethodParamList(this.def.constructor.parameters));
result.push('\n');
}
return result.join('');
}
private renderAllMethods() {
const result = [];
const publicMethodKeys = Object.keys(this.def.methods || {})
.filter(name => isPublic(this.def.methods[name]) && isJS(this.def.methods[name])).sort();
const protectedMethodKeys = Object.keys(this.def.methods || {})
.filter(name => isProtected(this.def.methods[name]) && isJS(this.def.methods[name])).sort();
const staticMethodKeys = Object.keys((this.def.statics || {}).methods || {})
.filter(
name => isPublic(this.def.statics.methods[name]) && isJS(this.def.statics.methods[name])
).sort();
if (publicMethodKeys.length) {
result.push('## Methods\n\n');
result.push(this.renderMethods(this.def.methods, publicMethodKeys));
}
if (protectedMethodKeys.length) {
result.push('## Protected Methods\n\n');
result.push(`These methods are accessible only in classes extending ${code(this.def.type)}.\n\n`);
result.push(this.renderMethods(this.def.methods, protectedMethodKeys));
}
if (staticMethodKeys.length) {
result.push('## Static Methods\n\n');
result.push(this.renderMethods(this.def.statics.methods, staticMethodKeys));
}
return result.join('');
}
private renderMethods(methods: {[name: string]: schema.Method | schema.Method[]}, names: string[]) {
const result = [];
names.forEach(name => {
asArray(methods[name]).filter(sig => !sig.ts_only).forEach(desc => {
result.push(this.renderMethod(name, desc));
});
});
return result.join('');
}
private renderMethod(name: string, method: schema.Method) {
const result = [];
result.push('### ' + name + this.renderSignature(method.parameters) + '\n');
result.push(this.renderPlatforms(method.platforms) + '\n\n');
if (method.provisional) {
result.push('\n', MSG_PROVISIONAL, '\n');
}
if (method.description) {
result.push('\n', method.description, '\n');
} else {
console.log('No description for ' + this.title + ' method ' + name);
}
if (method.parameters && method.parameters.length) {
result.push('\n\n', this.renderMethodParamList(method.parameters), '\n\n');
}
result.push('\nReturns: ', code(this.renderTypeRef(method.returns)), '\n');
result.push(this.renderLinks(method.links));
return result.join('') + '\n';
}
private renderAllProperties() {
const {properties, statics, type} = this.def;
const result = [];
const publicPropertyKeys = Object.keys(properties || {})
.filter(name => isPublic(properties[name]) && isJS(properties[name])).sort();
const protectedPropertyKeys = Object.keys(properties || {})
.filter(name => isProtected(properties[name]) && isJS(properties[name])).sort();
const staticPropertyKeys = Object.keys((statics || {}).properties || {})
.filter(name => isJS(statics.properties[name])).sort();
if (publicPropertyKeys.length) {
result.push('## Properties\n\n');
publicPropertyKeys.forEach(name => {
result.push(this.renderProperty(properties, name));
});
}
if (protectedPropertyKeys.length) {
result.push('## Protected Properties\n\n');
result.push(`These properties are accessible only in classes extending ${code(type)}.\n\n`);
protectedPropertyKeys.forEach(name => {
result.push(this.renderProperty(properties, name));
});
}
if (staticPropertyKeys.length) {
result.push('## Static Properties\n\n');
staticPropertyKeys.forEach(name => {
result.push(this.renderProperty(statics.properties, name, true));
});
}
return result.join('');
}
private renderProperty(
properties: {[key: string]: schema.Property},
name: string,
isStatic: boolean = false
) {
const result = [];
const property = properties[name];
result.push('### ', name, '\n', this.renderPlatforms(property.platforms), '\n\n');
if (property.description) {
result.push(property.description + '\n');
}
result.push(this.renderPropertySummary(property, name, isStatic), '\n\n');
if (property.jsxContentProperty) {
const kind = getJSXContentType(this.gen.defs, this.def, property.type);
result.push(`\n\nWhen using ${this.def.type} as an JSX element `);
if (kind === 'text') {
result.push('the elements Text content is');
} else if (kind === 'element') {
result.push(code(this.renderJSXLink(getJSXChildElementType(this.def, property.type))));
result.push(' child elements are');
} else {
result.push('the elements content value is');
}
result.push(' mapped to this property.');
}
if (property.const && !isStatic && !this.def.object && this.def.constructor.access === 'public') {
result.push('\n\nThis property can only be set via constructor');
if (this.def.isWidget || this.def.extends === 'Popup') {
result.push(' or JSX');
}
result.push('. Once set, it cannot change anymore.\n\n');
}
result.push(this.renderLinks(property.links));
result.push('\n\n');
return result.join('');
}
private renderPlatforms(platforms: schema.Platforms) {
if (!platforms) {
return '';
}
const result = ['<p class="platforms">'];
const names = {ios: 'iOS', android: 'Android'};
for (const platform in names) {
if (platforms[platform] !== false) {
const name = names[platform];
result.push(`<span class='${platform}-tag' title='supported on ${name}'>${name}</span>`);
}
}
result.push('</p>');
return result.join('');
}
private renderPropertySummary(property: schema.Property, name: string, isStatic: boolean) {
const result = ['\nType: |'];
const propertyTypeText = this.renderPropertyTypeReference(property);
result.push(code(propertyTypeText));
result.push('\n');
if ('default' in property) {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const def: any = property.default;
result.push(
'Default: | ',
(def === '' || def === '""' || def === '\'\'')
? this.renderTypeLink('(empty string)', 'string')
: code(this.literal(property.default, property.type)),
'\n'
);
}
result.push('Settable: | ');
if (this.def.isNativeObject) {
result.push(
this.renderHtmlLink(
property.readonly ? 'No'
: property.const ? 'By Constructor or JSX'
: 'Yes',
{href: '../widget-basics.html#widget-properties'}
)
);
} else {
result.push(property.readonly ? 'No' : 'Yes');
}
if (this.def.isNativeObject) {
result.push('\nChange Event: | ');
if (hasChangeEvent(property, isStatic)) {
result.push(`[\`${name}Changed\`](#${name.toLocaleLowerCase()}changed)`);
} else {
result.push('Not supported');
}
}
if (property.jsxContentProperty) {
const kind = getJSXContentType(this.gen.defs, this.def, property.type);
result.push('\nJSX Content Type: | ');
if (kind === 'text') {
result.push('[Text](../declarative-ui.md#jsx-specifics)\n');
} else if (kind === 'element') {
result.push(code(this.renderJSXLink(getJSXChildElementType(this.def, property.type))), '\n');
} else {
result.push(code(propertyTypeText));
}
}
result.push('\n');
if (!property.default && !property.readonly) {
// TODO: how to handle non-primitives?
console.log('No default value for ' + this.title + ' property ' + name);
}
return result.join('');
}
private renderPropertyTypeReference(property: schema.Property): string {
if (property.values) { // TODO: remove in favor of using only union types
return pipeOr(property.values.map(v => this.literal(v, property.type)));
}
return this.renderTypeRef(property.type, false);
}
private renderAllEvents() {
this.generateChangeEvents();
const typeEvents: NamedEvents = Object.keys(this.def.events || {})
.filter(name => !this.isChangeEvent(name))
.map(name => Object.assign({name}, this.def.events[name]));
const changeEvents: NamedEvents = Object.keys(this.def.events || {})
.filter(name => this.isChangeEvent(name))
.map(name => Object.assign({name}, this.def.events[name]));
const result = [];
if (Object.keys(typeEvents).length) {
result.push('## Events\n\n');
result.push(this.renderEvents(typeEvents));
}
if (Object.keys(changeEvents).length) {
result.push('## Change Events\n\n');
result.push(this.renderEvents(changeEvents));
}
return result.join('');
}
private renderEvents(events: NamedEvents) {
return events.map(({name, description, parameters, platforms, links}) => [
'### ', name, '\n\n', this.renderPlatforms(platforms),
description ? description + '\n\n' : '\n',
'EventObject Type: ',
this.renderEventInterface(name, parameters),
'\n\n',
parameters ? this.renderEventParamList(parameters) : 'This event has no additional parameter.\n',
this.renderLinks(links)
].join('')).join('');
}
private isChangeEvent(name: string) {
return this.def.events[name].eventObject === CHANGE_EVENT_INTERFACE;
}
private generateChangeEvents() {
if (!this.def.isNativeObject) {
return;
}
const properties = this.def.properties;
if (!properties || !Object.keys(properties).length) {
return;
}
if (!this.def.events) {
this.def.events = {};
}
Object.keys(properties)
.filter(name => hasChangeEvent(properties[name]))
.forEach(name => {
const standardDescription = `Fired when the [${name}](#${name.toLowerCase()}) property has changed.`;
this.def.events[`${name}Changed`] = {
description: properties[name].changeEventDescription || standardDescription,
eventObject: CHANGE_EVENT_INTERFACE,
parameters: {
value: {
type: properties[name].type,
description: `The new value of [${name}](#${name.toLowerCase()}).`
}
}
};
});
}
private renderSignature(parameters: schema.Parameter[]) {
return '(' + (parameters || []).map(param => {
const paramObject = typeof param === 'object' ? param : {name: param, optional: false};
return paramObject.name + (paramObject.optional ? '?' : '');
}).join(', ') + ')';
}
private renderEventParamList(parameters: {[name: string]: schema.Property}) {
return 'Property|Type|Description\n-|-|-\n' + Object.keys(parameters).sort().map(key => {
const param = parameters[key];
let type = 'any';
if (param.type) {
type = this.renderTypeRef(param.type, false);
} else {
console.log('No type for event parameter ' + key + ' in ' + this.title);
}
if (!param.description) {
console.log('No description for event parameter ' + key + ' in ' + this.title);
}
return [key, code(type), param.description || ''].join(' | ');
}).join('\n') + '\n\n';
}
private renderEventInterface(name: string, parameters: {[k: string]: schema.Property}): string {
const typeName = getEventTypeName(this.def, name, parameters);
const changeEvent = typeName === CHANGE_EVENT_INTERFACE;
const type = this.renderTypeRef(typeName);
const target = this.renderTypeRef(this.def.type);
if (changeEvent) {
return code(
type,
tag(
target,
', ',
this.renderTypeRef(parameters.value.type, true, true)
)
);
}
return code(type, tag(target));
}
private renderMethodParamList(parameters: schema.Parameter[]) {
return 'Parameter|Type|Description\n-|-|-\n'
+ this.flattenParamList(parameters).map(param => {
let type = 'any';
if (param.type) {
type = this.renderTypeRef(param.type, false);
} else {
console.log('No type for parameter ' + param.name + ' in ' + this.title);
}
const desc: string[] = [];
if (param.description) {
desc.push(param.description);
}
if (param.optional) {
desc.push('*Optional.*');
}
return [param.name, code(type), desc.join(' ')].join(' | ');
}).join('\n');
}
private flattenParamList(parameters: schema.Parameter[]) {
const result: schema.Parameter[] = [];
parameters.forEach(param => {
if (this.gen.defs[plainType(param.type)] && this.gen.defs[plainType(param.type)].interface) {
const paramObject = this.gen.defs[plainType(param.type)];
result.push(Object.assign({}, param, {type: 'object'}));
Object.keys(paramObject.properties).sort().forEach(name => {
result.push({
name: param.name + '.' + name,
...paramObject.properties[name]
});
});
} else {
result.push(param);
}
});
return result;
}
private renderLinks(links: schema.Links) {
if (!links || !links.length) {
return '';
}
return ['\nSee also:\n'].concat(links.map(link => {
if (isSnippet(link)) {
const title = `${(link.title || link.snippet)}`;
if (link.repo === 'tabris-decorators') {
const language = link.snippet.endsWith('jsx')
? '<span class=\'language jsx\'>JSX</span>'
: link.snippet.endsWith('js')
? '<span class=\'language js\'>JS</span>'
: '<span class=\'language tsx\'>TSX</span>';
return `[${language} ${title}](${this.gen.renderGithubUrl(link)})`;
} else {
const playgroundUrl = this.renderPlaygroundUrl(link);
const gitHubUrl = this.gen.renderGithubUrl(link);
const snippetType = link.snippet.split('.').pop().toLowerCase();
const language = `<span class='language ${snippetType}'>${snippetType.toUpperCase()}</span>`;
const playgroundHtml = [
'<span style="font-size: 75%;">',
`[<a href="${playgroundUrl}" style="color: cadetblue;">`,
'► Run in Playground</a>]',
'</span>'
].join('');
return `[${language} ${title}](${gitHubUrl}) ${playgroundHtml}`;
}
}
return `[${link.title}](${link.path})`;
})).join(' \n') + '\n';
}
private renderPlaygroundUrl(link: schema.Snippet) {
const preRelease = this.gen.version.indexOf('-') !== -1;
const ref = preRelease ? 'master' : 'v' + this.gen.version;
return `${PLAYGROUND}?gitref=${encodeURIComponent(ref)}&snippet=${encodeURIComponent(link.snippet)}`;
}
private renderTypeRef(ref: schema.TypeReference, flat: boolean = true, simplified: boolean = false): string {
if (!ref) {
return this.renderTypeLink('undefined');
}
if (typeof ref === 'string') {
return this.renderTypeLink(ref === 'void' ? 'undefined' : ref);
}
if (isInterfaceReference(ref)) {
if (simplified && ref.interface === 'Array') {
return this.renderTypeRef('Array', true, simplified);
}
if (ref.interface === 'Array' && ref.generics.every(type => typeof type === 'string')) {
return this.renderTypeRef(ref.generics[0]) + '[]';
}
if (simplified) {
return this.renderTypeRef(ref.interface, true, simplified);
}
return this.renderTypeRef(ref.interface)
+ '<' + ref.generics.map(type => this.renderTypeRef(type)).join(', ') + '>';
}
if (isUnion(ref)) {
if (simplified && ref.union.every(type => type === 'string')) {
return this.renderTypeRef('string');
}
return pipeOr(
ref.union.map(part => this.renderTypeRef(part, true, simplified)),
ref.union.length > 2 ? 0 : null
);
}
if (isTuple(ref)) {
return '['
+ ref.tuple.map(part => this.renderTypeRef(part, true, simplified)).join(', ')
+ ']';
}
if (isMap(ref)) {
if (simplified) {
return this.renderTypeRef('Object');
}
const {map} = ref;
const keys = Object.keys(map);
const comments = keys.map(key => this.renderPropertyAsComment(map[key]));
const hasComments = comments.some(comment => !!comment);
if (flat || (keys.length <= 2 && !hasComments)) {
if (hasComments) {
console.log('Flat map entries for ' + JSON.stringify(ref) + ' will omit some type information');
}
const content = keys.map(key =>
`${key}: ${this.renderTypeRef(map[key].ts_type || map[key].type)}`
).join(', ');
return '{' + content + '}';
}
const text = ['{<br/>'];
keys.forEach((key, index) => {
const type = map[key].type;
if (isUnion(type)) {
text.push(`${nbsp(2)}${key}:${comments[index]}<br/>`);
text.push(pipeOr(
type.union.map(part => this.renderTypeRef(part, true, simplified)),
type.union.length > 2 ? 4 : null
));
text.push(index !== keys.length - 1 ? ',' : '');
} else {
text.push(`${nbsp(2)}${key}: ${this.renderTypeRef(type)}`);
text.push(index !== keys.length - 1 ? ',' : '');
text.push(comments[index]);
}
text.push('<br/>');
});
text.push('}');
return text.join('');
}
if (isIndexedMap(ref)) {
if (simplified) {
return this.renderTypeRef('Object');
}
const name = Object.keys(ref.map)[0];
return `{[${name}]: ${this.renderTypeRef(ref.map[name])}}`;
}
if (isCallback(ref)) {
if (simplified) {
return this.renderTypeRef('Function');
}
const parameters = ref.callback.map(arg => this.renderNamedType(arg)).join(', ');
return `(${parameters}) => ${this.renderNamedType(ref.returns)}`;
}
throw new Error(this.def.type + ' - Can not render: ' + JSON.stringify(ref));
}
private renderNamedType({type, name}: {type?: schema.TypeReference, name?: string}) {
return typeof type === 'string'
? this.renderTypeLink(name || type, type)
: this.renderTypeRef(type);
}
private renderPropertyAsComment(property: schema.Property) {
if (!property.default && !property.description && !property.optional) {
return '';
}
const comment: string[] = [];
if (property.description) {
const desc = property.description;
comment.push(desc.endsWith('.') ? desc.slice(0, -1) : desc);
}
if (property.optional && !property.default) {
comment.push('optional');
}
if (property.default) {
comment.push('defaults to ' + property.default);
}
return ' // ' + comment.join('. ' + '');
}
/**
 * @param content the link text
 * @param type the type to link. If omitted the link text is used as the type
 */
private renderTypeLink(content: string, type?: schema.TypeReference): string {
  if (!content) {
    throw new Error('Missing content in typeLink ' + this.def);
  }
  // Prefer the plain type name, falling back to the link text, and
  // normalize a leading/trailing double quote to a single quote.
  const plain = type && plainType(type) ? plainType(type) : content;
  const typeName = plain.replace(/"/, '\'').replace(/"$/, '\'');
  const link = this.getTypeLink(typeName);
  if (typeName === '(mixed)') {
    throw new Error(content + '/' + JSON.stringify(type) + ' in ' + this.def.type + ' is mixed');
  }
  if (!link) {
    console.log('No type link for ' + typeName);
    return content;
  }
  return this.renderHtmlLink(content, link);
}
/** Resolves the link target for a type name: self-link, generic parameter, or global lookup table. */
private getTypeLink(type: string): Link {
  if (type === this.def.type) {
    return {href: '#'};
  }
  const generics = this.def.generics || [];
  if (generics.some(param => param.name === type)) {
    return {href: '#generics', title: 'Generic Parameter' + quot(type)};
  }
  // String literal types (leading single quote) all link to the `string` entry.
  const key = type.startsWith('\'') ? 'string' : type;
  return this.gen.typeLinks[key];
}
/** Renders a JSX element type (e.g. `<Foo/>`) as an HTML link to its documentation. */
private renderJSXLink(type: schema.TypeReference) {
  const name = plainType(type);
  const link = this.getTypeLink(name);
  if (!link) {
    throw new Error('No type link for ' + name);
  }
  return this.renderHtmlLink(tag(name + '/'), link);
}
/** Renders a generics declaration (e.g. `<T, U>`) from its parameter names. */
private renderGenericsDef(generics: schema.GenericsDef) {
  const names: string[] = [];
  for (const param of generics) {
    names.push(param.name);
  }
  return tag(names.join(', '));
}
/** Assembles an HTML anchor; `.md` link targets are rewritten to `.html`. */
private renderHtmlLink(content: string, link: Link) {
  let html = '<a ';
  if (link.href) {
    html += 'href="' + link.href.replace('.md', '.html') + '" ';
  }
  if (link.title) {
    html += 'title="' + link.title + '"';
  }
  return html + '>' + content + '</a>';
}
/** Renders a literal value, single-quoting strings, linked to its primitive type. */
private literal(value: string | boolean | number | {[key: string]: string}, type: schema.TypeReference) {
  const isString = type === 'string';
  const text = isString ? `'${value}'` : `${value}`;
  return this.renderTypeLink(text, isString ? 'string' : typeof value);
}
}
/**
 * TOC comparator: prioritized entries come first, then lowercase-first
 * titles, then locale-aware alphabetical order.
 */
function compareTitles(entry1: TocEntry, entry2: TocEntry) {
  if (entry1.priority) {
    return -1;
  }
  if (entry2.priority) {
    return 1;
  }
  const lower1 = isLowerCase(entry1.title);
  const lower2 = isLowerCase(entry2.title);
  if (lower1 !== lower2) {
    // Lowercase titles sort ahead of capitalized ones.
    return lower1 ? -1 : 1;
  }
  return entry1.title.localeCompare(entry2.title, 'en');
}
/** True when the first character of `str` is already in its lowercase form. */
function isLowerCase(str: string): boolean {
  const first = str.charAt(0);
  return first === first.toLowerCase();
}
/**
 * Synchronously checks whether `fpath` exists and is a regular file.
 * A missing path yields false; any other filesystem error is rethrown.
 */
function isFileSync(fpath: string): boolean {
  try {
    const stats = fs.statSync(fpath);
    return stats.isFile();
  } catch (err) {
    if (err.code !== 'ENOENT') {
      throw err;
    }
    return false;
  }
}
/** True when at least one variant of the member is available outside TypeScript. */
function isJS(member: Member) {
  for (const variant of asArray(member)) {
    if (!variant.ts_only) {
      return true;
    }
  }
  return false;
}
/** True when at least one variant is neither protected nor private. */
function isPublic(def: Member) {
  const variants = asArray(def);
  return !variants.every(member => member.protected || member.private);
}
/** True when any variant of the member is marked protected. */
function isProtected(def: Member) {
  for (const member of asArray(def)) {
    if (member.protected) {
      return true;
    }
  }
  return false;
}
/**
 * Renders the Android/iOS screenshot figures for an API type.
 * Returns an empty string (logging widget-like types that lack images)
 * when neither platform screenshot exists on disk.
 */
function renderImages(def: ExtendedApi) {
  const baseName = parse(def.file).name;
  const imageFor = (platform: string) =>
    join('img/' + platform, baseName + '.png').replace(/\\/g, '/');
  const figures =
    createImageFigure(def, imageFor('android'), 'Android') +
    createImageFigure(def, imageFor('ios'), 'iOS');
  if (figures.length !== 0) {
    return `<div class="tabris-image">${figures}</div>\n`;
  }
  const widgetLike = def.isWidget || def.extends === 'Popup';
  if (widgetLike && def.type !== 'Widget') {
    console.log('No image for ' + def.type);
  }
  return '';
}
/**
 * Builds a <figure> showing the platform screenshot with a caption, or an
 * empty string when the image file is absent.
 * The child <div> is required to scale the image inside the <figure> container.
 */
function createImageFigure(def: ExtendedApi, image: string, platform: string): string {
  if (!isFileSync(join('doc/api', image))) {
    return '';
  }
  return `<figure><div><img srcset="${image} 2x" src="${image}" alt="${def.type} on ${platform}"/></div>` +
    `<figcaption>${platform}</figcaption></figure>`;
}
/** Type guard: an object is a Snippet when its `snippet` property is a string. */
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function isSnippet(obj: any): obj is schema.Snippet {
  const {snippet} = obj;
  return typeof snippet === 'string';
}
/** Returns the text after the last dot; a dotless path comes back unchanged. */
function fileExt(path: string) {
  const lastDot = path.lastIndexOf('.');
  return path.slice(lastDot + 1);
}
// Source: https://stackoverflow.com/a/45130990
function getFiles(dir): string[] {
const dirents = fs.readdirSync(dir, {withFileTypes: true});
const files = dirents.map((dirent) => {
const res = resolve(dir, dirent.name);
return dirent.isDirectory() ? getFiles(res) : res;
});
return Array.prototype.concat(...files);
} | the_stack |
import * as dateFns from 'date-fns'
import * as FF from 'final-form'
import * as FP from 'fp-ts'
import * as IO from 'io-ts'
import * as R from 'ramda'
import * as React from 'react'
import * as RF from 'react-final-form'
import * as RRDom from 'react-router-dom'
import * as urql from 'urql'
import * as M from '@material-ui/core'
import * as Lab from '@material-ui/lab'
import BucketIcon from 'components/BucketIcon'
import * as Pagination from 'components/Pagination'
import Skeleton from 'components/Skeleton'
import * as Notifications from 'containers/Notifications'
import * as Model from 'model'
import * as APIConnector from 'utils/APIConnector'
import Delay from 'utils/Delay'
import * as Dialogs from 'utils/Dialogs'
import type FormSpec from 'utils/FormSpec'
import MetaTitle from 'utils/MetaTitle'
import * as NamedRoutes from 'utils/NamedRoutes'
import * as Sentry from 'utils/Sentry'
import assertNever from 'utils/assertNever'
import parseSearch from 'utils/parseSearch'
import { useTracker } from 'utils/tracking'
import * as Types from 'utils/types'
import * as validators from 'utils/validators'
import * as Form from '../Form'
import * as Table from '../Table'
import BUCKET_CONFIGS_QUERY from './gql/BucketConfigs.generated'
import ADD_MUTATION from './gql/BucketsAdd.generated'
import UPDATE_MUTATION from './gql/BucketsUpdate.generated'
import REMOVE_MUTATION from './gql/BucketsRemove.generated'
import { BucketConfigSelectionFragment as BucketConfig } from './gql/BucketConfigSelection.generated'
import CONTENT_INDEXING_SETTINGS_QUERY from './gql/ContentIndexingSettings.generated'
// Matches SNS topic ARNs, e.g. "arn:aws:sns:us-east-1:123456789012:topic".
const SNS_ARN_RE = /^arn:aws(-|\w)*:sns:(-|\w)*:\d*:\S+$/
// Sentinel string stored in the backend meaning "deliberately not subscribed
// to S3 notifications".
const DO_NOT_SUBSCRIBE_STR = 'DO_NOT_SUBSCRIBE'
// Symbol counterpart used as the in-form representation of the sentinel so it
// cannot collide with a user-typed ARN string.
const DO_NOT_SUBSCRIBE_SYM = Symbol(DO_NOT_SUBSCRIBE_STR)
// Value of the SNS form field: an ARN string (possibly empty) or the sentinel.
type SnsFormValue = string | typeof DO_NOT_SUBSCRIBE_SYM
// Runtime (io-ts) codec for SnsFormValue; encode is the identity.
// eslint-disable-next-line @typescript-eslint/no-redeclare
const SnsFormValue = new IO.Type<SnsFormValue>(
  'SnsFormValue',
  (i): i is SnsFormValue => i === DO_NOT_SUBSCRIBE_SYM || IO.string.is(i),
  (u, c) =>
    u === DO_NOT_SUBSCRIBE_SYM || typeof u === 'string'
      ? IO.success(u)
      : IO.failure(u, c),
  R.identity,
)
// Normalizes a comma-separated extension list from the form into a sorted,
// deduplicated array of lowercase entries, or null when empty (null tells
// the backend to fall back to its default extension set).
const normalizeExtensions = FP.function.flow(
  Types.decode(Types.fromNullable(IO.string, '')),
  R.replace(/['"]/g, ''), // users sometimes paste quoted lists
  R.split(','),
  R.map(R.pipe(R.trim, R.toLower)),
  R.reject((t) => !t), // drop empty items produced by stray commas
  R.uniq,
  R.sortBy(R.identity),
  (exts) =>
    exts.length ? (exts as FP.nonEmptyArray.NonEmptyArray<Types.NonEmptyString>) : null,
)
// A valid file extension: a leading dot followed by lowercase alphanumerics
// or underscores (input is lowercased by normalizeExtensions). Anchored so
// that entries like "name.txt" — a file name, not an extension — are
// rejected, matching the field's help text ("must start with the dot and
// contain only alphanumeric characters thereafter").
const EXT_RE = /^\.[0-9a-z_]+$/
// Returns the 'validExtensions' error key when any normalized entry is not a
// valid extension; undefined when the list is empty or fully valid.
const validateExtensions = FP.function.flow(normalizeExtensions, (exts) =>
  exts && !exts.every(R.test(EXT_RE)) ? 'validExtensions' : undefined,
)
// Builds a field validator that accepts blank values or integers within
// [min, max]; anything else yields the 'integerInRange' error key.
const integerInRange = (min: number, max: number) => (v: string | null | undefined) => {
  if (!v) return undefined
  const parsed = Number(v)
  const isValid = Number.isInteger(parsed) && parsed >= min && parsed <= max
  return isValid ? undefined : 'integerInRange'
}
// Maps raw form values to a BucketUpdateInput for the update mutation.
// Each entry normalizes one field: empty strings become null where the API
// treats null as "unset / use defaults".
const editFormSpec: FormSpec<Model.GQLTypes.BucketUpdateInput> = {
  // Required, non-empty after trimming.
  title: R.pipe(
    R.prop('title'),
    Types.decode(IO.string),
    R.trim,
    Types.decode(Types.NonEmptyString),
  ),
  // Optional URL; blank -> null.
  iconUrl: R.pipe(
    R.prop('iconUrl'),
    Types.decode(Types.fromNullable(IO.string, '')),
    R.trim,
    (s) => (s ? (s as Types.NonEmptyString) : null),
  ),
  // Optional free text; blank -> null.
  description: R.pipe(
    R.prop('description'),
    Types.decode(Types.fromNullable(IO.string, '')),
    R.trim,
    (s) => (s ? (s as Types.NonEmptyString) : null),
  ),
  // Numeric field entered as a string; blank -> null (unset).
  relevanceScore: R.pipe(
    R.prop('relevanceScore'),
    Types.decode(Types.fromNullable(IO.string, '')),
    (s) => s || null,
    Types.decode(Types.nullable(Types.IntFromString)),
  ),
  overviewUrl: R.pipe(
    R.prop('overviewUrl'),
    Types.decode(Types.fromNullable(IO.string, '')),
    R.trim,
    (s) => (s ? (s as Types.NonEmptyString) : null),
  ),
  // Comma-separated tags -> trimmed, deduplicated list, or null when empty.
  tags: R.pipe(
    R.prop('tags'),
    Types.decode(Types.fromNullable(IO.string, '')),
    R.split(','),
    R.map(R.trim),
    R.reject((t) => !t),
    R.uniq,
    (tags) =>
      tags.length ? (tags as FP.nonEmptyArray.NonEmptyArray<Types.NonEmptyString>) : null,
  ),
  // JSON-LD textarea -> parsed JSON; blank or invalid input falls back to null.
  linkedData: R.pipe(
    R.prop('linkedData'),
    Types.decode(Types.fromNullable(IO.string, '')),
    (s) => s.trim() || 'null',
    Types.decode(Types.withFallback(Types.JsonFromString, null)),
  ),
  // Deep-indexing off sends the explicit "disabled" sentinel ([]);
  // otherwise the normalized list (null meaning "server defaults").
  fileExtensionsToIndex: (values) =>
    !values.enableDeepIndexing
      ? []
      : FP.function.pipe(values.fileExtensionsToIndex, normalizeExtensions),
  // Deep-indexing off sends 0 (disabled); blank input -> null (defaults).
  indexContentBytes: (values) =>
    !values.enableDeepIndexing
      ? 0
      : FP.function.pipe(
          values.indexContentBytes,
          Types.decode(Types.fromNullable(IO.string, '')),
          R.trim,
          R.ifElse(R.equals(''), R.always(null), Types.decode(Types.IntFromString)),
        ),
  scannerParallelShardsDepth: R.pipe(
    R.prop('scannerParallelShardsDepth'),
    Types.decode(Types.fromNullable(IO.string, '')),
    (s) => s || null,
    Types.decode(Types.nullable(Types.IntFromString)),
  ),
  // Symbol sentinel -> DO_NOT_SUBSCRIBE string; blank -> null.
  snsNotificationArn: R.pipe(
    R.prop('snsNotificationArn'),
    Types.decode(Types.nullable(SnsFormValue)),
    (v) => {
      if (v === DO_NOT_SUBSCRIBE_SYM) return DO_NOT_SUBSCRIBE_STR as Types.NonEmptyString
      const trimmed = v?.trim()
      if (trimmed) return trimmed as Types.NonEmptyString
      return null
    },
  ),
  skipMetaDataIndexing: R.pipe(
    R.prop('skipMetaDataIndexing'),
    Types.decode(Types.fromNullable(IO.boolean, false)),
  ),
  // Versioning is not editable from this form.
  setVersioning: () => null,
}
// Spec for the "add bucket" mutation input: everything the edit spec maps,
// plus the immutable bucket name and the delay-scan flag.
const addFormSpec: FormSpec<Model.GQLTypes.BucketAddInput> = {
  ...editFormSpec,
  // Required S3 bucket name, non-empty after trimming.
  name: R.pipe(
    R.prop('name'),
    Types.decode(IO.string),
    R.trim,
    Types.decode(Types.NonEmptyString),
  ),
  // Optional checkbox; absent -> false.
  delayScan: R.pipe(
    R.prop('delayScan'),
    Types.decode(Types.fromNullable(IO.boolean, false)),
  ),
}
// Field-level validator for the SNS input: blank values and the
// "do not subscribe" sentinel pass; anything else must look like a topic ARN.
function validateSns(v: SnsFormValue) {
  if (!v || v === DO_NOT_SUBSCRIBE_SYM) return undefined
  return SNS_ARN_RE.test(v) ? undefined : 'invalidArn'
}
// User-facing messages for SNS validation/submit error keys.
const snsErrors = {
  invalidArn: 'Enter a valid SNS topic ARN or leave blank',
  topicNotFound: 'No such topic, enter a valid SNS topic ARN or leave blank',
  configurationError: 'Notification configuration error',
}
// Custom react-final-form field for the SNS topic ARN: a "skip notifications"
// checkbox driving the symbol sentinel, plus a text input for the ARN itself.
function SnsField({
  input: { onChange, value = '' },
  meta,
}: RF.FieldRenderProps<SnsFormValue>) {
  // Errors are only surfaced after a failed submit, matching the other fields.
  const error = meta.submitFailed && (meta.error || meta.submitError)
  // Checking the box swaps the value to the sentinel symbol; unchecking
  // resets to an empty ARN string.
  const handleSkipChange = React.useCallback(
    (_e, checked) => {
      onChange(checked ? DO_NOT_SUBSCRIBE_SYM : '')
    },
    [onChange],
  )
  const handleArnChange = React.useCallback(
    (e) => {
      onChange(e.target.value)
    },
    [onChange],
  )
  return (
    <M.Box mt={2}>
      <Form.Checkbox
        meta={meta}
        checked={value === DO_NOT_SUBSCRIBE_SYM}
        onChange={handleSkipChange}
        label="Skip S3 notifications (not recommended)"
      />
      <M.TextField
        error={!!error}
        helperText={error ? (snsErrors as $TSFixMe)[error] || error : undefined}
        label="SNS Topic ARN"
        placeholder="Enter ARN (leave blank to auto-fill)"
        fullWidth
        margin="normal"
        disabled={
          value === DO_NOT_SUBSCRIBE_SYM || meta.submitting || meta.submitSucceeded
        }
        onChange={handleArnChange}
        value={value === DO_NOT_SUBSCRIBE_SYM ? DO_NOT_SUBSCRIBE_STR : value}
        InputLabelProps={{ shrink: true }}
      />
    </M.Box>
  )
}
// Styles for the inline help icon and its tooltip bubble.
const useHintStyles = M.makeStyles((t) => ({
  icon: {
    fontSize: '1.125em',
    marginLeft: t.spacing(0.5),
    marginTop: -1,
    opacity: 0.5,
    verticalAlign: -4,
    '&:hover': {
      opacity: 1,
    },
  },
  tooltip: {
    // Tighten list spacing when the tooltip body contains a <ul>.
    '& ul': {
      marginBottom: 0,
      marginTop: t.spacing(0.5),
      paddingLeft: t.spacing(2),
    },
  },
}))
interface HintProps {
  // Tooltip content shown on hover.
  children: M.TooltipProps['title']
}
// Small "help" icon that reveals its children in a tooltip on hover.
function Hint({ children }: HintProps) {
  const classes = useHintStyles()
  return (
    <M.Tooltip arrow title={children} classes={{ tooltip: classes.tooltip }}>
      <M.Icon fontSize="small" className={classes.icon}>
        help
      </M.Icon>
    </M.Tooltip>
  )
}
// Styles for the bucket form: field groups, collapsible accordion panels
// and the deep-indexing warning banner.
const useBucketFieldsStyles = M.makeStyles((t) => ({
  group: {
    '& > *:first-child': {
      marginTop: 0,
    },
  },
  panel: {
    margin: '0 !important',
    // Suppress MUI Accordion's default divider line.
    '&::before': {
      content: 'none',
    },
  },
  panelSummary: {
    padding: 0,
    minHeight: 'auto !important',
  },
  panelSummaryContent: {
    margin: `${t.spacing(1)}px 0 !important`,
  },
  warning: {
    background: t.palette.warning.main,
    marginBottom: t.spacing(1),
    marginTop: t.spacing(2),
  },
  warningIcon: {
    color: t.palette.warning.dark,
  },
}))
interface BucketFieldsProps {
  // Existing bucket when editing; undefined when adding a new one.
  bucket?: BucketConfig
  // Opens the re-index dialog; only provided in edit mode.
  reindex?: () => void
}
// The shared form body for adding/editing a bucket: basic fields, a
// "Metadata" panel, and an "Indexing and notifications" panel. Must be
// rendered inside an RF.Form context.
function BucketFields({ bucket, reindex }: BucketFieldsProps) {
  const classes = useBucketFieldsStyles()
  const [{ error, data }] = urql.useQuery({ query: CONTENT_INDEXING_SETTINGS_QUERY })
  // Propagate a fetch failure to the nearest error boundary.
  if (!data && error) throw error
  const sentry = Sentry.use()
  React.useEffect(() => {
    // Partial failure: we have data but also an error — report it.
    if (data && error) sentry('captureException', error)
  }, [error, data, sentry])
  // NOTE(review): assumes the urql client runs in suspense mode so `data`
  // is always set here when no error was thrown — confirm; otherwise the
  // non-null assertion could fail while loading.
  const settings = data!.config.contentIndexingSettings
  return (
    <M.Box>
      <M.Box className={classes.group} mt={-1} pb={2}>
        {bucket ? (
          <M.TextField
            label="Name"
            value={bucket.name}
            fullWidth
            margin="normal"
            disabled
          />
        ) : (
          <RF.Field
            component={Form.Field}
            name="name"
            label="Name"
            placeholder="Enter an S3 bucket name"
            parse={R.pipe(
              R.toLower,
              R.replace(/[^a-z0-9-.]/g, ''),
              R.take(63) as (s: string) => string,
            )}
            validate={validators.required as FF.FieldValidator<any>}
            errors={{
              required: 'Enter a bucket name',
              conflict: 'Bucket already added',
              noSuchBucket: 'No such bucket',
            }}
            fullWidth
            margin="normal"
          />
        )}
        <RF.Field
          component={Form.Field}
          name="title"
          label="Title"
          placeholder='e.g. "Production analytics data"'
          parse={R.pipe(R.replace(/^\s+/g, ''), R.take(256) as (s: string) => string)}
          validate={validators.required as FF.FieldValidator<any>}
          errors={{
            required: 'Enter a bucket title',
          }}
          fullWidth
          margin="normal"
        />
        <RF.Field
          component={Form.Field}
          name="iconUrl"
          label="Icon URL (optional)"
          placeholder="e.g. https://some-cdn.com/icon.png"
          helperText="Recommended size: 80x80px"
          parse={R.pipe(R.trim, R.take(1024) as (s: string) => string)}
          fullWidth
          margin="normal"
        />
        <RF.Field
          component={Form.Field}
          name="description"
          label="Description (optional)"
          placeholder="Enter description if required"
          parse={R.pipe(R.replace(/^\s+/g, ''), R.take(1024) as (s: string) => string)}
          multiline
          rows={1}
          rowsMax={3}
          fullWidth
          margin="normal"
        />
      </M.Box>
      <M.Accordion elevation={0} className={classes.panel}>
        <M.AccordionSummary
          expandIcon={<M.Icon>expand_more</M.Icon>}
          classes={{
            root: classes.panelSummary,
            content: classes.panelSummaryContent,
          }}
        >
          <M.Typography variant="h6">Metadata</M.Typography>
        </M.AccordionSummary>
        <M.Box className={classes.group} pt={1} pb={2}>
          <RF.Field
            component={Form.Field}
            name="relevanceScore"
            label="Relevance score"
            placeholder="Higher numbers appear first, -1 to hide"
            parse={R.pipe(
              R.replace(/[^0-9-]/g, ''),
              R.replace(/(.+)-+$/g, '$1'),
              R.take(16) as (s: string) => string,
            )}
            validate={validators.integer as FF.FieldValidator<any>}
            errors={{
              integer: 'Enter a valid integer',
            }}
            fullWidth
            margin="normal"
          />
          <RF.Field
            component={Form.Field}
            name="tags"
            label="Tags (comma-separated)"
            placeholder='e.g. "geospatial", for bucket discovery'
            fullWidth
            margin="normal"
            multiline
            rows={1}
            rowsMax={3}
          />
          <RF.Field
            component={Form.Field}
            name="overviewUrl"
            label="Overview URL"
            parse={R.trim}
            fullWidth
            margin="normal"
          />
          <RF.Field
            component={Form.Field}
            name="linkedData"
            label="Structured data (JSON-LD)"
            validate={validators.jsonObject as FF.FieldValidator<any>}
            errors={{
              jsonObject: 'Must be a valid JSON object',
            }}
            fullWidth
            multiline
            rows={1}
            rowsMax={10}
            margin="normal"
          />
        </M.Box>
      </M.Accordion>
      <M.Accordion elevation={0} className={classes.panel}>
        <M.AccordionSummary
          expandIcon={<M.Icon>expand_more</M.Icon>}
          classes={{
            root: classes.panelSummary,
            content: classes.panelSummaryContent,
          }}
        >
          <M.Typography variant="h6">Indexing and notifications</M.Typography>
        </M.AccordionSummary>
        <M.Box className={classes.group} pt={1}>
          {!!reindex && (
            <M.Box pb={2.5}>
              <M.Button variant="outlined" fullWidth onClick={reindex}>
                Re-index and repair
              </M.Button>
            </M.Box>
          )}
          <RF.Field
            component={Form.Checkbox}
            type="checkbox"
            name="enableDeepIndexing"
            label={
              <>
                Enable deep indexing
                <Hint>
                  Deep indexing adds the <em>contents</em> of an object to your search
                  index, while shallow indexing only covers object metadata. Deep indexing
                  may require more disk in ElasticSearch. Enable deep indexing when you
                  want your users to find files by their contents.
                </Hint>
              </>
            }
          />
          <RF.FormSpy subscription={{ modified: true, values: true }}>
            {({ modified, values }) => {
              // don't need this while adding a bucket
              if (!bucket) return null
              // Only warn once one of the deep-indexing fields was touched.
              if (
                !modified?.enableDeepIndexing &&
                !modified?.fileExtensionsToIndex &&
                !modified?.indexContentBytes
              )
                return null
              // Compare the effective (decoded) settings against the stored
              // bucket config; no warning when nothing actually changes.
              try {
                if (
                  R.equals(
                    bucket.fileExtensionsToIndex,
                    editFormSpec.fileExtensionsToIndex(values),
                  ) &&
                  R.equals(
                    bucket.indexContentBytes,
                    editFormSpec.indexContentBytes(values),
                  )
                )
                  return null
              } catch {
                // Decoding failed (invalid input) — field-level validation
                // will surface the problem; suppress the warning.
                return null
              }
              return (
                <Lab.Alert
                  className={classes.warning}
                  icon={
                    <M.Icon fontSize="inherit" className={classes.warningIcon}>
                      error
                    </M.Icon>
                  }
                  severity="warning"
                >
                  Changing these settings affects files that are indexed after the change.
                  If you wish to deep index existing files, click{' '}
                  <strong>&quot;Re-index and repair&quot;</strong>.
                </Lab.Alert>
              )
            }}
          </RF.FormSpy>
          <RF.FormSpy subscription={{ values: true }}>
            {({ values }) => {
              // Extension/byte-count fields are only relevant with deep indexing on.
              if (!values.enableDeepIndexing) return null
              return (
                <>
                  <RF.Field
                    component={Form.Field}
                    name="fileExtensionsToIndex"
                    label={
                      <>
                        File extensions to deep index (comma-separated)
                        <Hint>
                          Default extensions:
                          <ul>
                            {settings.extensions.map((ext) => (
                              <li key={ext}>{ext}</li>
                            ))}
                          </ul>
                        </Hint>
                      </>
                    }
                    placeholder='e.g. ".txt, .md", leave blank to use default settings'
                    validate={validateExtensions}
                    errors={{
                      validExtensions: (
                        <>
                          Enter a comma-separated list of{' '}
                          <abbr title="Must start with the dot and contain only alphanumeric characters thereafter">
                            valid
                          </abbr>{' '}
                          file extensions
                        </>
                      ),
                    }}
                    fullWidth
                    margin="normal"
                    multiline
                    rows={1}
                    rowsMax={3}
                  />
                  <RF.Field
                    component={Form.Field}
                    name="indexContentBytes"
                    label={
                      <>
                        Content bytes to deep index
                        <Hint>Defaults to {settings.bytesDefault}</Hint>
                      </>
                    }
                    placeholder='e.g. "1024", leave blank to use default settings'
                    parse={R.replace(/[^0-9]/g, '')}
                    validate={integerInRange(settings.bytesMin, settings.bytesMax)}
                    errors={{
                      integerInRange: (
                        <>
                          Enter an integer from {settings.bytesMin} to {settings.bytesMax}
                        </>
                      ),
                    }}
                    fullWidth
                    margin="normal"
                  />
                </>
              )
            }}
          </RF.FormSpy>
          <RF.Field
            component={Form.Field}
            name="scannerParallelShardsDepth"
            label="Scanner parallel shards depth"
            placeholder="Leave blank to use default settings"
            validate={validators.integer as FF.FieldValidator<any>}
            errors={{
              integer: 'Enter a valid integer',
            }}
            parse={R.pipe(R.replace(/[^0-9]/g, ''), R.take(16) as (s: string) => string)}
            fullWidth
            margin="normal"
          />
          <RF.Field
            component={SnsField}
            name="snsNotificationArn"
            validate={validateSns}
          />
          <M.Box mt={2}>
            <RF.Field
              component={Form.Checkbox}
              type="checkbox"
              name="skipMetaDataIndexing"
              label="Skip metadata indexing"
            />
          </M.Box>
          {!bucket && (
            <M.Box mt={1}>
              <RF.Field
                component={Form.Checkbox}
                type="checkbox"
                name="delayScan"
                label="Delay scan"
              />
            </M.Box>
          )}
        </M.Box>
      </M.Accordion>
    </M.Box>
  )
}
// Suspense fallback: five stacked skeleton rows standing in for the form fields.
function BucketFieldsPlaceholder() {
  const rows = [0, 1, 2, 3, 4]
  return (
    <>
      {rows.map((i) => (
        <Skeleton key={i} height={48} mt={i ? 3 : 0} />
      ))}
    </>
  )
}
interface AddProps {
  // Closes the dialog; `reason` distinguishes e.g. 'cancel' from success.
  close: (reason?: string) => void
}
// "Add a bucket" dialog body: renders the shared bucket form and runs the
// bucketAdd mutation, mapping each GraphQL result variant to form errors.
function Add({ close }: AddProps) {
  const { push } = Notifications.use()
  const t = useTracker()
  const [, add] = urql.useMutation(ADD_MUTATION)
  const onSubmit = React.useCallback(
    async (values) => {
      try {
        // Decode form values into the mutation input.
        const input = R.applySpec(addFormSpec)(values)
        const res = await add({ input })
        if (res.error) throw res.error
        if (!res.data) throw new Error('No data')
        const r = res.data.bucketAdd
        // Map union result variants to final-form field/form errors.
        switch (r.__typename) {
          case 'BucketAddSuccess':
            push(`Bucket "${r.bucketConfig.name}" added`)
            t.track('WEB', {
              type: 'admin',
              action: 'bucket add',
              bucket: r.bucketConfig.name,
            })
            close()
            return undefined
          case 'BucketAlreadyAdded':
            return { name: 'conflict' }
          case 'BucketDoesNotExist':
            return { name: 'noSuchBucket' }
          case 'SnsInvalid':
            // shouldnt happen since we're validating it
            return { snsNotificationArn: 'invalidArn' }
          case 'NotificationTopicNotFound':
            return { snsNotificationArn: 'topicNotFound' }
          case 'NotificationConfigurationError':
            return {
              snsNotificationArn: 'configurationError',
              [FF.FORM_ERROR]: 'notificationConfigurationError',
            }
          case 'InsufficientPermissions':
            return { [FF.FORM_ERROR]: 'insufficientPermissions' }
          case 'BucketIndexContentBytesInvalid':
            // shouldnt happen since we valide input
            return { indexContentBytes: 'integerInRange' }
          case 'BucketFileExtensionsToIndexInvalid':
            // shouldnt happen since we valide input
            return { fileExtensionsToIndex: 'validExtensions' }
          default:
            return assertNever(r)
        }
      } catch (e) {
        // eslint-disable-next-line no-console
        console.error('Error adding bucket')
        // eslint-disable-next-line no-console
        console.error(e)
        return { [FF.FORM_ERROR]: 'unexpected' }
      }
    },
    [add, push, close, t],
  )
  return (
    <RF.Form onSubmit={onSubmit} initialValues={{ enableDeepIndexing: true }}>
      {({
        handleSubmit,
        submitting,
        submitFailed,
        error,
        submitError,
        hasValidationErrors,
      }) => (
        <>
          <M.DialogTitle>Add a bucket</M.DialogTitle>
          <M.DialogContent>
            <React.Suspense fallback={<BucketFieldsPlaceholder />}>
              <form onSubmit={handleSubmit}>
                <BucketFields />
                {submitFailed && (
                  <Form.FormError
                    error={error || submitError}
                    errors={{
                      unexpected: 'Something went wrong',
                      notificationConfigurationError: 'Notification configuration error',
                      insufficientPermissions: 'Insufficient permissions',
                    }}
                  />
                )}
                <input type="submit" style={{ display: 'none' }} />
              </form>
            </React.Suspense>
          </M.DialogContent>
          <M.DialogActions>
            {submitting && (
              <Delay>
                {() => (
                  <M.Box flexGrow={1} display="flex" pl={2}>
                    <M.CircularProgress size={24} />
                  </M.Box>
                )}
              </Delay>
            )}
            <M.Button
              onClick={() => close('cancel')}
              color="primary"
              disabled={submitting}
            >
              Cancel
            </M.Button>
            <M.Button
              onClick={handleSubmit}
              color="primary"
              disabled={submitting || (submitFailed && hasValidationErrors)}
            >
              Add
            </M.Button>
          </M.DialogActions>
        </>
      )}
    </RF.Form>
  )
}
interface ReindexProps {
  // Name of the bucket to re-index.
  bucket: string
  // Dialog visibility flag.
  open: boolean
  // Closes the dialog (state is reset on exit).
  close: () => void
}
// Confirmation dialog that triggers re-indexing of a bucket via the REST
// admin endpoint, optionally repairing its S3 notifications.
function Reindex({ bucket, open, close }: ReindexProps) {
  const req = APIConnector.use()
  const [repair, setRepair] = React.useState(false)
  const [submitting, setSubmitting] = React.useState(false)
  const [submitSucceeded, setSubmitSucceeded] = React.useState(false)
  const [error, setError] = React.useState<string | false>(false)
  // Reset all local state; invoked after the dialog exit transition.
  const reset = React.useCallback(() => {
    setSubmitting(false)
    setSubmitSucceeded(false)
    setRepair(false)
    setError(false)
  }, [])
  const handleRepairChange = React.useCallback((_e, v) => {
    setRepair(v)
  }, [])
  const reindex = React.useCallback(async () => {
    if (submitting) return
    setError(false)
    setSubmitting(true)
    try {
      // TODO: use graphql mutation
      await req({
        endpoint: `/admin/reindex/${bucket}`,
        method: 'POST',
        body: { repair: repair || undefined },
      })
      setSubmitSucceeded(true)
    } catch (e) {
      // Map known HTTP failures to friendly messages.
      if (APIConnector.HTTPError.is(e, 404, 'Bucket not found')) {
        setError('Bucket not found')
      } else if (APIConnector.HTTPError.is(e, 409, /in progress/)) {
        setError('Indexing already in progress')
      } else {
        // eslint-disable-next-line no-console
        console.log('Error re-indexing bucket:')
        // eslint-disable-next-line no-console
        console.error(e)
        setError('Unexpected error')
      }
    }
    setSubmitting(false)
  }, [submitting, req, bucket, repair])
  // Ignore close requests while the request is in flight.
  const handleClose = React.useCallback(() => {
    if (submitting) return
    close()
  }, [submitting, close])
  return (
    <M.Dialog open={open} onClose={handleClose} onExited={reset} fullWidth>
      <M.DialogTitle>Re-index and repair a bucket</M.DialogTitle>
      {submitSucceeded ? (
        <M.DialogContent>
          <M.DialogContentText color="textPrimary">
            We have {repair && <>repaired S3 notifications and </>}
            started re-indexing the bucket.
          </M.DialogContentText>
        </M.DialogContent>
      ) : (
        <M.DialogContent>
          <M.DialogContentText color="textPrimary">
            You are about to start re-indexing the <b>&quot;{bucket}&quot;</b> bucket
          </M.DialogContentText>
          <Form.Checkbox
            meta={{ submitting, submitSucceeded }}
            // @ts-expect-error, FF.FieldInputProps misses second argument for onChange
            input={{ checked: repair, onChange: handleRepairChange }}
            label="Repair S3 notifications"
          />
          {repair && (
            <M.Box color="warning.dark" ml={4}>
              <M.Typography color="inherit" variant="caption">
                Bucket notifications will be overwritten
              </M.Typography>
            </M.Box>
          )}
        </M.DialogContent>
      )}
      <M.DialogActions>
        {submitting && (
          <Delay>
            {() => (
              <M.Box flexGrow={1} display="flex" pl={2}>
                <M.CircularProgress size={24} />
              </M.Box>
            )}
          </Delay>
        )}
        {!submitting && !!error && (
          <M.Box flexGrow={1} display="flex" alignItems="center" pl={2}>
            <M.Icon color="error">error_outline</M.Icon>
            <M.Box pl={1} />
            <M.Typography variant="body2" color="error">
              {error}
            </M.Typography>
          </M.Box>
        )}
        {submitSucceeded ? (
          <>
            <M.Button onClick={close} color="primary">
              Close
            </M.Button>
          </>
        ) : (
          <>
            <M.Button onClick={close} disabled={submitting} color="primary">
              Cancel
            </M.Button>
            <M.Button onClick={reindex} disabled={submitting} color="primary">
              Re-index
              {repair && <> and repair</>}
            </M.Button>
          </>
        )}
      </M.DialogActions>
    </M.Dialog>
  )
}
interface EditProps {
  // The bucket being edited.
  bucket: BucketConfig
  // Closes the dialog; `reason` distinguishes e.g. 'cancel' from success.
  close: (reason?: string) => void
}
// "Edit bucket" dialog body: pre-populates the shared form from the bucket
// config, runs the bucketUpdate mutation and hosts the re-index sub-dialog.
function Edit({ bucket, close }: EditProps) {
  const [, update] = urql.useMutation(UPDATE_MUTATION)
  const [reindexOpen, setReindexOpen] = React.useState(false)
  const openReindex = React.useCallback(() => setReindexOpen(true), [])
  const closeReindex = React.useCallback(() => setReindexOpen(false), [])
  const onSubmit = React.useCallback(
    async (values) => {
      try {
        // Decode form values into the mutation input.
        const input = R.applySpec(editFormSpec)(values)
        const res = await update({ name: bucket.name, input })
        if (res.error) throw res.error
        if (!res.data) throw new Error('No data')
        const r = res.data.bucketUpdate
        // Map union result variants to final-form field/form errors.
        switch (r.__typename) {
          case 'BucketUpdateSuccess':
            close()
            return undefined
          case 'SnsInvalid':
            // shouldnt happen since we're validating it
            return { snsNotificationArn: 'invalidArn' }
          case 'NotificationTopicNotFound':
            return { snsNotificationArn: 'topicNotFound' }
          case 'NotificationConfigurationError':
            return {
              snsNotificationArn: 'configurationError',
              [FF.FORM_ERROR]: 'notificationConfigurationError',
            }
          case 'BucketNotFound':
            return { [FF.FORM_ERROR]: 'bucketNotFound' }
          case 'BucketIndexContentBytesInvalid':
            // shouldnt happen since we valide input
            return { indexContentBytes: 'integerInRange' }
          case 'BucketFileExtensionsToIndexInvalid':
            // shouldnt happen since we valide input
            return { fileExtensionsToIndex: 'validExtensions' }
          default:
            return assertNever(r)
        }
      } catch (e) {
        // eslint-disable-next-line no-console
        console.error('Error updating bucket')
        // eslint-disable-next-line no-console
        console.error(e)
        return { [FF.FORM_ERROR]: 'unexpected' }
      }
    },
    [update, close, bucket.name],
  )
  // Convert the stored config into form-field representations
  // (numbers -> strings, arrays -> comma-separated text, sentinel -> symbol).
  const initialValues = {
    title: bucket.title,
    iconUrl: bucket.iconUrl || '',
    description: bucket.description || '',
    relevanceScore: bucket.relevanceScore.toString(),
    overviewUrl: bucket.overviewUrl || '',
    tags: (bucket.tags || []).join(', '),
    linkedData: bucket.linkedData ? JSON.stringify(bucket.linkedData) : '',
    // Deep indexing counts as enabled unless both "disabled" sentinels
    // ([] extensions and 0 bytes) are set.
    enableDeepIndexing:
      !R.equals(bucket.fileExtensionsToIndex, []) && bucket.indexContentBytes !== 0,
    fileExtensionsToIndex: (bucket.fileExtensionsToIndex || []).join(', '),
    indexContentBytes: bucket.indexContentBytes,
    scannerParallelShardsDepth: bucket.scannerParallelShardsDepth?.toString() || '',
    snsNotificationArn:
      bucket.snsNotificationArn === DO_NOT_SUBSCRIBE_STR
        ? DO_NOT_SUBSCRIBE_SYM
        : bucket.snsNotificationArn,
    skipMetaDataIndexing: bucket.skipMetaDataIndexing ?? false,
  }
  return (
    <RF.Form onSubmit={onSubmit} initialValues={initialValues}>
      {({
        handleSubmit,
        submitting,
        submitFailed,
        error,
        submitError,
        hasValidationErrors,
        pristine,
        form,
      }) => (
        <>
          <Reindex bucket={bucket.name} open={reindexOpen} close={closeReindex} />
          <M.DialogTitle>Edit the &quot;{bucket.name}&quot; bucket</M.DialogTitle>
          <M.DialogContent>
            <React.Suspense fallback={<BucketFieldsPlaceholder />}>
              <form onSubmit={handleSubmit}>
                <BucketFields bucket={bucket} reindex={openReindex} />
                {submitFailed && (
                  <Form.FormError
                    error={error || submitError}
                    errors={{
                      unexpected: 'Something went wrong',
                      notificationConfigurationError: 'Notification configuration error',
                      bucketNotFound: 'Bucket not found',
                    }}
                  />
                )}
                <input type="submit" style={{ display: 'none' }} />
              </form>
            </React.Suspense>
          </M.DialogContent>
          <M.DialogActions>
            {submitting && (
              <Delay>
                {() => (
                  <M.Box flexGrow={1} display="flex" pl={2}>
                    <M.CircularProgress size={24} />
                  </M.Box>
                )}
              </Delay>
            )}
            <M.Button
              onClick={() => form.reset()}
              color="primary"
              disabled={pristine || submitting}
            >
              Reset
            </M.Button>
            <M.Button
              onClick={() => close('cancel')}
              color="primary"
              disabled={submitting}
            >
              Cancel
            </M.Button>
            <M.Button
              onClick={handleSubmit}
              color="primary"
              disabled={pristine || submitting || (submitFailed && hasValidationErrors)}
            >
              Save
            </M.Button>
          </M.DialogActions>
        </>
      )}
    </RF.Form>
  )
}
interface DeleteProps {
  // The bucket to disconnect from the stack.
  bucket: BucketConfig
  // Closes the dialog; `reason` distinguishes e.g. 'cancel' from confirm.
  close: (reason?: string) => void
}
// Confirmation dialog for disconnecting a bucket; the deletion itself runs
// after the dialog closes, with failures reported via notifications.
function Delete({ bucket, close }: DeleteProps) {
  const { push } = Notifications.use()
  const t = useTracker()
  const [, rm] = urql.useMutation(REMOVE_MUTATION)
  const doDelete = React.useCallback(async () => {
    // Close optimistically; errors are surfaced as notifications below.
    close()
    try {
      const res = await rm({ name: bucket.name })
      if (res.error) throw res.error
      if (!res.data) throw new Error('No data')
      const r = res.data.bucketRemove
      switch (r.__typename) {
        case 'BucketRemoveSuccess':
          t.track('WEB', { type: 'admin', action: 'bucket delete', bucket: bucket.name })
          return
        case 'IndexingInProgress':
          push(`Can't delete bucket "${bucket.name}" while it's being indexed`)
          return
        case 'BucketNotFound':
          push(`Can't delete bucket "${bucket.name}": not found`)
          return
        default:
          assertNever(r)
      }
    } catch (e) {
      push(`Error deleting bucket "${bucket.name}"`)
      // eslint-disable-next-line no-console
      console.error('Error deleting bucket')
      // eslint-disable-next-line no-console
      console.error(e)
    }
  }, [bucket, close, rm, push, t])
  return (
    <>
      <M.DialogTitle>Delete a bucket</M.DialogTitle>
      <M.DialogContent>
        You are about to disconnect &quot;{bucket.name}&quot; from Quilt. The search index
        will be deleted. Bucket contents will remain unchanged.
      </M.DialogContent>
      <M.DialogActions>
        <M.Button onClick={() => close('cancel')} color="primary">
          Cancel
        </M.Button>
        <M.Button onClick={doDelete} color="primary">
          Delete
        </M.Button>
      </M.DialogActions>
    </>
  )
}
// Slightly faded style for the stub (default) bucket icon.
const useCustomBucketIconStyles = M.makeStyles({
  stub: {
    opacity: 0.7,
  },
})
interface CustomBucketIconProps {
  // Icon URL from the bucket config.
  src: string
}
// Bucket icon cell for the table, dimming the fallback/default icon.
function CustomBucketIcon({ src }: CustomBucketIconProps) {
  const classes = useCustomBucketIconStyles()
  return <BucketIcon alt="" classes={classes} src={src} title="Default icon" />
}
// Column definitions for the buckets table: id/label plus value extraction
// and optional custom cell rendering.
const columns = [
  // Bucket name with its relevance score in parentheses.
  {
    id: 'name',
    label: 'Name (relevance)',
    getValue: R.prop('name'),
    getDisplay: (v: string, b: BucketConfig) => (
      <span>
        <M.Box fontFamily="monospace.fontFamily" component="span">
          {v}
        </M.Box>{' '}
        <M.Box color="text.secondary" component="span">
          ({b.relevanceScore})
        </M.Box>
      </span>
    ),
  },
  {
    id: 'icon',
    label: 'Icon',
    sortable: false,
    align: 'center',
    getValue: R.prop('iconUrl'),
    getDisplay: (v: string) => <CustomBucketIcon src={v} />,
  },
  // Title, truncated with an ellipsis.
  {
    id: 'title',
    label: 'Title',
    getValue: R.prop('title'),
    getDisplay: (v: string) => (
      <M.Box
        component="span"
        maxWidth={240}
        textOverflow="ellipsis"
        overflow="hidden"
        whiteSpace="nowrap"
        display="inline-block"
      >
        {v}
      </M.Box>
    ),
  },
  // Description, truncated; shows an explicit "<Empty>" placeholder.
  {
    id: 'description',
    label: 'Description',
    getValue: R.prop('description'),
    getDisplay: (v: string | undefined) =>
      v ? (
        <M.Box
          component="span"
          maxWidth={240}
          textOverflow="ellipsis"
          overflow="hidden"
          whiteSpace="nowrap"
          display="inline-block"
        >
          {v}
        </M.Box>
      ) : (
        <M.Box color="text.secondary" component="span">
          {'<Empty>'}
        </M.Box>
      ),
  },
  // Relative "last indexed" time with the absolute timestamp as a tooltip.
  {
    id: 'lastIndexed',
    label: 'Last indexed',
    getValue: R.prop('lastIndexed'),
    getDisplay: (v: Date | undefined) =>
      v ? (
        <span title={v.toLocaleString()}>
          {dateFns.formatDistanceToNow(v, { addSuffix: true })}
        </span>
      ) : (
        <M.Box color="text.secondary" component="span">
          {'<N/A>'}
        </M.Box>
      ),
  },
]
interface CRUDProps {
  // Bucket name from the URL when a bucket is being edited; undefined otherwise.
  bucketName?: string
}
function CRUD({ bucketName }: CRUDProps) {
const [{ error, data }] = urql.useQuery({ query: BUCKET_CONFIGS_QUERY })
if (!data && error) throw error
const sentry = Sentry.use()
React.useEffect(() => {
if (data && error) sentry('captureException', error)
}, [error, data, sentry])
const rows = data!.bucketConfigs
const ordering = Table.useOrdering({ rows, column: columns[0] })
const pagination = Pagination.use(ordering.ordered, {
// @ts-expect-error
getItemId: R.prop('name'),
})
const { open: openDialog, render: renderDialogs } = Dialogs.use()
const { urls } = NamedRoutes.use()
const history = RRDom.useHistory()
const toolbarActions = [
{
title: 'Add bucket',
icon: <M.Icon>add</M.Icon>,
fn: React.useCallback(() => {
// @ts-expect-error
openDialog(({ close }) => <Add {...{ close }} />)
}, [openDialog]),
},
]
const edit = (bucket: BucketConfig) => () => {
history.push(urls.adminBuckets(bucket.name))
}
const inlineActions = (bucket: BucketConfig) => [
{
title: 'Delete',
icon: <M.Icon>delete</M.Icon>,
fn: () => {
// @ts-expect-error
openDialog(({ close }) => <Delete {...{ bucket, close }} />)
},
},
{
title: 'Edit',
icon: <M.Icon>edit</M.Icon>,
fn: edit(bucket),
},
]
const editingBucket = React.useMemo(
() => (bucketName ? rows.find(({ name }) => name === bucketName) : null),
[bucketName, rows],
)
const onBucketClose = React.useCallback(() => {
history.push(urls.adminBuckets())
}, [history, urls])
if (bucketName && !editingBucket) {
// Bucket name set in URL, but it was not found in buckets list
return <RRDom.Redirect to={urls.adminBuckets()} />
}
return (
<M.Paper>
{renderDialogs({ maxWidth: 'xs', fullWidth: true })}
<M.Dialog open={!!editingBucket} fullWidth maxWidth="xs">
{editingBucket && <Edit bucket={editingBucket} close={onBucketClose} />}
</M.Dialog>
<Table.Toolbar heading="Buckets" actions={toolbarActions} />
<Table.Wrapper>
<M.Table size="small">
<Table.Head columns={columns} ordering={ordering} withInlineActions />
<M.TableBody>
{pagination.paginated.map((i: BucketConfig) => (
<M.TableRow
hover
key={i.name}
onClick={edit(i)}
style={{ cursor: 'pointer' }}
>
{columns.map((col) => (
// @ts-expect-error
<M.TableCell key={col.id} align={col.align} {...col.props}>
{/* @ts-expect-error */}
{(col.getDisplay || R.identity)(col.getValue(i), i)}
</M.TableCell>
))}
<M.TableCell
align="right"
padding="none"
onClick={(e) => e.stopPropagation()}
>
<Table.InlineActions actions={inlineActions(i)} />
</M.TableCell>
</M.TableRow>
))}
</M.TableBody>
</M.Table>
</Table.Wrapper>
<Table.Pagination pagination={pagination} />
</M.Paper>
)
}
// Admin "Buckets" page: reads the optional `bucket` query parameter to decide
// which bucket (if any) is being edited, and shows a table skeleton while the
// bucket list loads.
export default function Buckets({ location }: RRDom.RouteComponentProps) {
  const { bucket } = parseSearch(location.search)
  // parseSearch may yield an array for repeated params; use the first one
  const bucketName = Array.isArray(bucket) ? bucket[0] : bucket
  return (
    <M.Box mt={2} mb={2}>
      <MetaTitle>{['Buckets', 'Admin']}</MetaTitle>
      <React.Suspense
        fallback={
          <M.Paper>
            <Table.Toolbar heading="Buckets" />
            <Table.Progress />
          </M.Paper>
        }
      >
        <CRUD bucketName={bucketName} />
      </React.Suspense>
    </M.Box>
  )
}
import * as fc from '../../../../lib/fast-check';
import { StreamArbitrary } from '../../../../src/arbitrary/_internals/StreamArbitrary';
import { NextValue } from '../../../../src/check/arbitrary/definition/NextValue';
import { cloneIfNeeded, hasCloneMethod } from '../../../../src/check/symbols';
import { Stream } from '../../../../src/stream/Stream';
import {
assertProduceCorrectValues,
assertProduceSameValueGivenSameSeed,
} from '../__test-helpers__/NextArbitraryAssertions';
import { FakeIntegerArbitrary, fakeNextArbitrary } from '../__test-helpers__/NextArbitraryHelpers';
import { fakeRandom } from '../__test-helpers__/RandomHelpers';
import * as StringifyMock from '../../../../src/utils/stringify';
// Resets the module registry and all jest mocks/spies before every test, and
// registers itself with fast-check's global configuration so each property
// run also starts from a clean state.
function beforeEachHook() {
  jest.resetModules();
  jest.restoreAllMocks();
  fc.configureGlobal({ beforeEach: beforeEachHook });
}
beforeEach(beforeEachHook);
describe('StreamArbitrary', () => {
  describe('generate', () => {
    it('should produce a cloneable instance of Stream', () => {
      // Arrange
      const biasFactor = 48;
      const { instance: sourceArb } = fakeNextArbitrary();
      const { instance: mrng } = fakeRandom();
      // Act
      const arb = new StreamArbitrary(sourceArb);
      const out = arb.generate(mrng, biasFactor);
      // Assert
      expect(out.value).toBeInstanceOf(Stream);
      expect(out.hasToBeCloned).toBe(true);
      expect(hasCloneMethod(out.value)).toBe(true);
    });
    it('should not call generate before we pull from the Stream but decide bias', () => {
      // Arrange
      const biasFactor = 48;
      const { instance: sourceArb, generate } = fakeNextArbitrary();
      const { instance: mrng, nextInt } = fakeRandom();
      // Act
      const arb = new StreamArbitrary(sourceArb);
      arb.generate(mrng, biasFactor).value; // builds the lazy Stream without iterating it
      // Assert: bias was decided (one nextInt draw) but nothing was generated yet
      expect(nextInt).toHaveBeenCalledTimes(1);
      expect(nextInt).toHaveBeenCalledWith(1, biasFactor);
      expect(generate).not.toHaveBeenCalled();
    });
    it('should not check bias again for cloned instances', () => {
      // Arrange
      const biasFactor = 48;
      const { instance: sourceArb } = fakeNextArbitrary();
      const { instance: mrng, nextInt } = fakeRandom();
      // Act
      const arb = new StreamArbitrary(sourceArb);
      const out = arb.generate(mrng, biasFactor);
      const s1 = out.value;
      const s2 = out.value;
      // Assert: reading .value twice yields distinct streams but a single bias draw
      expect(nextInt).toHaveBeenCalledTimes(1);
      expect(nextInt).toHaveBeenCalledWith(1, biasFactor);
      expect(s2).not.toBe(s1);
    });
    it('should call generate with cloned instance of Random as we pull from the Stream', () => {
      // Arrange
      const numValuesToPull = 5;
      const biasFactor = 48;
      let index = 0;
      const expectedValues = [...Array(numValuesToPull)].map(() => Symbol());
      const { instance: sourceArb, generate } = fakeNextArbitrary();
      generate.mockImplementation(() => new NextValue(expectedValues[index++], undefined));
      const { instance: mrng, clone, nextInt } = fakeRandom();
      nextInt.mockReturnValueOnce(1); // for bias
      const { instance: mrngCloned } = fakeRandom();
      clone.mockReturnValueOnce(mrngCloned);
      // Act
      const arb = new StreamArbitrary(sourceArb);
      const stream = arb.generate(mrng, biasFactor).value;
      const values = [...stream.take(numValuesToPull)];
      // Assert: one generate call per pulled value, always on the cloned Random
      expect(generate).toHaveBeenCalledTimes(numValuesToPull);
      for (const call of generate.mock.calls) {
        expect(call).toEqual([mrngCloned, biasFactor]);
      }
      expect(values).toEqual(expectedValues);
    });
    it('should call generate with cloned instance of Random specific for each Stream', () => {
      // Arrange
      const numValuesToPullS1 = 5;
      const numValuesToPullS2 = 3;
      const biasFactor = 48;
      const { instance: sourceArb, generate } = fakeNextArbitrary();
      generate.mockImplementation(() => new NextValue(Symbol(), undefined));
      const { instance: mrng, clone, nextInt } = fakeRandom();
      nextInt.mockReturnValueOnce(1); // for bias
      const { instance: mrngClonedA } = fakeRandom();
      const { instance: mrngClonedB } = fakeRandom();
      clone.mockReturnValueOnce(mrngClonedA).mockReturnValueOnce(mrngClonedB);
      // Act
      const arb = new StreamArbitrary(sourceArb);
      const out = arb.generate(mrng, biasFactor);
      const s1 = out.value;
      const c1 = s1[Symbol.iterator]();
      for (let i = 0; i !== numValuesToPullS1; ++i) {
        const next = c1.next();
        expect(next.done).toBe(false);
      }
      const s2 = out.value;
      const c2 = s2[Symbol.iterator]();
      for (let i = 0; i !== numValuesToPullS2; ++i) {
        const next = c2.next();
        expect(next.done).toBe(false);
      }
      c1.next(); // one extra pull on the first stream after the second was consumed
      // Assert: each stream sticks to its own cloned Random, even interleaved
      expect(generate).toHaveBeenCalledTimes(numValuesToPullS1 + numValuesToPullS2 + 1);
      const calls = generate.mock.calls;
      for (let i = 0; i !== numValuesToPullS1; ++i) {
        const call = calls[i];
        expect(call).toEqual([mrngClonedA, biasFactor]);
      }
      for (let i = 0; i !== numValuesToPullS2; ++i) {
        const call = calls[numValuesToPullS1 + i];
        expect(call).toEqual([mrngClonedB, biasFactor]);
      }
      expect(calls[numValuesToPullS1 + numValuesToPullS2]).toEqual([mrngClonedA, biasFactor]);
    });
    it('should only print pulled values on print', () =>
      fc.assert(
        fc.property(fc.array(fc.integer()), (expectedValues) => {
          // Arrange
          const biasFactor = 48;
          let index = 0;
          const { instance: sourceArb, generate } = fakeNextArbitrary<number>();
          generate.mockImplementation(() => new NextValue(expectedValues[index++], undefined));
          const { instance: mrng, clone, nextInt } = fakeRandom();
          nextInt.mockReturnValueOnce(2); // for no bias
          const { instance: mrngCloned } = fakeRandom();
          clone.mockReturnValueOnce(mrngCloned);
          const fakeStringify = (v: unknown) => '<' + String(v) + '>';
          const stringify = jest.spyOn(StringifyMock, 'stringify');
          stringify.mockImplementation(fakeStringify);
          // Act
          const arb = new StreamArbitrary(sourceArb);
          const stream = arb.generate(mrng, biasFactor).value;
          const values = [...stream.take(expectedValues.length)];
          // Assert: toString only renders values already pulled from the stream
          expect(values).toEqual(expectedValues);
          expect(String(stream)).toEqual(`Stream(${expectedValues.map(fakeStringify).join(',')}…)`);
          expect(stringify).toHaveBeenCalledTimes(expectedValues.length);
          expect(generate).toHaveBeenCalledTimes(expectedValues.length);
          if (expectedValues.length > 0) {
            expect(generate).toHaveBeenCalledWith(mrngCloned, undefined);
          }
        })
      ));
    it('should create independant Stream even in terms of toString', () => {
      // Arrange
      const biasFactor = 48;
      let index = 0;
      const { instance: sourceArb, generate } = fakeNextArbitrary<number>();
      generate.mockImplementation(() => new NextValue(index++, undefined));
      const { instance: mrng, clone, nextInt } = fakeRandom();
      nextInt.mockReturnValueOnce(2); // for no bias
      const { instance: mrngCloned } = fakeRandom();
      clone.mockReturnValueOnce(mrngCloned);
      const stringify = jest.spyOn(StringifyMock, 'stringify');
      stringify.mockImplementation((v) => '<' + String(v) + '>');
      // Act
      const arb = new StreamArbitrary(sourceArb);
      const out = arb.generate(mrng, biasFactor);
      const stream1 = out.value;
      const stream2 = out.value;
      const values1 = [...stream1.take(2)];
      const values2 = [...stream2.take(3)];
      const values1Bis = [...stream1.take(2)];
      // Assert: each cloned stream keeps its own history of pulled values
      expect(values1).toEqual([0, 1]);
      expect(values2).toEqual([2, 3, 4]);
      expect(values1Bis).toEqual([5, 6]);
      expect(String(stream1)).toEqual(`Stream(<0>,<1>,<5>,<6>…)`);
      expect(String(stream2)).toEqual(`Stream(<2>,<3>,<4>…)`);
      expect(stringify).toHaveBeenCalledTimes(7);
      expect(generate).toHaveBeenCalledTimes(7);
    });
  });
describe('canShrinkWithoutContext', () => {
  // Truly infinite generator. Fix: the original body only yielded a single
  // value, contradicting the 'infinite stream' row in the table below. This
  // is safe because canShrinkWithoutContext never iterates the stream (the
  // test asserts the source arbitrary is not even consulted).
  function* infiniteG() {
    while (true) {
      yield 1;
    }
  }
  it.each`
    data | description
    ${Stream.nil()} | ${'empty stream'}
    ${Stream.of(1, 5, 6, 74, 4)} | ${'finite stream'}
    ${new Stream(infiniteG())} | ${'infinite stream'}
  `('should return false for any Stream whatever the size ($description)', ({ data }) => {
    // Arrange
    const { instance: sourceArb, canShrinkWithoutContext } = fakeNextArbitrary();
    // Act
    const arb = new StreamArbitrary(sourceArb);
    const out = arb.canShrinkWithoutContext(data);
    // Assert: streams can never be shrunk without context, and the
    // underlying arbitrary must not be consulted
    expect(out).toBe(false);
    expect(canShrinkWithoutContext).not.toHaveBeenCalled();
  });
  it('should return false even for its own values', () => {
    // Arrange
    const { instance: sourceArb, canShrinkWithoutContext } = fakeNextArbitrary();
    const { instance: mrng } = fakeRandom();
    // Act
    const arb = new StreamArbitrary(sourceArb);
    const g = arb.generate(mrng, undefined);
    const out = arb.canShrinkWithoutContext(g.value);
    // Assert
    expect(out).toBe(false);
    expect(canShrinkWithoutContext).not.toHaveBeenCalled();
  });
});
describe('shrink', () => {
  it('should always shrink to nil', () => {
    // Arrange
    const { instance: sourceArb, generate, shrink } = fakeNextArbitrary<number>();
    generate.mockReturnValue(new NextValue(0, undefined));
    const { instance: mrng } = fakeRandom();
    // Act
    const arb = new StreamArbitrary(sourceArb);
    const { value, context } = arb.generate(mrng, undefined);
    const pullValues = [...value.take(50)];
    const shrinks = [...arb.shrink(value, context)];
    // Assert: a stream never shrinks (empty shrink tree) and shrinking is
    // never delegated to the source arbitrary
    expect(pullValues).toBeDefined();
    expect(shrinks).toHaveLength(0);
    expect(shrink).not.toHaveBeenCalled();
  });
});
});
describe('StreamArbitrary (integration)', () => {
  const sourceArb = new FakeIntegerArbitrary();
  // Streams are lazy and cloneable — compare by materializing the first 10 values
  const isEqual = (s1: Stream<number>, s2: Stream<number>) => {
    expect([...cloneIfNeeded(s1).take(10)]).toEqual([...cloneIfNeeded(s2).take(10)]);
  };
  // A correct value is a Stream whose first 10 items all belong to the source arbitrary
  const isCorrect = (value: Stream<number>) =>
    value instanceof Stream && [...value.take(10)].every((v) => sourceArb.canShrinkWithoutContext(v));
  const streamBuilder = () => new StreamArbitrary(sourceArb);
  it('should produce the same values given the same seed', () => {
    assertProduceSameValueGivenSameSeed(streamBuilder, { isEqual });
  });
  it('should only produce correct values', () => {
    assertProduceCorrectValues(streamBuilder, isCorrect);
  });
});
import Game from '../Core/Game';
import SName from '../Core/SName';
import Hilo3d from '../Core/Hilo3d';
import {
ISeinNodeExtension, INodeWithGlTFExtensions,
ISeinPhysicBodyExtension, ISeinBoxColliderExtension, ISeinSphereColliderExtension,
ISeinRendererExtension, ISeinRendererGlobalExtension, ISeinAmbientLightExtension,
ISeinImageBasedLightingExtension, ISeinImageBasedLightingSourceExtension, ISeinImageBasedLight,
IGlTFParser, IGlTFModel, ISeinAtlasExtension, ISeinSpriteExtension, ISeinSpriteSourceExtension,
ISeinCustomMaterialSourceExtension, ISeinCubeTextureExtension, ISeinSkyboxExtension, ISeinTextureImproveExtension
} from '../types/Resource';
import SceneActor, {isSceneActor} from '../Renderer/ISceneActor';
import SceneComponent from '../Renderer/SceneComponent';
import RigidBodyComponent from '../Physic/RigidBodyComponent';
import BoxColliderComponent from '../Physic/BoxColliderComponent';
import SphereColliderComponent from '../Physic/SphereColliderComponent';
import Debug from '../Debug';
import Constants from '../Core/Constants';
import RawShaderMaterial, {isRawShaderMaterial} from '../Material/RawShaderMaterial';
import {isStaticMeshComponent} from '../Renderer/StaticMeshComponent';
import {isPrimitiveComponent} from '../Renderer/PrimitiveComponent';
import PBRMaterial, {isPBRMaterial} from '../Material/PBRMaterial';
import {Matrix3, Color, SphericalHarmonics3} from '../Core/Math';
import CubeTexture from '../Texture/CubeTexture';
import CubeTextureLoader from './CubeTextureLoader';
import * as Math from '../Core/Math';
import Texture from '../Texture/Texture';
import AtlasManager from '../Texture/AtlasManager';
import ImageLoader from '../Resource/ImageLoader';
import SkyboxMaterial from '../Material/SkyboxMaterial';
import TextureLoader from './TextureLoader';
import LazyTexture from '../Texture/LazyTexture';
import Material from '../Material/Material';
/**
 * Interface describing a glTF extension handler.
 *
 * @member name The extension's name, e.g. `Sein_node`.
 * @member init Called when the file is initialized and this extension is present.
 * @member parseOnLoad Called while parsing when the global extension is encountered; resources may not be fully loaded yet.
 * @member parseOnEnd Called while parsing when the global extension is encountered, after all resources have finished loading.
 * @member parse Called during parsing when this extension is encountered on an entity. If omitted, the default behavior stores the extension's data and passes it straight to `info` in `instantiate`.
 * @member instantiate Called after a node has been instantiated, when this extension is present on it.
 */
export interface IGlTFExtension<IExtensionInfo = any> {
  name: string;
  init?(loader: {game: Game}, parser: IGlTFParser): any;
  parseOnLoad?(info: IExtensionInfo, parser: IGlTFParser): any;
  parseOnEnd?(info: IExtensionInfo, parser: IGlTFParser, model: IGlTFModel): any;
  parse?(info: IExtensionInfo, parser: IGlTFParser, result: any, options?: any): any;
  instantiate?(entity: SceneActor | SceneComponent, info: IExtensionInfo, game: Game, node: INodeWithGlTFExtensions, resource: IGlTFModel): void;
  [key: string]: any;
}
/**
 * @hidden
 *
 * Builds the default `parse` handler for a named extension: it simply stores
 * the extension payload on the node under `gltfExtensions[name]` so that it
 * can be handed to `instantiate` later. Returns `undefined` when no node is
 * provided.
 */
export const getDefaultParse = (name: string) => (info: ISeinNodeExtension, parser: IGlTFParser, node: INodeWithGlTFExtensions) => {
  if (!node) {
    return;
  }
  const extensions = node.gltfExtensions || (node.gltfExtensions = {});
  extensions[name] = info;
  return node;
};
/**
 * Configuration instance for the model-decompression (AMC) extension.
 */
export const AliAMCExtension = (Hilo3d as any).AliAMCExtension as {
  /**
   * Path to the Worker script; relative paths are supported.
   */
  workerURL: string;
  /**
   * Whether to force the use of WASM.
   */
  useWASM: boolean;
  /**
   * Whether to force the use of a Web Worker.
   */
  useWebWorker: boolean;
  /**
   * Whether to decide automatically which backends to use.
   */
  useAuto: boolean;
  /**
   * Accumulated decompression time, for statistics.
   */
  _decodeTotalTime: number;
  /**
   * Manually frees the memory used for decompression.
   */
  freeMemory(): void;
};
/**
 * @hidden
 *
 * Handler for the `Sein_node` glTF extension: applies generic per-node
 * settings (tag, layers, tick/transform flags) and resolves typed
 * `initOptions` through a pluggable registry of option parsers.
 */
export const SeinNodeExtension: IGlTFExtension<ISeinNodeExtension> = {
  name: 'Sein_node',
  // Registry of option parsers keyed by the option's `type` field
  optionParsers: {},
  registerOptionParsers(type: string, parser: (value: any, parser: IGlTFParser, info: ISeinNodeExtension) => any) {
    SeinNodeExtension.optionParsers[type] = parser;
  },
  unregisterOptionParsers(type: string) {
    delete SeinNodeExtension.optionParsers[type];
  },
  // Dispatches one {type, value} option to its registered parser; throws for unknown types
  parseOption(option: any, parser: IGlTFParser, info: ISeinNodeExtension) {
    const {type, value} = option;
    if (type && SeinNodeExtension.optionParsers[type]) {
      return SeinNodeExtension.optionParsers[type](value, parser, info);
    }
    else {
      throw new Error(`You must register parser for type '${type}' in Sein_node.initOptions !`);
    }
  },
  parse(info: ISeinNodeExtension, parser: IGlTFParser, node: INodeWithGlTFExtensions, options?: any) {
    node.gltfExtensions = node.gltfExtensions || {};
    node.gltfExtensions[SeinNodeExtension.name] = info;
    if (!info.initOptions) {
      return node;
    }
    // Resolve every raw {type, value} entry in place to its parsed value
    const initOptions = info.initOptions;
    Object.keys(initOptions).forEach(name => {
      initOptions[name] = SeinNodeExtension.parseOption(initOptions[name], parser, info);
    });
    return node;
  },
  instantiate(entity: SceneActor | SceneComponent, info: ISeinNodeExtension) {
    if (info.updateOnEverTick !== undefined) {
      entity.updateOnEverTick = info.updateOnEverTick;
    }
    if (info.neverTransform !== undefined) {
      entity.isStatic = info.neverTransform;
    }
    // Actor-only settings (components do not carry tag/layers/persistent)
    if (isSceneActor(entity)) {
      if (info.tag) {
        entity.tag = new SName(info.tag);
      }
      if (info.layers) {
        entity.layers.set(info.layers);
      }
      if (info.persistent !== undefined) {
        entity.persistent = info.persistent;
      }
      if (info.emitComponentsDestroy !== undefined) {
        entity.emitComponentsDestroy = info.emitComponentsDestroy;
      }
    }
  }
};
// Built-in option parsers for the primitive and engine types supported in
// `Sein_node.initOptions`. Primitives pass through unchanged.
['Float', 'Int', 'String', 'Bool'].forEach(type => {
  SeinNodeExtension.registerOptionParsers(type, (value: any) => value)
});
SeinNodeExtension.registerOptionParsers('Vec2', (value: number[]) => new Math.Vector2().fromArray(value));
SeinNodeExtension.registerOptionParsers('Vec3', (value: number[]) => new Math.Vector3().fromArray(value));
SeinNodeExtension.registerOptionParsers('Vec4', (value: number[]) => new Math.Vector4().fromArray(value));
SeinNodeExtension.registerOptionParsers('Mat4', (value: number[]) => new Math.Matrix4().fromArray(value));
SeinNodeExtension.registerOptionParsers('Quat', (value: number[]) => new Math.Quaternion().fromArray(value));
SeinNodeExtension.registerOptionParsers('Color', (value: number[]) => new Math.Color().fromArray(value));
// Resource references are resolved against the parser's already-loaded pools
SeinNodeExtension.registerOptionParsers('Tex2D', (value: {index: number}, parser: IGlTFParser) => parser.textures[value.index]);
SeinNodeExtension.registerOptionParsers('TexCube', (value: {index: number}, parser: IGlTFParser) => parser.cubeTextures[value.index]);
SeinNodeExtension.registerOptionParsers('Atlas', (value: {index: number}, parser: IGlTFParser) => parser.atlases[value.index]);
SeinNodeExtension.registerOptionParsers('Mat', (value: {index: number}, parser: IGlTFParser) => parser.materials[value.index]);
// Containers recurse through parseOption so nested typed values get resolved too
SeinNodeExtension.registerOptionParsers('Array', (value: any[], parser: IGlTFParser, info: ISeinNodeExtension) => {
  return value.map(v => SeinNodeExtension.parseOption(v, parser, info))
});
SeinNodeExtension.registerOptionParsers('Object', (value: Object, parser: IGlTFParser, info: ISeinNodeExtension) => {
  const result: any = {};
  Object.keys(value).forEach(name => {
    result[name] = SeinNodeExtension.parseOption(value[name], parser, info);
  });
  return result;
});
/**
 * @hidden
 *
 * Handler for the `Sein_physicBody` glTF extension: attaches a rigid body
 * component and its collider components to the instantiated actor.
 */
export const SeinPhysicBodyExtension: IGlTFExtension<ISeinPhysicBodyExtension> = {
  name: 'Sein_physicBody',
  instantiate(entity: SceneActor | SceneComponent, info: ISeinPhysicBodyExtension, game: Game) {
    // Physic bodies can only live on actors, not on components
    if (!isSceneActor(entity)) {
      Debug.warn(`You could not add physicBody to a component: ${entity.name}, ignore...`);
      return;
    }
    const physicBody = info;
    // Without a physic world there is nothing to register the body with
    if (!game.world.physicWorld) {
      Debug.warn(`Model ${entity.name} in gltf file has physicBody, but "PhysicWorld" is not found in current world, ignore...`);
      return;
    }
    const body = entity.addComponent('rigidBody', RigidBodyComponent, {
      mass: physicBody.mass,
      friction: physicBody.friction,
      restitution: physicBody.restitution,
      unControl: physicBody.unControl,
      physicStatic: physicBody.physicStatic
    });
    if (info.sleeping) {
      entity.rigidBody.sleep();
    }
    // One collider component per entry; unknown collider types are skipped silently
    physicBody.colliders.forEach((collider, index) => {
      const name = `collider-${collider.name || (collider.type + index)}`;
      switch (collider.type) {
        case 'BOX':
          entity.addComponent(name, BoxColliderComponent, collider as ISeinBoxColliderExtension);
          break;
        case 'SPHERE':
          entity.addComponent(name, SphereColliderComponent, collider as ISeinSphereColliderExtension);
          break;
        default:
          break;
      }
    });
    if (physicBody.colliders.length) {
      body.forceSync();
    }
  }
};
/**
 * @hidden
 *
 * Handler for the `Sein_animator` glTF extension. Currently only warns when
 * the target is not an actor; no animator is wired up here — presumably that
 * happens elsewhere (TODO confirm).
 */
export const SeinAnimatorExtension: IGlTFExtension<ISeinNodeExtension> = {
  name: 'Sein_animator',
  instantiate(entity: SceneActor | SceneComponent) {
    if (!isSceneActor(entity)) {
      Debug.warn(`You could not add animator to a component: ${entity.name}, ignore...`);
      return;
    }
  }
};
/**
 * @hidden
 *
 * Handler for the `Sein_renderer` glTF extension: global renderer settings
 * (gamma correction / HDR / exposure) plus per-node lightmap and shadow flags.
 */
export const SeinRendererExtension: IGlTFExtension<ISeinRendererExtension> = {
  name: 'Sein_renderer',
  // Stash the global renderer settings on the parser as soon as the JSON is available
  parseOnLoad(info, parser: IGlTFParser) {
    parser['renderer'] = parser.json.extensions.Sein_renderer || {};
  },
  // Copy the global settings onto the final model so `instantiate` can read them
  parseOnEnd(info, parser: IGlTFParser, model: IGlTFModel) {
    model['renderer'] = parser['renderer'];
    return model;
  },
  instantiate(entity: SceneActor | SceneComponent, info: ISeinRendererExtension, _, __, resource: IGlTFModel) {
    const root = isSceneActor(entity) ? entity.root : entity;
    if (!isStaticMeshComponent(root) && !isPrimitiveComponent(root)) {
      return;
    }
    root.getMaterials().forEach(material => {
      if (isStaticMeshComponent(root) && info.lightMap) {
        // Clone the material before mutating it so the lightmap settings do
        // not leak into other meshes sharing the same material instance
        const matName = (material as any).name;
        material = material.clone();
        root.setMaterial(material, matName);
        const {lightMapIndex, uvChannel, uvRotation, uvScale, uvOffset} = info.lightMap;
        const texture = resource.textures[lightMapIndex];
        texture.uv = uvChannel;
        if (isPBRMaterial(material)) {
          (material as any).lightMap = texture;
        } else if (isRawShaderMaterial(material)) {
          material.setUniform('u_lightMap', texture);
        }
        if (uvChannel === 0) {
          material.uvMatrix = new Matrix3().fromRotationTranslationScale(uvRotation, uvOffset[0], uvOffset[1], uvScale[0], uvScale[1]);
        } else {
          material.uvMatrix1 = new Matrix3().fromRotationTranslationScale(uvRotation, uvOffset[0], uvOffset[1], uvScale[0], uvScale[1]);
        }
      }
      // Fix: these two assignments were previously swapped (`receiveShadows`
      // was read from `info.castShadows` and vice versa)
      material.receiveShadows = info.receiveShadows || false;
      material.castShadows = info.castShadows || false;
      material.gammaCorrection = info.gammaCorrection || false;
      const globalSetting: ISeinRendererGlobalExtension = resource['renderer'];
      if (globalSetting) {
        // Global settings override the per-node ones
        material.gammaCorrection = globalSetting.gammaCorrection || false;
        material.useHDR = globalSetting.useHDR || false;
        // Fix: the ternary was inverted — a defined global exposure used to be
        // discarded and `undefined` written back instead
        material.exposure = globalSetting.exposure !== undefined ? globalSetting.exposure : material.exposure;
      }
    });
  }
};
/**
 * @hidden
 *
 * Handler for the `Sein_ambientLight` glTF extension: builds an ambient
 * light from the global extension data and stores it on the model.
 */
export const SeinAmbientLightExtension: IGlTFExtension<ISeinAmbientLightExtension> = {
  name: 'Sein_ambientLight',
  parseOnEnd(info, parser: IGlTFParser, model: IGlTFModel) {
    const {color, intensity} = info;
    const light = new Hilo3d.AmbientLight({
      name: 'gltf-ambientLight-extension',
      color: new Color(color[0], color[1], color[2]),
      amount: intensity
    });
    model['ambientLight'] = light;
    return model;
  }
};
/**
 * @hidden
 *
 * Resolves `path` against the directory containing `basePath`. URIs that are
 * already absolute (http/blob/data:) or root-relative are returned untouched.
 * Leading `./` segments are dropped and each leading `..` removes one
 * directory from the base.
 */
function getRelativePath(basePath: string, path: string) {
  if (/^(?:http|blob|data:|\/)/.test(path)) {
    return path;
  }
  // Directory of the glTF file, as path segments
  const baseSegments = basePath.replace(/\/[^/]*?$/, '').split('/');
  const segments = path.split('/');
  let consumed = 0;
  while (consumed < segments.length) {
    const segment = segments[consumed];
    if (segment === '..') {
      baseSegments.pop();
    } else if (segment !== '.') {
      break;
    }
    consumed += 1;
  }
  return baseSegments.join('/') + '/' + segments.slice(consumed).join('/');
}
/**
 * @hidden
 *
 * Handler for the `Sein_cubeTexture` glTF extension: preloads every declared
 * cube texture during `init` and exposes them on the parser and model as
 * `cubeTextures`.
 */
export const SeinCubeTextureExtension: IGlTFExtension = {
  name: 'Sein_cubeTexture',
  init(_, parser: IGlTFParser) {
    const game: Game = parser.game;
    const actions = [];
    const extensions = parser.json.extensions || {};
    const source: ISeinCubeTextureExtension = extensions.Sein_cubeTexture || {};
    const textures = source.textures || [];
    const cubeTextures: CubeTexture[] = [];
    parser.cubeTextures = cubeTextures;
    textures.forEach((tex, index) => {
      // Resolve each face image URI relative to the glTF file itself
      const images: string[] = tex.images.map(imageIndex => {
        let uri = parser.getImageUri(imageIndex);
        uri = getRelativePath(parser.src, uri);
        return uri;
      });
      actions.push(Promise.resolve().then(() => new Promise((resolve, reject) => {
        // PNG keeps its alpha channel; everything else is treated as RGB
        const format = /\.png$/.test(images[0]) ? Constants.RGBA : Constants.RGB;
        game.resource.getLoader<CubeTextureLoader>('CubeTexture').load({
          type: 'CubeTexture', url: '', images: {
            right: images[0],
            left: images[1],
            top: images[2],
            bottom: images[3],
            front: images[4],
            back: images[5]
          },
          name: '',
          format: format,
          internalFormat: format,
          isImageCanRelease: tex.isImageCanRelease
        }, {
          onLoading: (_, progress: number) => {},
          onLoaded: entity => {
            // Apply the glTF sampler settings, keeping loader defaults as fallback
            const cubeTex = entity.result;
            const sampler = parser.json.samplers[tex.sampler];
            cubeTextures[index] = cubeTex;
            cubeTex.minFilter = sampler.minFilter || cubeTex.minFilter;
            cubeTex.magFilter = sampler.magFilter || cubeTex.magFilter;
            cubeTex.wrapS = sampler.wrapS || cubeTex.wrapS;
            cubeTex.wrapT = sampler.wrapT || cubeTex.wrapT;
            resolve();
          },
          onError: (_, error) => reject(error)
        });
      })));
    })
    return Promise.all(actions);
  },
  parseOnEnd(info, parser: IGlTFParser, model: IGlTFModel) {
    model.cubeTextures = parser.cubeTextures;
    return model;
  }
}
/**
 * @hidden
 *
 * Handler for the `Sein_imageBasedLighting` glTF extension: collects IBL
 * light definitions during `init`, links their textures in `parseOnLoad`
 * once textures exist, and applies diffuse/specular environment settings
 * onto materials in `parse`.
 */
export const SeinImageBasedLightingExtension: IGlTFExtension<ISeinImageBasedLightingExtension> = {
  name: 'Sein_imageBasedLighting',
  init(_, parser: IGlTFParser) {
    const game: Game = parser.game;
    const extensions = parser.json.extensions || {};
    const iblSources: ISeinImageBasedLightingSourceExtension = extensions.Sein_imageBasedLighting || {};
    const lights = iblSources.lights || [];
    const imageBasedLights: ISeinImageBasedLight[] = [];
    parser.imageBasedLights = imageBasedLights;
    lights.forEach((light, index) => {
      // Texture references (brdfLUT / map) are stored as indices for now and
      // resolved later in parseOnLoad
      imageBasedLights.push({
        diffuse: {
          type: light.diffuse.type,
          intensity: light.diffuse.intensity,
          coefficients: new SphericalHarmonics3().fromArray(light.diffuse.coefficients),
        },
        specular: light.specular ? {
          type: light.specular.type,
          intensity: light.specular.intensity,
          brdfLUTIndex: light.specular.brdfLUT.index,
          includeMipmaps: light.specular.includeMipmaps,
          brdfLUT: null,
          cubeMap: null
        } : null
      });
      const {specular} = imageBasedLights[index];
      if (!specular) {
        return;
      }
      specular.mapIndex = light.specular.map.index;
    });
  },
  parseOnLoad(info, parser: IGlTFParser) {
    // Resolve the indices captured in init into actual texture objects
    parser.imageBasedLights.forEach(light => {
      if (!light.specular) {
        return;
      }
      light.specular.brdfLUT = parser.textures[light.specular.brdfLUTIndex];
      if (light.specular.type == '2D') {
        light.specular.map = parser.textures[light.specular.mapIndex];
      } else {
        light.specular.map = parser.cubeTextures[light.specular.mapIndex];
      }
    });
  },
  parseOnEnd(info, parser: IGlTFParser, model: IGlTFModel) {
    model.imageBasedLights = parser.imageBasedLights;
    return model;
  },
  parse(info, parser: IGlTFParser, entity: any) {
    // material extension
    const material = entity;
    const {light: lIndex, type} = info;
    const light = parser.imageBasedLights[lIndex] as ISeinImageBasedLight;
    if (isPBRMaterial(material)) {
      // (material as any).isDiffuseEnvAndAmbientLightWorkTogether = true;
      // 'ALL' applies the specular environment on top of the diffuse one
      if (type == 'ALL') {
        material.brdfLUT = light.specular.brdfLUT;
        material.specularEnvIntensity = light.specular.intensity;
        material.specularEnvMap = light.specular.map;
        material.isSpecularEnvMapIncludeMipmaps = light.specular.includeMipmaps;
      }
      material.diffuseEnvIntensity = light.diffuse.intensity;
      if (light.diffuse.type == 'SH') {
        material.diffuseEnvSphereHarmonics3 = light.diffuse.coefficients;
      } else {
        /**
         * @todo: env map
         */
      }
    } else if (isRawShaderMaterial(material)) {
      /**
       * @todo: support SeinCustomMaterial
       */
      if (type == 'ALL') {
        material.setUniform('u_brdfLUT', light.specular.brdfLUT);
        material.setUniform('u_specularEnvIntensity', light.specular.intensity);
        material.setUniform('u_specularEnvMap', light.specular.map);
        (material as any).isSpecularEnvMapIncludeMipmaps = light.specular.includeMipmaps;
      }
      material.setUniform('u_diffuseEnvIntensity', light.diffuse.intensity);
      if (light.diffuse.type == 'SH') {
        material.setUniform('u_diffuseEnvSphereHarmonics3', light.diffuse.coefficients);
      } else {
        /**
         * @todo: env map
         */
      }
    }
    return material;
  }
};
/**
 * @hidden
 *
 * Handler for the `Sein_atlas` glTF extension: preloads every atlas texture
 * during `init` and exposes the resulting AtlasManagers on the model.
 */
export const SeinAtlasExtension: IGlTFExtension = {
  name: 'Sein_atlas',
  init(_, parser: IGlTFParser) {
    const game: Game = parser.game;
    const actions = [];
    const extensions = parser.json.extensions || {};
    const ex: ISeinAtlasExtension = extensions.Sein_atlas || {};
    const atlasesSource = ex.atlases || [];
    const atlases: AtlasManager[] = [];
    parser.atlases = atlases;
    atlasesSource.forEach((atlas, index) => {
      // Reserve the slot so atlases keep their declared order even though
      // loading completes asynchronously
      atlases.push(null);
      actions.push(Promise.resolve().then(() => new Promise((resolve, reject) => {
        const source = atlas.meta.image.index;
        const uri = parser.getImageUri(source);
        const texture = new LazyTexture() as any;
        texture.uv = undefined;
        texture.crossOrigin = true;
        texture.autoLoad = false;
        (texture as any).resType = parser.getImageType(source);
        texture.src = uri;
        texture.name = atlas.name;
        texture.isImageCanRelease = atlas.isImageCanRelease;
        // Shared handler for both 'load' and 'error'; detaches itself either way
        const OnTextureLoad = (error?: Error) => {
          texture.off('load', OnTextureLoad);
          texture.off('error', OnTextureLoad);
          if (error) {
            return reject(error);
          }
          atlases[index] = new AtlasManager({texture, frames: atlas.frames, meta: atlas.meta}, false);
          atlases[index].name = new SName(atlas.name);
          atlases[index].isImageCanRelease = atlas.isImageCanRelease;
          resolve();
        };
        texture.on('load', () => OnTextureLoad());
        texture.on('error', error => OnTextureLoad(error));
        texture.load(true);
      })));
    });
    return Promise.all(actions);
  },
  parseOnEnd(info, parser: IGlTFParser, model: IGlTFModel) {
    model.atlases = parser.atlases;
    return model;
  }
};
/**
 * @hidden
 *
 * This extension handler is global only: it loads the material scripts
 * declared in the file by injecting <script> tags into the document head.
 */
export const SeinCustomMaterialExtension: IGlTFExtension = {
  name: 'Sein_customMaterial',
  // Remembers script URLs that were already injected so each loads only once
  cache: {},
  init(_, parser: IGlTFParser) {
    const actions = [];
    const extensions = parser.json.extensions || {};
    const ex: ISeinCustomMaterialSourceExtension = extensions.Sein_customMaterial || {};
    const scripts = ex.scripts || [];
    parser.scripts = scripts;
    scripts.forEach((script, index) => {
      let url = script.uri;
      // NOTE(review): the cache is keyed on the raw (pre-resolution) URI, so
      // the same relative path referenced from two different glTF files is
      // only loaded once — confirm this is intended.
      if (SeinCustomMaterialExtension.cache[url]) {
        return;
      }
      SeinCustomMaterialExtension.cache[url] = true;
      url = getRelativePath(parser.src, url);
      actions.push(new Promise((resolve, reject) => {
        const s = document.createElement('script');
        s.onload = () => resolve();
        s.onerror = error => reject(error);
        s.src = url;
        document.head.append(s);
      }));
    });
    return Promise.all(actions);
  }
};
/**
 * @hidden
 *
 * Placeholder Hilo3d node standing in for a sprite during glTF parsing;
 * carries the raw sprite data and, once resolved, its material.
 */
export class FakeHiloSprite extends Hilo3d.Node {
  public className: string = 'FakeHiloSprite';
  // Marker flag for duck-typed identification of this placeholder
  public isFakeHiloSprite = true;
  // Raw sprite description from the `Sein_sprite` extension
  public sprite: ISeinSpriteSourceExtension['sprites'][0];
  // Material resolved later by SeinSpriteExtension.parseOnEnd
  public material: Material;
}
/**
 * @hidden
 *
 * Handler for the `Sein_sprite` glTF extension: creates placeholder sprite
 * nodes up front, resolves their materials once everything is parsed, and
 * attaches them to their parent nodes.
 */
export const SeinSpriteExtension: IGlTFExtension<ISeinSpriteExtension> = {
  name: 'Sein_sprite',
  init(_, parser: IGlTFParser) {
    const extensions = parser.json.extensions || {};
    const ex: ISeinSpriteSourceExtension = extensions.Sein_sprite || {};
    parser.sprites = (ex.sprites || []).map(sprite => {
      const res = new FakeHiloSprite();
      res.sprite = sprite;
      return res;
    });
  },
  parseOnEnd(info: any, parser: IGlTFParser, model: IGlTFModel) {
    model['sprites'] = parser.sprites;
    // Materials only exist at this point, so resolve the references here
    parser.sprites.forEach(sp => {
      if (sp.sprite.material) {
        sp.material = parser.materials[sp.sprite.material.index];
      }
    });
    return model;
  },
  parse(info: any, parser: IGlTFParser, entity: any) {
    const node = entity;
    node.addChild(parser.sprites[info.index])
    return node;
  }
};
/**
 * @hidden
 *
 * Handler for the `Sein_skybox` glTF extension on camera nodes: builds a
 * SkyboxMaterial from the extension data and assigns it as the camera's
 * background material.
 */
export const SeinSkyboxExtension: IGlTFExtension<ISeinSkyboxExtension> = {
  name: 'Sein_skybox',
  parse(info: ISeinSkyboxExtension, parser: IGlTFParser, entity: any) {
    const camera = entity;
    const material = new SkyboxMaterial({
      type: info.type,
      uniforms: {
        u_color: {value: new Color().fromArray(info.color)},
        u_factor: {value: info.factor},
        // 'Panoramic' skyboxes sample a 2D texture, all other types a cube texture
        u_texture: info.texture && {value: info.type === 'Panoramic' ? parser.textures[info.texture.index] : parser.cubeTextures[info.texture.index]},
        u_rotation: info.rotation !== undefined && {value: info.rotation},
        u_exposure: info.exposure !== undefined && {value: info.exposure},
        u_degrees: info.degrees !== undefined && {value: info.degrees},
      }
    });
    const globalSetting: ISeinRendererGlobalExtension = parser.renderer;
    if (globalSetting) {
      material.gammaCorrection = globalSetting.gammaCorrection || false;
      material.useHDR = globalSetting.useHDR || false;
      // Fix: the ternary was inverted — a defined global exposure used to be
      // discarded and `undefined` written back instead
      material.exposure = globalSetting.exposure !== undefined ? globalSetting.exposure : material.exposure;
    }
    camera.backgroundMat = material;
    return camera;
  }
};
/**
* @hidden
*/
/**
 * Handles the `Sein_textureImprove` glTF extension: copies optional texture
 * tweaks (image-release flag, anisotropy, pixel type) onto the parsed texture.
 *
 * Fix: `isImageCanRelease` was assigned unconditionally, so a payload that
 * omitted the flag clobbered the texture's existing value with `undefined` —
 * inconsistent with the two sibling assignments, which preserve the texture's
 * current setting when the extension does not provide one.
 */
export const SeinTextureImproveExtension: IGlTFExtension<ISeinTextureImproveExtension> = {
  name: 'Sein_textureImprove',
  parse(info: ISeinTextureImproveExtension, parser: IGlTFParser, texture: Texture) {
    // Only override when the extension actually provides the flag.
    texture.isImageCanRelease = info.isImageCanRelease !== undefined ? info.isImageCanRelease : texture.isImageCanRelease;
    // Falsy payload values (0 / undefined) keep the texture's current settings.
    texture.anisotropic = info.anisotropic || texture.anisotropic;
    texture.type = info.textureType || texture.type;
    return texture;
  }
};
/**
* @hidden
*/
function parse(info, parser: IGlTFParser, material, options) {
if (options.isGlobalExtension) {
return null;
}
const textures = parser.textures || [];
const techniqueInfo = parser.techniques[info.technique];
if (!techniqueInfo) {
return null;
}
const programInfo = parser.programs[techniqueInfo.program];
if (!programInfo) {
return null;
}
const fragmentText = parser.shaders[programInfo.fragmentShader];
const vertexText = parser.shaders[programInfo.vertexShader];
const uniformsInfo = techniqueInfo.uniforms || {};
const attributesInfo = techniqueInfo.attributes || {};
const valuesInfo = info.values || {};
const attributes = {};
const uniforms = {};
for (const uniformName in uniformsInfo) {
const uniformDef = uniformsInfo[uniformName] || {};
const uniformValue = valuesInfo[uniformName] !== undefined ? valuesInfo[uniformName] : uniformDef.value;
let uniformObject;
if (uniformValue !== undefined) {
if (uniformDef.type === Constants.SAMPLER_2D) {
const textureIndex = uniformValue.index || 0;
uniformObject = {value: textures[textureIndex]};
} else {
uniformObject = {value: uniformValue};
}
} else if (uniformDef.semantic && Hilo3d.semantic[uniformDef.semantic]) {
// const semanticFunc = Hilo3d.semantic[uniformDef.semantic];
// const nodeIndex = uniformDef.node;
// let node;
// if (nodeIndex !== undefined) {
// uniformObject = {
// get(mesh, material, programInfo) {
// if (node === undefined) {
// node = parser.node.getChildByFn((node) => {
// return node.animationId === nodeIndex;
// }) || mesh;
// }
// return semanticFunc.get(node, material, programInfo);
// }
// };
// } else {
// uniformObject = uniformDef.semantic;
// }
uniformObject = uniformDef.semantic;
} else {
uniformObject = (Hilo3d.semantic as any).blankInfo;
}
uniforms[uniformName] = uniformObject;
}
for (const attributeName in attributesInfo) {
const attributeValue = attributesInfo[attributeName] || {};
if (attributeValue.semantic) {
attributes[attributeName] = attributeValue.semantic;
}
}
const shaderMaterial = new RawShaderMaterial({
vs: vertexText,
fs: fragmentText,
attributes,
uniforms
});
return shaderMaterial;
}
// Replace Hilo3d's built-in KHR_techniques_webgl handler with the custom
// `parse` implementation defined above.
(Hilo3d as any).GLTFExtensions.KHR_techniques_webgl.parse = parse;
import {
Subject,
takeUntil,
} from "rxjs";
import { IContentProtection } from "../../types";
import {
expectLicenseRequestMessage,
MediaKeysImpl,
MediaKeySystemAccessImpl,
mockCompat,
} from "./utils";
// Test suite: verifies how the EMEManager handles the `serverCertificate`
// keySystems option — it must be set exactly once, after MediaKeys creation
// but before any MediaKeySession is created; failures (rejection or throw)
// must surface as a warning without stopping playback setup; and a MediaKeys
// without `setServerCertificate` must simply be used as-is.
// Each test drives the manager's event stream and counts events with
// `eventsReceived` to assert the exact ordering.
describe("core - eme - global tests - server certificate", () => {
  // getLicense implementation that never resolves, keeping license requests pending.
  const getLicenseSpy = jest.fn(() => {
    return new Promise(() => { /* noop */ });
  });
  /** Default video element used in our tests. */
  const videoElt = document.createElement("video");
  // Dummy certificate payload expected to be forwarded to setServerCertificate.
  const serverCertificate = [1, 2, 3];
  /** Default keySystems configuration used in our tests. */
  const ksConfigCert = [{ type: "com.widevine.alpha",
                          getLicense: getLicenseSpy,
                          serverCertificate }];
  beforeEach(() => {
    // Reset the module registry and mocks so each test re-requires a fresh
    // EMEManager that sees that test's own mock implementations.
    jest.resetModules();
    jest.restoreAllMocks();
  });
  /* eslint-disable max-len */
  it("should set the serverCertificate only after the MediaKeys is attached", (done) => {
  /* eslint-enable max-len */
    // == mocks ==
    mockCompat();
    const createSessionSpy = jest.spyOn(MediaKeysImpl.prototype, "createSession");
    // The certificate must be set before any session is created.
    const serverCertificateSpy =
      jest.spyOn(MediaKeysImpl.prototype, "setServerCertificate")
        .mockImplementation((_serverCertificate : BufferSource) => {
          expect(createSessionSpy).not.toHaveBeenCalled();
          return Promise.resolve(true);
        });
    // == vars ==
    let eventsReceived = 0;
    const initDataSubject = new Subject<IContentProtection>();
    const initData = new Uint8Array([54, 55, 75]);
    const kill$ = new Subject<void>();
    // == test ==
    // Re-require after jest.resetModules() so the EMEManager picks up the mocks.
    const EMEManager = require("../../eme_manager").default;
    EMEManager(videoElt, ksConfigCert, initDataSubject)
      .pipe(takeUntil(kill$))
      // eslint-disable-next-line @typescript-eslint/no-explicit-any
      .subscribe((evt : { type : string; value : any }) => {
        switch (++eventsReceived) {
          case 1:
            // 1 - MediaKeys created: authorize attaching them to the element.
            expect(evt.type).toEqual("created-media-keys");
            evt.value.canAttachMediaKeys.setValue(true);
            break;
          case 2:
            // 2 - MediaKeys attached: the certificate was set exactly once,
            // before any session was created. Push a first encrypted event.
            expect(evt.type).toEqual("attached-media-keys");
            expect(createSessionSpy).not.toHaveBeenCalled();
            expect(serverCertificateSpy).toHaveBeenCalledTimes(1);
            expect(serverCertificateSpy).toHaveBeenCalledWith(serverCertificate);
            initDataSubject.next({ type: "cenc",
                                   values: [ { systemId: "15", data: initData } ] });
            break;
          case 3:
            // 3 - first license request: certificate still set only once;
            // push a second, different init data after a tick.
            expectLicenseRequestMessage(evt,
                                        { type: "cenc",
                                          values: [ { systemId: "15",
                                                      data: initData } ] });
            setTimeout(() => {
              expect(serverCertificateSpy).toHaveBeenCalledTimes(1);
              expect(createSessionSpy).toHaveBeenCalledTimes(1);
              initDataSubject.next({ type: "cenc2",
                                     values: [ { systemId: "15", data: initData } ] });
            }, 10);
            break;
          case 4:
            // 4 - second license request: still one certificate call,
            // two sessions total; stop the stream and finish.
            expectLicenseRequestMessage(evt,
                                        { type: "cenc2",
                                          values: [ { systemId: "15",
                                                      data: initData } ] });
            setTimeout(() => {
              kill$.next();
              expect(serverCertificateSpy).toHaveBeenCalledTimes(1);
              expect(createSessionSpy).toHaveBeenCalledTimes(2);
              done();
            }, 10);
            break;
          default:
            throw new Error(`Unexpected event: ${evt.type}`);
        }
      });
  });
  /* eslint-disable max-len */
  it("should emit warning if serverCertificate call rejects but still continue", (done) => {
  /* eslint-enable max-len */
    // == mocks ==
    mockCompat();
    const createSessionSpy = jest.spyOn(MediaKeysImpl.prototype, "createSession");
    // setServerCertificate rejects: the manager should warn, then carry on.
    const serverCertificateSpy =
      jest.spyOn(MediaKeysImpl.prototype, "setServerCertificate")
        .mockImplementation((_serverCertificate : BufferSource) => {
          expect(createSessionSpy).not.toHaveBeenCalled();
          return Promise.reject("some error");
        });
    // == vars ==
    let eventsReceived = 0;
    const initDataSubject = new Subject<IContentProtection>();
    const initData = new Uint8Array([54, 55, 75]);
    const kill$ = new Subject<void>();
    // == test ==
    const EMEManager = require("../../eme_manager").default;
    EMEManager(videoElt, ksConfigCert, initDataSubject)
      .pipe(takeUntil(kill$))
      // eslint-disable-next-line @typescript-eslint/no-explicit-any
      .subscribe((evt : { type : string; value : any }) => {
        switch (++eventsReceived) {
          case 1:
            expect(evt.type).toEqual("created-media-keys");
            evt.value.canAttachMediaKeys.setValue(true);
            break;
          case 2:
            // The rejection is surfaced as a LICENSE_SERVER_CERTIFICATE_ERROR
            // warning before the MediaKeys are reported as attached.
            expect(evt.type).toEqual("warning");
            expect(createSessionSpy).not.toHaveBeenCalled();
            expect(serverCertificateSpy).toHaveBeenCalledTimes(1);
            expect(serverCertificateSpy).toHaveBeenCalledWith(serverCertificate);
            expect(evt.value.name).toEqual("EncryptedMediaError");
            expect(evt.value.type).toEqual("ENCRYPTED_MEDIA_ERROR");
            expect(evt.value.code).toEqual("LICENSE_SERVER_CERTIFICATE_ERROR");
            expect(evt.value.message)
              .toEqual(
                "EncryptedMediaError (LICENSE_SERVER_CERTIFICATE_ERROR) " +
                "`setServerCertificate` error");
            break;
          case 3:
            // Attachment still happens despite the failed certificate call.
            expect(evt.type).toEqual("attached-media-keys");
            expect(createSessionSpy).not.toHaveBeenCalled();
            expect(serverCertificateSpy).toHaveBeenCalledTimes(1);
            initDataSubject.next({ type: "cenc",
                                   values: [ { systemId: "15", data: initData } ] });
            break;
          case 4:
            expectLicenseRequestMessage(evt,
                                        { type: "cenc",
                                          values: [ { systemId: "15",
                                                      data: initData } ] });
            setTimeout(() => {
              expect(serverCertificateSpy).toHaveBeenCalledTimes(1);
              expect(createSessionSpy).toHaveBeenCalledTimes(1);
              initDataSubject.next({ type: "cenc2",
                                     values: [ { systemId: "15", data: initData } ] });
            }, 10);
            break;
          case 5:
            expectLicenseRequestMessage(evt,
                                        { type: "cenc2",
                                          values: [ { systemId: "15",
                                                      data: initData } ] });
            setTimeout(() => {
              kill$.next();
              expect(serverCertificateSpy).toHaveBeenCalledTimes(1);
              expect(createSessionSpy).toHaveBeenCalledTimes(2);
              done();
            }, 10);
            break;
          default:
            throw new Error(`Unexpected event: ${evt.type}`);
        }
      });
  });
  /* eslint-disable max-len */
  it("should emit warning if serverCertificate call throws but still continue", (done) => {
  /* eslint-enable max-len */
    // == mocks ==
    mockCompat();
    const createSessionSpy = jest.spyOn(MediaKeysImpl.prototype, "createSession");
    // setServerCertificate throws synchronously this time; same warning flow.
    const serverCertificateSpy =
      jest.spyOn(MediaKeysImpl.prototype, "setServerCertificate")
        .mockImplementation((_serverCertificate : BufferSource) => {
          expect(createSessionSpy).not.toHaveBeenCalled();
          throw new Error("some error");
        });
    // == vars ==
    let eventsReceived = 0;
    const initDataSubject = new Subject<IContentProtection>();
    const initData = new Uint8Array([54, 55, 75]);
    const kill$ = new Subject<void>();
    // == test ==
    const EMEManager = require("../../eme_manager").default;
    EMEManager(videoElt, ksConfigCert, initDataSubject)
      .pipe(takeUntil(kill$))
      // eslint-disable-next-line @typescript-eslint/no-explicit-any
      .subscribe((evt : { type : string; value : any }) => {
        switch (++eventsReceived) {
          case 1:
            expect(evt.type).toEqual("created-media-keys");
            evt.value.canAttachMediaKeys.setValue(true);
            break;
          case 2:
            // The thrown Error's message is embedded in the warning's message.
            expect(evt.type).toEqual("warning");
            expect(serverCertificateSpy).toHaveBeenCalledTimes(1);
            expect(serverCertificateSpy).toHaveBeenCalledWith(serverCertificate);
            expect(evt.value.name).toEqual("EncryptedMediaError");
            expect(evt.value.type).toEqual("ENCRYPTED_MEDIA_ERROR");
            expect(evt.value.code).toEqual("LICENSE_SERVER_CERTIFICATE_ERROR");
            expect(evt.value.message)
              .toEqual(
                "EncryptedMediaError (LICENSE_SERVER_CERTIFICATE_ERROR) Error: some error"
              );
            break;
          case 3:
            expect(evt.type).toEqual("attached-media-keys");
            expect(createSessionSpy).not.toHaveBeenCalled();
            expect(serverCertificateSpy).toHaveBeenCalledTimes(1);
            initDataSubject.next({ type: "cenc",
                                   values: [ { systemId: "15", data: initData } ] });
            break;
          case 4:
            expectLicenseRequestMessage(evt,
                                        { type: "cenc",
                                          values: [ { systemId: "15",
                                                      data: initData } ] });
            setTimeout(() => {
              expect(serverCertificateSpy).toHaveBeenCalledTimes(1);
              expect(createSessionSpy).toHaveBeenCalledTimes(1);
              initDataSubject.next({ type: "cenc2",
                                     values: [ { systemId: "15", data: initData } ] });
            }, 10);
            break;
          case 5:
            expectLicenseRequestMessage(evt,
                                        { type: "cenc2",
                                          values: [ { systemId: "15",
                                                      data: initData } ] });
            setTimeout(() => {
              kill$.next();
              expect(serverCertificateSpy).toHaveBeenCalledTimes(1);
              expect(createSessionSpy).toHaveBeenCalledTimes(2);
              done();
            }, 10);
            break;
          default:
            throw new Error(`Unexpected event: ${evt.type}`);
        }
      });
  });
  /* eslint-disable max-len */
  it("should just continue if serverCertificate is undefined", (done) => {
  /* eslint-enable max-len */
    // == mocks ==
    mockCompat();
    const createSessionSpy = jest.spyOn(MediaKeysImpl.prototype, "createSession");
    // Produce a MediaKeys whose setServerCertificate method does not exist:
    // the manager should skip certificate handling entirely.
    jest.spyOn(MediaKeySystemAccessImpl.prototype, "createMediaKeys")
      .mockImplementation(() => {
        const mediaKeys = new MediaKeysImpl();
        (mediaKeys as { setServerCertificate? : unknown })
          .setServerCertificate = undefined;
        return Promise.resolve(mediaKeys);
      });
    const serverCertificateSpy =
      jest.spyOn(MediaKeysImpl.prototype, "setServerCertificate")
        .mockImplementation((_serverCertificate : BufferSource) => {
          expect(createSessionSpy).not.toHaveBeenCalled();
          return Promise.resolve(true);
        });
    // == vars ==
    let eventsReceived = 0;
    const initDataSubject = new Subject<IContentProtection>();
    const initData = new Uint8Array([54, 55, 75]);
    const kill$ = new Subject<void>();
    // == test ==
    const EMEManager = require("../../eme_manager").default;
    EMEManager(videoElt, ksConfigCert, initDataSubject)
      .pipe(takeUntil(kill$))
      // eslint-disable-next-line @typescript-eslint/no-explicit-any
      .subscribe((evt : { type : string; value : any }) => {
        switch (++eventsReceived) {
          case 1:
            expect(evt.type).toEqual("created-media-keys");
            evt.value.canAttachMediaKeys.setValue(true);
            break;
          case 2:
            // No warning and no certificate call: attachment happens directly.
            expect(evt.type).toEqual("attached-media-keys");
            expect(createSessionSpy).not.toHaveBeenCalled();
            expect(serverCertificateSpy).toHaveBeenCalledTimes(0);
            initDataSubject.next({ type: "cenc",
                                   values: [ { systemId: "15", data: initData } ] });
            break;
          case 3:
            expectLicenseRequestMessage(evt,
                                        { type: "cenc",
                                          values: [ { systemId: "15",
                                                      data: initData } ] });
            setTimeout(() => {
              expect(serverCertificateSpy).toHaveBeenCalledTimes(0);
              expect(createSessionSpy).toHaveBeenCalledTimes(1);
              initDataSubject.next({ type: "cenc2",
                                     values: [ { systemId: "15", data: initData } ] });
            }, 10);
            break;
          case 4:
            expectLicenseRequestMessage(evt,
                                        { type: "cenc2",
                                          values: [ { systemId: "15",
                                                      data: initData } ] });
            setTimeout(() => {
              kill$.next();
              expect(serverCertificateSpy).toHaveBeenCalledTimes(0);
              expect(createSessionSpy).toHaveBeenCalledTimes(2);
              done();
            }, 10);
            break;
          default:
            throw new Error(`Unexpected event: ${evt.type}`);
        }
      });
  });
});
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.