import * as angular from 'angular';
import { PersonaStyleEnum } from '../../core/personaStyleEnum';
import { PresenceEnum } from '../../core/personaPresenceEnum';
import { PersonaInitialsColor } from '../../core/personaInitialsColorEnum';
import { PersonaSize } from './sizeEnum';
/**
* @ngdoc directive
* @name uifPersonaText
* @module officeuifabric.components.persona
* @restrict E
*
* @description
* `<uif-persona-text>` directive is used to render information about associated user.
* This directive class is used to provide functionality for multiple text directives:
* - uif-persona-text-primary
* - uif-persona-text-secondary
* - uif-persona-text-tertiary
* - uif-persona-text-optional
* Type of directive is determined by the parameter injected into the factory method:
*
* <pre>
* .directive('uifPersonaTertiaryText', PersonaTextDirective.factory('tertiary'))
* .directive('uifPersonaOptionalText', PersonaTextDirective.factory(''));
* </pre>
*/
/**
 * Directive class rendering one of the persona text lines (primary,
 * secondary, tertiary or optional). The concrete variant is selected by the
 * string passed to {@link PersonaTextDirective.factory}; any unknown value
 * falls back to 'optional' (see the constructor).
 */
export class PersonaTextDirective implements angular.IDirective {
    public restrict: string = 'E';
    public transclude: boolean = true;
    public replace: boolean = false;
    public scope: boolean = false;

    // Maps the directive type passed to the factory to the Fabric CSS class
    // rendered by the template.
    private availableClasses: { [directiveType: string]: string } = {
        'optional': 'ms-Persona-optionalText',
        'primary': 'ms-Persona-primaryText',
        'secondary': 'ms-Persona-secondaryText',
        'tertiary': 'ms-Persona-tertiaryText'
    };

    /**
     * Creates a directive factory bound to the given text variant.
     * @param type 'primary' | 'secondary' | 'tertiary' | 'optional';
     *             anything else is treated as 'optional'.
     */
    public static factory(type: string): angular.IDirectiveFactory {
        const directive: angular.IDirectiveFactory = () => new PersonaTextDirective(type);
        return directive;
    }

    // Template function based on the passed type. Typed explicitly (instead of
    // `any`) so the compiler checks the signature; the parameters are part of
    // Angular's template-function contract even though they are unused here.
    public template: ($element: angular.IAugmentedJQuery, $attrs: angular.IAttributes) => string =
        () => `<div class="${this.availableClasses[this.directiveType]}" ng-transclude></div>`;

    public constructor(private directiveType: string) {
        // Unknown directive types degrade gracefully to the optional style.
        if (angular.isUndefined(this.availableClasses[this.directiveType])) {
            this.directiveType = 'optional';
        }
    }
}
/**
* @ngdoc directive
* @name uifPersonaInitials
* @module officeuifabric.components.persona
* @restrict E
*
* @description
* `<uif-persona-initials>` directive is used to render initials of the user when picture is not provided.
*
* @usage
*
* <uif-persona uif-style="square" uif-size="xlarge" uif-presence="available" uif-image-url="Persona.Person2.png">
* <uif-persona-initials>AL</uif-persona-initials>
* </uif-persona>
*/
/**
 * Renders the user's initials inside the persona image area; used when no
 * picture is available. The `uif-color` attribute selects the background
 * color modifier class.
 */
export class PersonaInitialsDirective implements angular.IDirective {
public restrict: string = 'E';
public transclude: boolean = true;
public replace: boolean = false;
// Requires the parent uif-persona controller (gives access to $log).
public require: string[] = ['^uifPersona'];
public scope: {} = {
'uifColor': '@'
};
public template: string = '<div class="ms-Persona-initials ms-Persona-initials--{{uifColor}}" ng-transclude></div> ';
public static factory(): angular.IDirectiveFactory {
const directive: angular.IDirectiveFactory = () => new PersonaInitialsDirective();
return directive;
}
public link: angular.IDirectiveLinkFn = (
scope: IPersonaInitialsScope,
element: angular.IAugmentedJQuery,
attrs: IPersonaInitialsAttributes,
ctrls: any) => {
// ctrls[0] is the required ^uifPersona controller.
let personaController: PersonaController = ctrls[0];
// Default to blue initials when no uif-color attribute is present.
if (angular.isUndefined(attrs.uifColor)) {
scope.uifColor = PersonaInitialsColor[PersonaInitialsColor.blue];
}
// Log (but do not reject) invalid color values whenever uifColor changes;
// the template still interpolates whatever value was supplied.
scope.$watch('uifColor', (newColor: string) => {
if (angular.isUndefined(PersonaInitialsColor[newColor])) {
personaController.$log.error('Error [ngOfficeUiFabric] officeuifabric.components.persona - "' + newColor + '"' +
' is not a valid value for uifColor.' +
' It should be lightBlue, blue, darkBlue, teal, lightGreen, green,' +
' darkGreen, lightPink, pink, magenta, purple, black, orange, red or darkRed.');
}
});
}
}
/**
* @ngdoc interface
* @name IPersonaInitialsScope
* @module officeuifabric.components.persona
*
* @description
* Scope used by the persona directive.
*
* @property {string} uifColor Color used for initials
*/
export interface IPersonaInitialsScope extends angular.IScope {
// Color modifier for the initials (e.g. 'blue'); see PersonaInitialsColor.
uifColor: string;
}
/**
* @ngdoc interface
* @name IPersonaInitialsAttributes
* @module officeuifabric.components.persona
*
* @description
* Attributes used by the directive
*
* @property {string} uifColor Determines the color used for initials
*/
export interface IPersonaInitialsAttributes extends angular.IAttributes {
// Raw value of the uif-color attribute; undefined when not set.
uifColor: string;
}
/**
* @ngdoc directive
* @name uifPersona
* @module officeuifabric.components.persona
*
* @restrict E
*
* @description
* `<uif-persona>` is the persona directive.
*
* @see {link http://dev.office.com/fabric/components/persona}
*
* @usage
*
* <uif-persona uif-style="square" uif-size="xlarge" uif-presence="available" uif-image-url="Persona.Person2.png">
* <uif-persona-initials>AL</uif-persona-initials>
* <uif-persona-primary-text>Alton Lafferty</uif-persona-primary-text>
* <uif-persona-secondary-text>Interior Designer, Contoso</uif-persona-secondary-text>
* <uif-persona-tertiary-text>Office: 7/1234</uif-persona-tertiary-text>
* <uif-persona-optional-text>Available - Video capable</uif-persona-optional-text>
* </uif-persona>
*
*/
/**
 * Root persona directive. Renders the image area, presence indicator and
 * details area, and distributes transcluded child directives into them in
 * the link function below.
 */
export class PersonaDirective implements angular.IDirective {
public restrict: string = 'E';
public transclude: boolean = true;
public replace: boolean = true;
public require: string[] = ['uifPersona'];
public controller: any = PersonaController;
public scope: {} = {
'uifImageUrl': '@',
'uifPresence': '@',
'uifSize': '@'
};
public template: string = '<div class="ms-Persona" ng-class="getPersonaClasses()">' +
'<div class="ms-Persona-imageArea" ng-show="getImageAreaVisibility()">' +
'<img class="ms-Persona-image" ng-src="{{uifImageUrl}}" ng-if="uifImageUrl">' +
'</div>' +
'<div class="ms-Persona-presence"></div>' +
'<div class="ms-Persona-details"></div>' +
'</div>';
// mapping enum to CSS classes for size
// NOTE: PersonaSize.medium has no entry on purpose — medium is the default
// Fabric style and needs no modifier class.
private uifSizeClasses: { [index: number]: string } = {
[PersonaSize.tiny]: 'ms-Persona--tiny',
[PersonaSize.xsmall]: 'ms-Persona--xs',
[PersonaSize.small]: 'ms-Persona--sm',
[PersonaSize.large]: 'ms-Persona--lg',
[PersonaSize.xlarge]: 'ms-Persona--xl'
};
// mapping enum to CSS classes for presence
private uifPresenceClasses: { [index: number]: string } = {
[PresenceEnum.available]: 'ms-Persona--available',
[PresenceEnum.away]: 'ms-Persona--away',
[PresenceEnum.blocked]: 'ms-Persona--blocked',
[PresenceEnum.busy]: 'ms-Persona--busy',
[PresenceEnum.dnd]: 'ms-Persona--dnd',
[PresenceEnum.offline]: 'ms-Persona--offline'
};
public static factory(): angular.IDirectiveFactory {
const directive: angular.IDirectiveFactory = () => new PersonaDirective();
return directive;
}
public link: angular.IDirectiveLinkFn = (
scope: IPersonaScope,
element: angular.IAugmentedJQuery,
attrs: IPersonaAttributes,
controllers: any,
transclude: angular.ITranscludeFunction): void => {
let personaController: PersonaController = controllers[0];
// validate attributes
// Each invalid attribute logs an error and aborts linking entirely.
if (angular.isDefined(attrs.uifSize) && angular.isUndefined(PersonaSize[attrs.uifSize])) {
personaController.$log.error('Error [ngOfficeUiFabric] officeuifabric.components.persona - "' +
attrs.uifSize + '" is not a valid value for uifSize. It should be tiny, xsmall, small, medium, large, xlarge.');
return;
}
if (angular.isDefined(attrs.uifStyle) && angular.isUndefined(PersonaStyleEnum[attrs.uifStyle])) {
personaController.$log.error('Error [ngOfficeUiFabric] officeuifabric.components.persona - "' +
attrs.uifStyle + '" is not a valid value for uifStyle. It should be round or square.');
return;
}
if (angular.isDefined(attrs.uifPresence) && angular.isUndefined(PresenceEnum[attrs.uifPresence])) {
personaController.$log.error('Error [ngOfficeUiFabric] officeuifabric.components.persona - "' +
attrs.uifPresence + '" is not a valid value for uifPresence. It should be available, away, blocked, busy, dnd or offline.');
return;
}
// The image area is hidden only for the tiny persona size.
scope.getImageAreaVisibility = () => {
return (PersonaSize[attrs.uifSize] !== PersonaSize.tiny);
};
// determines CSS for persona component
scope.getPersonaClasses = () => {
let personaClasses: string[] = [];
let size: number = PersonaSize[attrs.uifSize];
// Presence defaults to offline when the attribute is absent.
let presence: number = angular.isDefined(attrs.uifPresence) ? PresenceEnum[attrs.uifPresence] : PresenceEnum.offline;
if (PersonaStyleEnum[attrs.uifStyle] === PersonaStyleEnum.square) {
personaClasses.push('ms-Persona--square');
}
// No size class is added for medium/unknown sizes (not in the map).
let sizeClass: string = this.uifSizeClasses[size];
if (angular.isDefined(sizeClass)) {
personaClasses.push(sizeClass);
}
personaClasses.push(this.uifPresenceClasses[presence]);
return personaClasses.join(' ');
};
// Manually distribute transcluded children: text directives go into the
// details wrapper, initials go in front of the image in the image area.
// Unknown elements are silently dropped.
transclude((clone: angular.IAugmentedJQuery) => {
let detailsWrapper: angular.IAugmentedJQuery = angular.element(element[0].getElementsByClassName('ms-Persona-details'));
let imageArea: angular.IAugmentedJQuery = angular.element(element[0].getElementsByClassName('ms-Persona-imageArea'));
for (let i: number = 0; i < clone.length; i++) {
let tagName: string = clone[i].tagName;
switch (tagName) {
// text directives go to persona details
case 'UIF-PERSONA-PRIMARY-TEXT':
case 'UIF-PERSONA-SECONDARY-TEXT':
case 'UIF-PERSONA-TERTIARY-TEXT':
case 'UIF-PERSONA-OPTIONAL-TEXT':
detailsWrapper.append(clone[i]);
break;
// initials go to image area
case 'UIF-PERSONA-INITIALS':
imageArea.prepend(clone[i]);
break;
default:
break;
}
}
});
}
}
/**
* @ngdoc controller
* @name PersonaController
* @module officeuifabric.components.persona
*
* @description
* Controller used for the `<uif-persona>` directive.
*/
export class PersonaController {
public static $inject: string[] = ['$log'];
// $log is public so child directives (initials, persona link fn) can log
// validation errors through the shared controller instance.
constructor(public $log: angular.ILogService) { }
}
/**
* @ngdoc interface
* @name IPersonaAttributes
* @module officeuifabric.components.persona
*
* @description
* Attributes used by the directive
*
* @property {string} uifSize Determines the size of the component
* @property {string} uifStyle Determines round or square size of the component
* @property {string} uifPresence Indicates presence of the associated user
*/
export interface IPersonaAttributes extends angular.IAttributes {
// Component size keyword: tiny, xsmall, small, medium, large or xlarge.
uifSize: string;
// 'round' or 'square'.
uifStyle: string;
// Presence keyword: available, away, blocked, busy, dnd or offline.
uifPresence: string;
}
/**
* @ngdoc interface
* @name IPersonaScope
* @module officeuifabric.components.persona
*
* @description
* Scope used by the persona directive.
*
* @property {string} uifPresence Indicates presence of the associated user
* @property {string} uifSize Size of the persona card component
* @property {string} uifImageUrl User image URL
* @property {function} getImageAreaVisibility Determines if the image area should be visible based on component size
* @property {function} getPersonaClasses Gets CSS classes for persona component based on type, size and presence
*/
export interface IPersonaScope extends angular.IScope {
// Presence keyword of the associated user.
uifPresence: string;
// Size keyword of the persona component.
uifSize: string;
// URL of the user's picture; image is omitted when falsy (ng-if).
uifImageUrl: string;
// True unless the size is 'tiny' (tiny personas hide the image area).
getImageAreaVisibility: () => boolean;
// Space-separated CSS classes derived from style, size and presence.
getPersonaClasses: () => string;
}
/**
* @ngdoc module
* @name officeuifabric.components.persona
*
* @description
* Persona
*/
export let module: angular.IModule = angular.module('officeuifabric.components.persona', ['officeuifabric.components'])
.directive('uifPersona', PersonaDirective.factory())
.directive('uifPersonaInitials', PersonaInitialsDirective.factory())
.directive('uifPersonaPrimaryText', PersonaTextDirective.factory('primary'))
.directive('uifPersonaSecondaryText', PersonaTextDirective.factory('secondary'))
.directive('uifPersonaTertiaryText', PersonaTextDirective.factory('tertiary'))
.directive('uifPersonaOptionalText', PersonaTextDirective.factory('')); | the_stack |
import { getProjectionName } from "@here/harp-datasource-protocol";
import {
EarthConstants,
GeoBox,
GeoCoordinates,
mercatorProjection,
OrientedBox3,
Projection,
ProjectionType,
sphereProjection
} from "@here/harp-geoutils";
import { assert, expect } from "chai";
import * as sinon from "sinon";
import * as THREE from "three";
import { CameraUtils } from "../lib/CameraUtils";
import { ElevationProvider } from "../lib/ElevationProvider";
import { MapView } from "../lib/MapView";
import { MapViewUtils } from "../lib/Utils";
// Mocha discourages using arrow functions, see https://mochajs.org/#arrow-functions
/**
 * Test helper: orients and positions `camera` so it looks at `geoTarget`
 * with the given tilt (degrees) from the given distance (world units), then
 * applies the principal point offset and refreshes the projection matrix.
 * NOTE(review): heading is negated for both the rotation and the position
 * call — presumably converting between heading sign conventions of the test
 * and MapViewUtils; confirm against the MapViewUtils API docs.
 */
function setCamera(
camera: THREE.PerspectiveCamera,
projection: Projection,
geoTarget: GeoCoordinates,
heading: number,
tilt: number,
distance: number,
ppalPoint = { x: 0, y: 0 }
) {
MapViewUtils.getCameraRotationAtTarget(
projection,
geoTarget,
-heading,
tilt,
camera.quaternion
);
MapViewUtils.getCameraPositionFromTargetCoordinates(
geoTarget,
distance,
-heading,
tilt,
projection,
camera.position
);
// World matrix must be up to date before the projection changes below.
camera.updateMatrixWorld(true);
CameraUtils.setPrincipalPoint(camera, ppalPoint);
camera.updateProjectionMatrix();
}
describe("MapViewUtils", function () {
const EPS = 1e-8;
describe("zoomOnTargetPosition", function () {
// Minimal MapView stand-in containing only the fields the utils read.
const mapViewMock = {
maxZoomLevel: 20,
minZoomLevel: 1,
camera: new THREE.PerspectiveCamera(40),
projection: mercatorProjection,
focalLength: 256,
pixelRatio: 1.0
};
const mapView = (mapViewMock as any) as MapView;
it("only changes zoom on center", () => {
const geoTarget = new GeoCoordinates(52.5, 13.5);
const worldTarget = mapView.projection.projectPoint(geoTarget, new THREE.Vector3());
const distance = MapViewUtils.calculateDistanceFromZoomLevel(mapView, 10);
setCamera(mapView.camera, mapView.projection, geoTarget, 0, 45, distance);
// Zoom toward the screen center (0, 0): only distance should change.
MapViewUtils.zoomOnTargetPosition(mapView, 0, 0, 11);
const {
target: newWorldTarget,
distance: newDistance
} = MapViewUtils.getTargetAndDistance(mapView.projection, mapView.camera);
const newZoomLevel = MapViewUtils.calculateZoomLevelFromDistance(mapView, newDistance);
expect(newZoomLevel).to.be.closeTo(11, 1e-13);
// Make sure the target did not move.
expect(worldTarget.distanceTo(newWorldTarget)).to.be.closeTo(0, Number.EPSILON);
});
it("only changes zoom on center even when tilting", () => {
const geoTarget = new GeoCoordinates(52.5, 13.5);
const worldTarget = mapView.projection.projectPoint(geoTarget, new THREE.Vector3());
const distance = MapViewUtils.calculateDistanceFromZoomLevel(mapView, 10);
const tilt = 45;
setCamera(mapView.camera, mapView.projection, geoTarget, 0, tilt, distance);
// Change tilt first
const newTilt = 50;
MapViewUtils.getCameraRotationAtTarget(
mapView.projection,
geoTarget,
0,
newTilt,
mapView.camera.quaternion
);
MapViewUtils.getCameraPositionFromTargetCoordinates(
geoTarget,
distance,
0,
newTilt,
mapView.projection,
mapView.camera.position
);
// Now zoom in
MapViewUtils.zoomOnTargetPosition(mapView, 0, 0, 11);
const {
target: newWorldTarget,
distance: newDistance
} = MapViewUtils.getTargetAndDistance(mapView.projection, mapView.camera);
const newZoomLevel = MapViewUtils.calculateZoomLevelFromDistance(mapView, newDistance);
expect(newZoomLevel).to.be.closeTo(11, Number.EPSILON);
// Make sure the target did not move.
expect(worldTarget.distanceTo(newWorldTarget)).to.be.closeTo(0, Number.EPSILON);
});
});
// Run the orbit tests for both projections, each with a centered and an
// off-center principal point, to cover the asymmetric-frustum code paths.
[
{ projection: mercatorProjection, ppalPoint: { x: 0, y: 0 } },
{ projection: mercatorProjection, ppalPoint: { x: 0.1, y: -0.7 } },
{ projection: sphereProjection, ppalPoint: { x: 0, y: 0 } },
{ projection: sphereProjection, ppalPoint: { x: -0.9, y: 0.3 } }
].forEach(testParams => {
const projection = testParams.projection;
const ppalPoint = testParams.ppalPoint;
const projName = getProjectionName(projection);
describe(`orbitAroundScreenPoint ${projName}, ppalPoint [${ppalPoint.x},${ppalPoint.y}]`, function () {
const mapViewMock = {
maxZoomLevel: 20,
minZoomLevel: 1,
camera: new THREE.PerspectiveCamera(40),
projection,
focalLength: 256,
pixelRatio: 1.0
};
const mapView = (mapViewMock as any) as MapView;
const target = new GeoCoordinates(52.5, 13.5);
const maxTiltAngle = THREE.MathUtils.degToRad(45);
it("keeps look at target when orbiting around center", function () {
const target = new GeoCoordinates(52.5, 13.5);
setCamera(
mapView.camera,
mapView.projection,
target,
0, //heading
0, //tilt
MapViewUtils.calculateDistanceFromZoomLevel(mapView, 10),
ppalPoint
);
const {
target: oldWorldTarget,
distance: oldDistance
} = MapViewUtils.getTargetAndDistance(mapView.projection, mapView.camera);
const deltaTilt = THREE.MathUtils.degToRad(45);
const deltaAzimuth = THREE.MathUtils.degToRad(42);
// No explicit center: orbits around the view center.
MapViewUtils.orbitAroundScreenPoint(mapView, {
deltaAzimuth,
deltaTilt,
maxTiltAngle
});
const {
target: newWorldTarget,
distance: newDistance
} = MapViewUtils.getTargetAndDistance(mapView.projection, mapView.camera);
// Sphere projection accumulates more numeric error, hence looser EPS.
expect(oldWorldTarget.distanceTo(newWorldTarget)).to.be.closeTo(
0,
projection === sphereProjection ? 1e-9 : Number.EPSILON
);
expect(oldDistance).to.be.closeTo(newDistance, 1e-9);
// Also check that we did not introduce any roll
const { roll } = MapViewUtils.extractAttitude(mapView, mapView.camera);
expect(roll).to.be.closeTo(0, 1e-15);
});
it("limits tilt when orbiting around center", function () {
setCamera(
mapView.camera,
mapView.projection,
target,
0, // heading
0, // tilt
MapViewUtils.calculateDistanceFromZoomLevel(mapView, 4),
ppalPoint
);
// Request a tilt delta well past the limit; result must be clamped.
const deltaTilt = THREE.MathUtils.degToRad(80);
const deltaAzimuth = 0;
MapViewUtils.orbitAroundScreenPoint(mapView, {
deltaAzimuth,
deltaTilt,
maxTiltAngle
});
const mapTargetWorld = MapViewUtils.rayCastWorldCoordinates(
mapView,
ppalPoint.x,
ppalPoint.y
);
expect(mapTargetWorld).to.not.be.null;
const { tilt } = MapViewUtils.extractSphericalCoordinatesFromLocation(
mapView,
mapView.camera,
mapTargetWorld!
);
expect(tilt).to.be.closeTo(
maxTiltAngle,
projection === sphereProjection
? 1e-7 // FIXME: Is this huge error expected?
: Number.EPSILON
);
});
for (const startTilt of [0, 20, 45]) {
it(`limits tilt when orbiting around screen point, starting at ${startTilt} deg`, function () {
setCamera(
mapView.camera,
mapView.projection,
target,
0, // heading
startTilt, // tilt
MapViewUtils.calculateDistanceFromZoomLevel(mapView, 4),
ppalPoint
);
const deltaTilt = THREE.MathUtils.degToRad(46);
const deltaAzimuth = 0;
// OffsetY >= ppalPoint.y for this to work for Sphere & Mercator, otherwise
// it works for planar, but not sphere.
const offsetX = 0.1;
const offsetY = ppalPoint.y + 0.1;
MapViewUtils.orbitAroundScreenPoint(mapView, {
center: {
x: offsetX,
y: offsetY
},
deltaAzimuth,
// Delta is past the tilt limit.
deltaTilt,
maxTiltAngle
});
const mapTargetWorldNew = MapViewUtils.rayCastWorldCoordinates(
mapView,
ppalPoint.x,
ppalPoint.y
);
const afterTilt = MapViewUtils.extractTiltAngleFromLocation(
mapView.projection,
mapView.camera,
mapTargetWorldNew!
);
if (projection === sphereProjection) {
if (afterTilt > maxTiltAngle) {
// If greater, then only within EPS, otherwise it should be less.
expect(afterTilt).to.be.closeTo(maxTiltAngle, EPS);
}
} else {
// Use a custom EPS, Number.Epsilon is too strict for such maths
expect(afterTilt).to.be.closeTo(maxTiltAngle, EPS);
}
});
}
it("keeps rotation target when orbiting around screen point", function () {
const offsetX = 0.2;
const offsetY = 0.2;
setCamera(
mapView.camera,
mapView.projection,
target,
0, //heading
0, //tilt
MapViewUtils.calculateDistanceFromZoomLevel(mapView, 10),
ppalPoint
);
// The world point under the orbit center must stay fixed.
const oldRotationTarget = MapViewUtils.rayCastWorldCoordinates(
mapView,
offsetX,
offsetY
);
expect(oldRotationTarget).to.be.not.null;
const deltaTilt = THREE.MathUtils.degToRad(45);
const deltaAzimuth = THREE.MathUtils.degToRad(42);
MapViewUtils.orbitAroundScreenPoint(mapView, {
center: {
x: offsetX,
y: offsetY
},
deltaAzimuth,
deltaTilt,
maxTiltAngle
});
const newRotationTarget = MapViewUtils.rayCastWorldCoordinates(
mapView,
offsetX,
offsetY
);
expect(newRotationTarget).to.be.not.null;
const distance = oldRotationTarget!.distanceTo(newRotationTarget!);
expect(distance).to.be.closeTo(0, EPS);
// Also check that we did not introduce any roll
const { roll } = MapViewUtils.extractAttitude(mapView, mapView.camera);
expect(roll).to.be.closeTo(0, EPS);
});
});
});
describe("calculateZoomLevelFromDistance", function () {
const mapViewMock = {
maxZoomLevel: 20,
minZoomLevel: 1,
camera: new THREE.PerspectiveCamera(40),
projection: mercatorProjection,
focalLength: 256,
pixelRatio: 1.0
};
const mapView = (mapViewMock as any) as MapView;
it("calculates zoom level", function () {
// Distance 0 clamps to maxZoomLevel, huge distance to minZoomLevel.
let result = MapViewUtils.calculateZoomLevelFromDistance(mapView, 0);
expect(result).to.be.equal(20);
result = MapViewUtils.calculateZoomLevelFromDistance(mapView, 1000000000000);
expect(result).to.be.equal(1);
/*
* 23.04.2018 - Zoom level outputs come from HARP
*/
// NOTE(review): the results of the next two calls are overwritten without
// being asserted — they look like smoke checks, but assertions for 1000
// and 10000 may have been intended; confirm against the reference values.
result = MapViewUtils.calculateZoomLevelFromDistance(mapView, 1000);
result = MapViewUtils.calculateZoomLevelFromDistance(mapView, 10000);
result = MapViewUtils.calculateZoomLevelFromDistance(mapView, 1000000);
expect(result).to.be.closeTo(5.32, 0.05);
});
it("snaps zoom level to ceiling integer if close enough to it", function () {
const eps = 1e-10;
// A distance epsilon past a quarter of the equator should still snap to 2.
const result = MapViewUtils.calculateZoomLevelFromDistance(
mapView,
EarthConstants.EQUATORIAL_CIRCUMFERENCE * (0.25 + eps)
);
expect(result).equals(2);
});
});
it("converts target coordinates from XYZ to camera coordinates", function () {
// Legacy XYZ view description: zoom, yaw/pitch in degrees, [lat, lng].
const xyzView = {
zoom: 5,
yaw: 3,
pitch: 15,
center: [10, -10]
};
const mapViewMock = {
camera: new THREE.PerspectiveCamera(40),
projection: mercatorProjection,
focalLength: 256,
pixelRatio: 1.0
};
const mapView = (mapViewMock as any) as MapView;
// Slant distance: ground distance divided by cos(pitch).
const cameraHeight =
MapViewUtils.calculateDistanceToGroundFromZoomLevel(mapView, xyzView.zoom) /
Math.cos(THREE.MathUtils.degToRad(xyzView.pitch));
const cameraCoordinates = MapViewUtils.getCameraCoordinatesFromTargetCoordinates(
new GeoCoordinates(xyzView.center[0], xyzView.center[1]),
cameraHeight,
xyzView.yaw,
xyzView.pitch,
mapView
);
// Golden values pin the exact conversion result (regression check).
expect(cameraCoordinates.latitude).to.equal(7.023208311781337);
expect(cameraCoordinates.longitude).to.equal(-9.842237006382904);
});
describe("converts zoom level to distance and distance to zoom level", function () {
let mapViewMock: any;
beforeEach(function () {
// Fresh mock per test so matrixWorld mutations don't leak between tests.
mapViewMock = {
maxZoomLevel: 20,
minZoomLevel: 1,
camera: {
matrixWorld: new THREE.Matrix4()
},
projection: mercatorProjection,
focalLength: 256,
pixelRatio: 1.0
};
});
it("ensures that both functions are inverse", function () {
// Tilt the camera to exercise the non-trivial (non-nadir) code path.
mapViewMock.camera.matrixWorld.makeRotationX(THREE.MathUtils.degToRad(30));
for (let zoomLevel = 1; zoomLevel <= 20; zoomLevel += 0.1) {
const distance = MapViewUtils.calculateDistanceFromZoomLevel(
mapViewMock,
zoomLevel
);
const calculatedZoomLevel = MapViewUtils.calculateZoomLevelFromDistance(
mapViewMock,
distance
);
// Expect accuracy till 10-th fractional digit (10-th place after comma).
expect(zoomLevel).to.be.closeTo(calculatedZoomLevel, 1e-10);
}
});
});
describe("wrapGeoPointsToScreen", function () {
const epsilon = 1e-10;
// Longitudes crossing the antimeridian must be unwrapped (shifted by 360)
// so all points lie in one contiguous range around the shape's true center.
it("works across antimeridian #1 - west based box", function () {
const fitted = MapViewUtils.wrapGeoPointsToScreen([
new GeoCoordinates(10, -170),
new GeoCoordinates(10, 170),
new GeoCoordinates(-10, -170)
]);
assert.closeTo(fitted[0].longitude, -170, epsilon);
// 170 unwraps to -190 to stay on the western side of the box.
assert.closeTo(fitted[1].longitude, -190, epsilon);
assert.closeTo(fitted[2].longitude, -170, epsilon);
});
it("works across antimeridian #2 - east based box", function () {
const fitted = MapViewUtils.wrapGeoPointsToScreen([
new GeoCoordinates(10, 170),
new GeoCoordinates(10, -170),
new GeoCoordinates(-10, 170)
]);
assert.closeTo(fitted[0].longitude, 170, epsilon);
// -170 unwraps to 190 to stay on the eastern side of the box.
assert.closeTo(fitted[1].longitude, 190, epsilon);
assert.closeTo(fitted[2].longitude, 170, epsilon);
});
it("works across antimeridian #3 - east based box v2", function () {
const fitted = MapViewUtils.wrapGeoPointsToScreen([
new GeoCoordinates(10, 170),
new GeoCoordinates(10, -170),
new GeoCoordinates(-10, 170),
new GeoCoordinates(0, -179)
]);
assert.closeTo(fitted[0].longitude, 170, epsilon);
assert.closeTo(fitted[1].longitude, 190, epsilon);
assert.closeTo(fitted[2].longitude, 170, epsilon);
assert.closeTo(fitted[3].longitude, 181, epsilon);
});
it("works across antimeridian #4 - bering sea", function () {
// sample shape - polygons enclosing bering sea
// naive GeoBox would have center lon~=0, we need to center around _real_ center
// which is in bering sea center which has lon ~=180 (or -180)
const fitted = MapViewUtils.wrapGeoPointsToScreen([
new GeoCoordinates(50.95019, -179.1428493376325),
new GeoCoordinates(52.91106, 159.02544759162745),
new GeoCoordinates(69.90354, 179.15147738391926),
new GeoCoordinates(70.25714, -161.597647174786),
new GeoCoordinates(55.76049, -157.31410465785078)
]);
// 2nd and 3rd point should be offsetted
assert.closeTo(fitted[0].longitude, -179.1428493376325, 0.0001);
assert.closeTo(fitted[1].longitude, 159.02544759162745 - 360, 0.0001);
assert.closeTo(fitted[2].longitude, 179.15147738391926 - 360, 0.0001);
assert.closeTo(fitted[3].longitude, -161.597647174786, 0.0001);
assert.closeTo(fitted[4].longitude, -157.31410465785078, 0.0001);
});
});
// Run target/distance and bounds-constraint tests for both projections.
for (const { projName, projection } of [
{ projName: "mercator", projection: mercatorProjection },
{ projName: "sphere", projection: sphereProjection }
]) {
describe(`${projName} projection`, function () {
describe("getTargetAndDistance", function () {
// Empty object cast to ElevationProvider; getHeight is stubbed per test.
const elevationProvider = ({} as any) as ElevationProvider;
let sandbox: sinon.SinonSandbox;
let camera: THREE.PerspectiveCamera;
const geoTarget = GeoCoordinates.fromDegrees(0, 0);
function resetCamera() {
// Nadir view over (0, 0) from 1000 km.
setCamera(camera, projection, geoTarget, 0, 0, 1e6);
}
beforeEach(function () {
sandbox = sinon.createSandbox();
camera = new THREE.PerspectiveCamera();
resetCamera();
});
it("camera target and distance are offset by elevation", function () {
elevationProvider.getHeight = sandbox.stub().returns(0);
const resultNoElevation = MapViewUtils.getTargetAndDistance(
projection,
camera,
elevationProvider
);
const geoTargetNoElevation = projection.unprojectPoint(
resultNoElevation.target
);
expect(geoTargetNoElevation).deep.equals(
GeoCoordinates.fromDegrees(geoTarget.lat, geoTarget.lng, 0)
);
// With terrain at 42 m the target rises and the distance shrinks
// by exactly that amount.
const elevation = 42;
elevationProvider.getHeight = sandbox.stub().returns(elevation);
const resultElevation = MapViewUtils.getTargetAndDistance(
projection,
camera,
elevationProvider
);
expect(resultElevation.distance).equals(resultNoElevation.distance - elevation);
const geoTargetElevation = projection.unprojectPoint(resultElevation.target);
expect(geoTargetElevation).deep.equals(
GeoCoordinates.fromDegrees(geoTarget.lat, geoTarget.lng, elevation)
);
});
it("indicates whether the computation was final or not", function () {
// Unknown elevation (undefined) marks the result as non-final.
elevationProvider.getHeight = sandbox.stub().returns(undefined);
const res1 = MapViewUtils.getTargetAndDistance(
projection,
camera,
elevationProvider
);
expect(res1.final).to.be.false;
elevationProvider.getHeight = sandbox.stub().returns(0);
const res2 = MapViewUtils.getTargetAndDistance(
projection,
camera,
elevationProvider
);
expect(res2.final).to.be.true;
// No elevation provider at all is also final.
const res3 = MapViewUtils.getTargetAndDistance(projection, camera);
expect(res3.final).to.be.true;
});
});
describe("constrainTargetAndDistanceToViewBounds", function () {
const camera = new THREE.PerspectiveCamera(undefined, 1);
const mapViewMock = {
maxZoomLevel: 20,
minZoomLevel: 1,
camera,
projection,
focalLength: 256,
worldMaxBounds: undefined as THREE.Box3 | OrientedBox3 | undefined,
renderer: {
getSize() {
return new THREE.Vector2(300, 300);
}
}
};
const mapView = (mapViewMock as any) as MapView;
it("target and distance are unchanged when no bounds set", function () {
const geoTarget = GeoCoordinates.fromDegrees(0, 0);
const worldTarget = mapView.projection.projectPoint(
geoTarget,
new THREE.Vector3()
);
const distance = 1e7;
setCamera(camera, mapView.projection, geoTarget, 0, 0, distance);
const constrained = MapViewUtils.constrainTargetAndDistanceToViewBounds(
worldTarget,
distance,
mapView
);
expect(constrained.target).deep.equals(worldTarget);
expect(constrained.distance).equals(distance);
});
it("target and distance are unchanged when view within bounds", function () {
const geoTarget = GeoCoordinates.fromDegrees(0, 0);
const geoBounds = new GeoBox(
GeoCoordinates.fromDegrees(-50, -50),
GeoCoordinates.fromDegrees(50, 50)
);
const worldTarget = mapView.projection.projectPoint(
geoTarget,
new THREE.Vector3()
);
// Planar projections use an axis-aligned box, sphere an oriented one.
mapViewMock.worldMaxBounds = mapView.projection.projectBox(
geoBounds,
mapView.projection.type === ProjectionType.Planar
? new THREE.Box3()
: new OrientedBox3()
);
const distance = 100;
setCamera(camera, mapView.projection, geoTarget, 0, 0, distance);
const constrained = MapViewUtils.constrainTargetAndDistanceToViewBounds(
worldTarget,
distance,
mapView
);
expect(constrained.target).deep.equals(worldTarget);
expect(constrained.distance).equals(distance);
});
it("target and distance are constrained when camera is too far", function () {
const tilt = 0;
const heading = 0;
const geoTarget = GeoCoordinates.fromDegrees(0, 0);
const geoBounds = new GeoBox(
GeoCoordinates.fromDegrees(-1, -1),
GeoCoordinates.fromDegrees(1, 1)
);
const worldTarget = mapView.projection.projectPoint(
geoTarget,
new THREE.Vector3()
);
mapViewMock.worldMaxBounds = mapView.projection.projectBox(
geoBounds,
mapView.projection.type === ProjectionType.Planar
? new THREE.Box3()
: new OrientedBox3()
);
// Viewing a tiny 2x2 degree box from 1000 km: the whole view exceeds
// the bounds, so the target must snap to the bounds center and the
// distance must shrink; tilt has to be preserved.
const distance = 1e6;
setCamera(camera, mapView.projection, geoTarget, heading, tilt, distance);
const constrained = MapViewUtils.constrainTargetAndDistanceToViewBounds(
worldTarget,
distance,
mapView
);
const boundsCenter = (mapViewMock.worldMaxBounds as THREE.Box3).getCenter(
new THREE.Vector3()
);
// Compare at the same height/radius as the original target.
if (mapView.projection.type === ProjectionType.Planar) {
boundsCenter.setZ(worldTarget.z);
} else {
boundsCenter.setLength(worldTarget.length());
}
expect(constrained.target).deep.equals(boundsCenter);
expect(constrained.distance).to.be.lessThan(distance);
const constrainedGeoTarget = mapView.projection.unprojectPoint(
constrained.target
);
const newTilt = MapViewUtils.extractTiltAngleFromLocation(
mapView.projection,
camera,
constrainedGeoTarget
);
expect(THREE.MathUtils.radToDeg(newTilt)).to.be.closeTo(tilt, 1e-3);
});
it("target and distance are constrained if target is out of bounds", function () {
const tilt = 50;
const heading = 10;
// Target sits just outside (0.1 deg north of) the bounds.
const geoTarget = GeoCoordinates.fromDegrees(10.1, 10);
const geoBounds = new GeoBox(
GeoCoordinates.fromDegrees(-10, -10),
GeoCoordinates.fromDegrees(10, 10)
);
const worldTarget = mapView.projection.projectPoint(
geoTarget,
new THREE.Vector3()
);
mapViewMock.worldMaxBounds = mapView.projection.projectBox(
geoBounds,
mapView.projection.type === ProjectionType.Planar
? new THREE.Box3()
: new OrientedBox3()
);
const distance = 100;
setCamera(camera, mapView.projection, geoTarget, heading, tilt, distance);
const constrained = MapViewUtils.constrainTargetAndDistanceToViewBounds(
worldTarget,
distance,
mapView
);
const constrainedGeoTarget = mapView.projection.unprojectPoint(
constrained.target
);
// Target is pulled back inside; distance and tilt stay the same.
expect(geoBounds.contains(constrainedGeoTarget)).to.equal(true);
expect(constrained.distance).equals(distance);
const newTilt = MapViewUtils.extractTiltAngleFromLocation(
mapView.projection,
camera,
constrainedGeoTarget
);
expect(THREE.MathUtils.radToDeg(newTilt)).to.be.closeTo(tilt, 1e-3);
});
});
});
}
});
import * as assert from 'assert';
import 'mocha';
import * as os from 'os';
import * as vscode from 'vscode';
import { assertNoRpc, closeAllEditors, delay, disposeAll } from '../utils';
const webviewId = 'myWebview';
/** Builds a URI for a path (given as segments) inside the first workspace folder. */
function workspaceFile(...segments: string[]) {
	const workspaceRoot = vscode.workspace.workspaceFolders![0].uri;
	return vscode.Uri.joinPath(workspaceRoot, ...segments);
}
suite('vscode API - webview', () => {
const disposables: vscode.Disposable[] = [];
// Tracks a disposable for teardown and returns it unchanged so the call
// can be used inline at the creation site.
function _register<T extends vscode.Disposable>(disposable: T) {
disposables.push(disposable);
return disposable;
}
teardown(async () => {
// Verify no RPC objects leaked, close editors, then dispose everything
// registered through _register during the test.
assertNoRpc();
await closeAllEditors();
disposeAll(disposables);
});
test('webviews should be able to send and receive messages', async () => {
const webview = _register(vscode.window.createWebviewPanel(webviewId, 'title', { viewColumn: vscode.ViewColumn.One }, { enableScripts: true }));
// Subscribe before posting so the echo reply is not missed.
const firstResponse = getMessage(webview);
// The embedded script echoes every incoming value incremented by one.
webview.webview.html = createHtmlDocumentWithBody(/*html*/`
<script>
const vscode = acquireVsCodeApi();
window.addEventListener('message', (message) => {
vscode.postMessage({ value: message.data.value + 1 });
});
</script>`);
webview.webview.postMessage({ value: 1 });
assert.strictEqual((await firstResponse).value, 2);
});
test('webviews should not have scripts enabled by default', async () => {
// No enableScripts option: the webview's <script> must not run.
const webview = _register(vscode.window.createWebviewPanel(webviewId, 'title', { viewColumn: vscode.ViewColumn.One }, {}));
// Race the (never-arriving) webview message against a 1s timeout sentinel.
const response = Promise.race<any>([
getMessage(webview),
new Promise<{}>(resolve => setTimeout(() => resolve({ value: '🎉' }), 1000))
]);
webview.webview.html = createHtmlDocumentWithBody(/*html*/`
<script>
const vscode = acquireVsCodeApi();
vscode.postMessage({ value: '💉' });
</script>`);
// The timeout sentinel must win, proving the script never executed.
assert.strictEqual((await response).value, '🎉');
});
// Reassigning `.html` replaces the document; each new document posts its own message.
test('webviews should update html', async () => {
const webview = _register(vscode.window.createWebviewPanel(webviewId, 'title', { viewColumn: vscode.ViewColumn.One }, { enableScripts: true }));
{
const response = getMessage(webview);
webview.webview.html = createHtmlDocumentWithBody(/*html*/`
<script>
const vscode = acquireVsCodeApi();
vscode.postMessage({ value: 'first' });
</script>`);
assert.strictEqual((await response).value, 'first');
}
{
const response = getMessage(webview);
webview.webview.html = createHtmlDocumentWithBody(/*html*/`
<script>
const vscode = acquireVsCodeApi();
vscode.postMessage({ value: 'second' });
</script>`);
assert.strictEqual((await response).value, 'second');
}
});
// State saved via vscode.setState must survive the webview being hidden and revealed again.
test.skip('webviews should preserve vscode API state when they are hidden', async () => {
const webview = _register(vscode.window.createWebviewPanel(webviewId, 'title', { viewColumn: vscode.ViewColumn.One }, { enableScripts: true }));
const ready = getMessage(webview);
webview.webview.html = createHtmlDocumentWithBody(/*html*/`
<script>
const vscode = acquireVsCodeApi();
let value = (vscode.getState() || {}).value || 0;
window.addEventListener('message', (message) => {
switch (message.data.type) {
case 'get':
vscode.postMessage({ value });
break;
case 'add':
++value;;
vscode.setState({ value });
vscode.postMessage({ value });
break;
}
});
vscode.postMessage({ type: 'ready' });
</script>`);
await ready;
const firstResponse = await sendReceiveMessage(webview, { type: 'add' });
assert.strictEqual(firstResponse.value, 1);
// Swap away from the webview
const doc = await vscode.workspace.openTextDocument(workspaceFile('bower.json'));
await vscode.window.showTextDocument(doc);
// And then back
const ready2 = getMessage(webview);
webview.reveal(vscode.ViewColumn.One);
await ready2;
// We should still have old state
const secondResponse = await sendReceiveMessage(webview, { type: 'get' });
assert.strictEqual(secondResponse.value, 1);
});
// Moving the panel to another view column must not reset the webview's in-page state.
test.skip('webviews should preserve their context when they are moved between view columns', async () => { // TODO@mjbvz https://github.com/microsoft/vscode/issues/141001
const doc = await vscode.workspace.openTextDocument(workspaceFile('bower.json'));
await vscode.window.showTextDocument(doc, vscode.ViewColumn.One);
// Open webview in same column
const webview = _register(vscode.window.createWebviewPanel(webviewId, 'title', { viewColumn: vscode.ViewColumn.One }, { enableScripts: true }));
const ready = getMessage(webview);
webview.webview.html = statefulWebviewHtml;
await ready;
const firstResponse = await sendReceiveMessage(webview, { type: 'add' });
assert.strictEqual(firstResponse.value, 1);
// Now move webview to new view column
webview.reveal(vscode.ViewColumn.Two);
// We should still have old state
const secondResponse = await sendReceiveMessage(webview, { type: 'get' });
assert.strictEqual(secondResponse.value, 1);
});
// retainContextWhenHidden keeps the page itself alive, so plain in-memory state
// (no setState persistence) must survive hide/reveal.
test.skip('webviews with retainContextWhenHidden should preserve their context when they are hidden', async function () {
this.retries(3);
const webview = _register(vscode.window.createWebviewPanel(webviewId, 'title', { viewColumn: vscode.ViewColumn.One }, { enableScripts: true, retainContextWhenHidden: true }));
const ready = getMessage(webview);
webview.webview.html = statefulWebviewHtml;
await ready;
const firstResponse = await sendReceiveMessage(webview, { type: 'add' });
// NOTE(review): firstResponse is already awaited; the extra await below is a no-op.
assert.strictEqual((await firstResponse).value, 1);
// Swap away from the webview
const doc = await vscode.workspace.openTextDocument(workspaceFile('bower.json'));
await vscode.window.showTextDocument(doc);
// And then back
webview.reveal(vscode.ViewColumn.One);
// We should still have old state
const secondResponse = await sendReceiveMessage(webview, { type: 'get' });
assert.strictEqual(secondResponse.value, 1);
});
// With retainContextWhenHidden, the page's scroll offset must survive hide/reveal.
test.skip('webviews with retainContextWhenHidden should preserve their page position when hidden', async () => {
const webview = _register(vscode.window.createWebviewPanel(webviewId, 'title', { viewColumn: vscode.ViewColumn.One }, { enableScripts: true, retainContextWhenHidden: true }));
const ready = getMessage(webview);
webview.webview.html = createHtmlDocumentWithBody(/*html*/`
${'<h1>Header</h1>'.repeat(200)}
<script>
const vscode = acquireVsCodeApi();
setTimeout(() => {
window.scroll(0, 100);
vscode.postMessage({ value: window.scrollY });
}, 500);
window.addEventListener('message', (message) => {
switch (message.data.type) {
case 'get':
vscode.postMessage({ value: window.scrollY });
break;
}
});
vscode.postMessage({ type: 'ready' });
</script>`);
await ready;
const firstResponse = getMessage(webview);
assert.strictEqual(Math.round((await firstResponse).value), 100);
// Swap away from the webview
const doc = await vscode.workspace.openTextDocument(workspaceFile('bower.json'));
await vscode.window.showTextDocument(doc);
// And then back
webview.reveal(vscode.ViewColumn.One);
// We should still have old scroll pos
const secondResponse = await sendReceiveMessage(webview, { type: 'get' });
assert.strictEqual(Math.round(secondResponse.value), 100);
});
// A hidden webview with retainContextWhenHidden must still receive and process messages.
test.skip('webviews with retainContextWhenHidden should be able to recive messages while hidden', async () => { // TODO@mjbvz https://github.com/microsoft/vscode/issues/139960
const webview = _register(vscode.window.createWebviewPanel(webviewId, 'title', { viewColumn: vscode.ViewColumn.One }, { enableScripts: true, retainContextWhenHidden: true }));
const ready = getMessage(webview);
webview.webview.html = statefulWebviewHtml;
await ready;
const firstResponse = await sendReceiveMessage(webview, { type: 'add' });
// NOTE(review): firstResponse/secondResponse are already awaited; the extra awaits are no-ops.
assert.strictEqual((await firstResponse).value, 1);
// Swap away from the webview
const doc = await vscode.workspace.openTextDocument(workspaceFile('bower.json'));
await vscode.window.showTextDocument(doc);
// Try posting a message to our hidden webview
const secondResponse = await sendReceiveMessage(webview, { type: 'add' });
assert.strictEqual((await secondResponse).value, 2);
// Now show webview again
webview.reveal(vscode.ViewColumn.One);
// We should still have old state
const thirdResponse = await sendReceiveMessage(webview, { type: 'get' });
assert.strictEqual(thirdResponse.value, 2);
});
// Default resource policy: images inside the workspace load, anything outside
// (including paths escaping via '..') must fail to load.
test.skip('webviews should only be able to load resources from workspace by default', async () => { // TODO@mjbvz https://github.com/microsoft/vscode/issues/139960
const webview = _register(vscode.window.createWebviewPanel(webviewId, 'title', {
viewColumn: vscode.ViewColumn.One
}, {
enableScripts: true
}));
webview.webview.html = createHtmlDocumentWithBody(/*html*/`
<script>
const vscode = acquireVsCodeApi();
window.addEventListener('message', (message) => {
const img = document.createElement('img');
img.addEventListener('load', () => {
vscode.postMessage({ value: true });
});
img.addEventListener('error', (e) => {
console.log(e);
vscode.postMessage({ value: false });
});
img.src = message.data.src;
document.body.appendChild(img);
});
vscode.postMessage({ type: 'ready', userAgent: window.navigator.userAgent });
</script>`);
const ready = getMessage(webview);
if ((await ready).userAgent.indexOf('Firefox') >= 0) {
// Skip on firefox web for now.
// Firefox service workers never seem to get any 'fetch' requests here. Other browsers work fine
return;
}
{
const imagePath = webview.webview.asWebviewUri(workspaceFile('image.png'));
const response = await sendReceiveMessage(webview, { src: imagePath.toString() });
assert.strictEqual(response.value, true);
}
// {
// // #102188. Resource filename containing special characters like '%', '#', '?'.
// const imagePath = webview.webview.asWebviewUri(workspaceFile('image%02.png'));
// const response = await sendReceiveMessage(webview, { src: imagePath.toString() });
// assert.strictEqual(response.value, true);
// }
// {
// // #102188. Resource filename containing special characters like '%', '#', '?'.
// const imagePath = webview.webview.asWebviewUri(workspaceFile('image%.png'));
// const response = await sendReceiveMessage(webview, { src: imagePath.toString() });
// assert.strictEqual(response.value, true);
// }
{
const imagePath = webview.webview.asWebviewUri(workspaceFile('no-such-image.png'));
const response = await sendReceiveMessage(webview, { src: imagePath.toString() });
assert.strictEqual(response.value, false);
}
{
const imagePath = webview.webview.asWebviewUri(workspaceFile('..', '..', '..', 'resources', 'linux', 'code.png'));
const response = await sendReceiveMessage(webview, { src: imagePath.toString() });
assert.strictEqual(response.value, false);
}
});
// localResourceRoots narrows the allowed set: only files under 'sub' may load;
// a workspace file outside that root must fail.
test.skip('webviews should allow overriding allowed resource paths using localResourceRoots', async () => {
const webview = _register(vscode.window.createWebviewPanel(webviewId, 'title', { viewColumn: vscode.ViewColumn.One }, {
enableScripts: true,
localResourceRoots: [workspaceFile('sub')]
}));
webview.webview.html = createHtmlDocumentWithBody(/*html*/`
<script>
const vscode = acquireVsCodeApi();
window.addEventListener('message', (message) => {
const img = document.createElement('img');
img.addEventListener('load', () => { vscode.postMessage({ value: true }); });
img.addEventListener('error', () => { vscode.postMessage({ value: false }); });
img.src = message.data.src;
document.body.appendChild(img);
});
</script>`);
{
const response = sendReceiveMessage(webview, { src: webview.webview.asWebviewUri(workspaceFile('sub', 'image.png')).toString() });
assert.strictEqual((await response).value, true);
}
{
const response = sendReceiveMessage(webview, { src: webview.webview.asWebviewUri(workspaceFile('image.png')).toString() });
assert.strictEqual((await response).value, false);
}
});
// Back-compat: hard-coded legacy 'vscode-resource:' URIs (pre-asWebviewUri) must still load.
test.skip('webviews using hard-coded old style vscode-resource uri should work', async () => { // TODO@mjbvz https://github.com/microsoft/vscode/issues/139572
const webview = _register(vscode.window.createWebviewPanel(webviewId, 'title', { viewColumn: vscode.ViewColumn.One }, {
enableScripts: true,
localResourceRoots: [workspaceFile('sub')]
}));
const imagePath = workspaceFile('sub', 'image.png').with({ scheme: 'vscode-resource' }).toString();
webview.webview.html = createHtmlDocumentWithBody(/*html*/`
<img src="${imagePath}">
<script>
const vscode = acquireVsCodeApi();
vscode.postMessage({ type: 'ready', userAgent: window.navigator.userAgent });
const img = document.getElementsByTagName('img')[0];
img.addEventListener('load', () => { vscode.postMessage({ value: true }); });
img.addEventListener('error', () => { vscode.postMessage({ value: false }); });
</script>`);
const ready = getMessage(webview);
if ((await ready).userAgent.indexOf('Firefox') >= 0) {
// Skip on firefox web for now.
// Firefox service workers never seem to get any 'fetch' requests here. Other browsers work fine
return;
}
// NOTE(review): imagePath is already a string, so .toString() below is redundant.
const firstResponse = await sendReceiveMessage(webview, { src: imagePath.toString() });
assert.strictEqual(firstResponse.value, true);
});
// ViewColumn.Active is symbolic: viewColumn starts undefined and must resolve to a
// concrete column (One) after exactly one view-state change event.
test('webviews should have real view column after they are created, #56097', async () => {
const webview = _register(vscode.window.createWebviewPanel(webviewId, 'title', { viewColumn: vscode.ViewColumn.Active }, { enableScripts: true }));
// Since we used a symbolic column, we don't know what view column the webview will actually show in at first
assert.strictEqual(webview.viewColumn, undefined);
let changed = false;
const viewStateChanged = new Promise<vscode.WebviewPanelOnDidChangeViewStateEvent>((resolve) => {
webview.onDidChangeViewState(e => {
if (changed) {
throw new Error('Only expected a single view state change');
}
changed = true;
resolve(e);
}, undefined, disposables);
});
assert.strictEqual((await viewStateChanged).webviewPanel.viewColumn, vscode.ViewColumn.One);
const firstResponse = getMessage(webview);
webview.webview.html = createHtmlDocumentWithBody(/*html*/`
<script>
const vscode = acquireVsCodeApi();
vscode.postMessage({ });
</script>`);
webview.webview.postMessage({ value: 1 });
await firstResponse;
assert.strictEqual(webview.viewColumn, vscode.ViewColumn.One);
});
// macOS-only: text selected inside a webview must be copyable via the editor's
// clipboard copy command and land on the OS clipboard.
if (os.platform() === 'darwin') {
test.skip('webview can copy text from webview', async () => {
const expectedText = `webview text from: ${Date.now()}!`;
const webview = _register(vscode.window.createWebviewPanel(webviewId, 'title', { viewColumn: vscode.ViewColumn.One }, { enableScripts: true, retainContextWhenHidden: true }));
const ready = getMessage(webview);
webview.webview.html = createHtmlDocumentWithBody(/*html*/`
<b>${expectedText}</b>
<script>
const vscode = acquireVsCodeApi();
document.execCommand('selectAll');
vscode.postMessage({ type: 'ready' });
</script>`);
await ready;
await vscode.commands.executeCommand('editor.action.clipboardCopyAction');
await delay(200); // Make sure copy has time to reach webview
assert.strictEqual(await vscode.env.clipboard.readText(), expectedText);
});
}
// ArrayBuffers must round-trip through postMessage: the webview increments each
// byte and transfers the buffer back; the host verifies every element.
test.skip('webviews should transfer ArrayBuffers to and from webviews', async () => {
const webview = _register(vscode.window.createWebviewPanel(webviewId, 'title', { viewColumn: vscode.ViewColumn.One }, { enableScripts: true, retainContextWhenHidden: true }));
const ready = getMessage(webview);
webview.webview.html = createHtmlDocumentWithBody(/*html*/`
<script>
const vscode = acquireVsCodeApi();
window.addEventListener('message', (message) => {
switch (message.data.type) {
case 'add1':
const arrayBuffer = message.data.array;
const uint8Array = new Uint8Array(arrayBuffer);
for (let i = 0; i < uint8Array.length; ++i) {
uint8Array[i] = uint8Array[i] + 1;
}
vscode.postMessage({ array: arrayBuffer }, [arrayBuffer]);
break;
}
});
vscode.postMessage({ type: 'ready' });
</script>`);
await ready;
const responsePromise = getMessage(webview);
const bufferLen = 100;
{
const arrayBuffer = new ArrayBuffer(bufferLen);
const uint8Array = new Uint8Array(arrayBuffer);
for (let i = 0; i < bufferLen; ++i) {
uint8Array[i] = i;
}
webview.webview.postMessage({
type: 'add1',
array: arrayBuffer
});
}
{
const response = await responsePromise;
assert.ok(response.array instanceof ArrayBuffer);
const uint8Array = new Uint8Array(response.array);
for (let i = 0; i < bufferLen; ++i) {
assert.strictEqual(uint8Array[i], i + 1);
}
}
});
// Typed arrays sharing one ArrayBuffer must keep sharing it after the round-trip:
// the webview bumps each uint16 element; the host checks the aliased uint8 view
// (little-endian layout: even bytes hold the incremented values, odd bytes stay 0).
test.skip('webviews should transfer Typed arrays to and from webviews', async () => {
const webview = _register(vscode.window.createWebviewPanel(webviewId, 'title', { viewColumn: vscode.ViewColumn.One }, { enableScripts: true, retainContextWhenHidden: true }));
const ready = getMessage(webview);
webview.webview.html = createHtmlDocumentWithBody(/*html*/`
<script>
const vscode = acquireVsCodeApi();
window.addEventListener('message', (message) => {
switch (message.data.type) {
case 'add1':
const uint8Array = message.data.array1;
// This should update both buffers since they use the same ArrayBuffer storage
const uint16Array = message.data.array2;
for (let i = 0; i < uint16Array.length; ++i) {
uint16Array[i] = uint16Array[i] + 1;
}
vscode.postMessage({ array1: uint8Array, array2: uint16Array, }, [uint16Array.buffer]);
break;
}
});
vscode.postMessage({ type: 'ready' });
</script>`);
await ready;
const responsePromise = getMessage(webview);
const bufferLen = 100;
{
const arrayBuffer = new ArrayBuffer(bufferLen);
const uint8Array = new Uint8Array(arrayBuffer);
const uint16Array = new Uint16Array(arrayBuffer);
for (let i = 0; i < uint16Array.length; ++i) {
uint16Array[i] = i;
}
webview.webview.postMessage({
type: 'add1',
array1: uint8Array,
array2: uint16Array,
});
}
{
const response = await responsePromise;
assert.ok(response.array1 instanceof Uint8Array);
assert.ok(response.array2 instanceof Uint16Array);
assert.ok(response.array1.buffer === response.array2.buffer);
const uint8Array = response.array1;
for (let i = 0; i < bufferLen; ++i) {
if (i % 2 === 0) {
assert.strictEqual(uint8Array[i], Math.floor(i / 2) + 1);
} else {
assert.strictEqual(uint8Array[i], 0);
}
}
}
});
});
// Wraps the given markup in a minimal standalone HTML5 document so tests only
// need to supply the body content (typically a <script> block).
function createHtmlDocumentWithBody(body: string): string {
return /*html*/`<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<meta http-equiv="X-UA-Compatible" content="ie=edge">
<title>Document</title>
</head>
<body>
${body}
</body>
</html>`;
}
// Shared webview document keeping a counter: 'add' increments, persists it via
// vscode.setState and reports it; 'get' just reports it. Posts { type: 'ready' }
// once the script has loaded so tests can synchronize.
const statefulWebviewHtml = createHtmlDocumentWithBody(/*html*/ `
<script>
const vscode = acquireVsCodeApi();
let value = 0;
window.addEventListener('message', (message) => {
switch (message.data.type) {
case 'get':
vscode.postMessage({ value });
break;
case 'add':
++value;;
vscode.setState({ value });
vscode.postMessage({ value });
break;
}
});
vscode.postMessage({ type: 'ready' });
</script>`);
/**
 * Resolves with the next message posted from the given webview.
 * The listener is one-shot: it unsubscribes itself before delivering the message.
 */
function getMessage<R = any>(webview: vscode.WebviewPanel): Promise<R> {
	return new Promise<R>(resolve => {
		const subscription = webview.webview.onDidReceiveMessage(received => {
			// Drop the listener first so later messages don't hit a settled promise.
			subscription.dispose();
			resolve(received);
		});
	});
}
function sendReceiveMessage<T = {}, R = any>(webview: vscode.WebviewPanel, message: T): Promise<R> {
const p = getMessage<R>(webview);
webview.webview.postMessage(message);
return p;
} | the_stack |
import { PageHeaderWrapper } from '@ant-design/pro-layout';
import React, { Fragment, useEffect, useState } from 'react';
import { Button, Card, Col, Divider, Form, Icon, Input, message, Popconfirm, Row, Table } from 'antd';
import apis from '@/services';
import { ConnectState, Dispatch } from '@/models/connect';
import { connect } from 'dva';
import { FormComponentProps } from 'antd/es/form';
import encodeQueryParam from '@/utils/encodeParam';
import { downloadObject } from '@/utils/utils';
import { PaginationConfig } from 'antd/lib/table';
import Save from './save';
import StandardFormRow from '../components/standard-form-row';
import TagSelect from '../components/tag-select';
import styles from '../index.less';
import Debug from './debugger';
import Upload from 'antd/lib/upload';
import { getAccessToken } from '@/utils/authority';
// Props injected by dva's connect() plus antd's Form.create wrapper.
interface Props extends FormComponentProps {
dispatch: Dispatch; // dva store dispatcher
noticeTemplate: any; // noticeTemplate model state; query results live under result.data
loading: boolean; // pending-effect flag for the noticeTemplate model
}
// Local UI state for the notification-template list page.
interface State {
typeList: any[]; // available notifier types (loaded from apis.notifier.configType)
activeType: string; // currently unused — the matching useState hook is commented out
saveVisible: boolean; // whether the Save modal is shown
currentItem: any; // row being edited / debugged
searchParam: any; // last query params, reused after mutations to refresh the table
filterType: string[]; // selected type tags (type$IN term)
filterName: string; // name filter text (name$LIKE term)
debugVisible: boolean; // whether the Debug modal is shown
}
// antd Form.Item layout: full-width control on extra-small screens, 16/24 columns from small up.
const formItemLayout = {
wrapperCol: {
xs: { span: 24 },
sm: { span: 16 },
},
};
// Notification-template management page: tag/name filtering, paged table, and
// create / edit / delete / export / import / debug actions dispatched to the
// dva `noticeTemplate` model.
const Template: React.FC<Props> = props => {
const { noticeTemplate, loading, dispatch } = props;
const initState: State = {
typeList: [],
activeType: '',
saveVisible: false,
currentItem: {},
searchParam: {},
filterType: [],
filterName: '',
debugVisible: false,
};
const [typeList, setTypeList] = useState(initState.typeList);
// const [activeType, setActiveType] = useState(initState.activeType);
const [saveVisible, setSaveVisible] = useState(initState.saveVisible);
const [currentItem, setCurrentItem] = useState(initState.currentItem);
const [searchParam, setSearchParam] = useState(initState.searchParam);
const [filterType, setFilterType] = useState(initState.filterType);
const [filterName, setFilterName] = useState(initState.filterName);
const [debugVisible, setDebugVisible] = useState(initState.debugVisible);
// Runs a paged query sorted by id desc and remembers the params for refreshes.
// NOTE(review): params is dereferenced unconditionally (temp.sorts = …), so
// callers must always pass an object despite the optional signature.
const handlerSearch = (params?: any) => {
const temp = params;
temp.sorts = {
field: 'id',
order: 'desc',
};
dispatch({
type: 'noticeTemplate/query',
payload: encodeQueryParam(temp),
});
setSearchParam(temp);
};
// Unpaged filter query driven by the type tags and the name input. Arguments
// fall back to current state so either filter can trigger a combined search.
// NOTE(review): the name$LIKE guard tests the raw `name` argument rather than
// the effective `tempName`; when called with name === undefined while
// filterName is '', an empty LIKE term is sent — confirm intended.
const onSearch = (type?: string[], name?: string) => {
const tempType = type || filterType;
const tempName = name || filterName;
dispatch({
type: 'noticeTemplate/query',
payload: encodeQueryParam({
paging: false,
sorts: {
field: 'id',
order: 'desc',
},
terms: {
type$IN: tempType,
name$LIKE: name === '' ? undefined : tempName,
},
}),
});
};
// On mount: load the notifier type list for the tag filter and run the first page query.
useEffect(() => {
apis.notifier.configType().then((res: any) => {
if (res) {
setTypeList(res.result);
}
});
handlerSearch({
pageIndex: 0,
pageSize: 10,
});
}, []);
// Deletes a template, then refreshes the table with the last query params.
const remove = (record: any) => {
dispatch({
type: 'noticeTemplate/remove',
payload: record.id,
callback: (res) => {
if (res.status === 200) {
message.success('删除成功');
handlerSearch(searchParam);
}
},
});
};
// Creates or updates a template (Save modal submit), then closes the modal and refreshes.
const saveData = (item: any) => {
dispatch({
type: 'noticeTemplate/insert',
payload: item,
callback: (res) => {
if (res.status === 200) {
message.success('保存成功');
setSaveVisible(false);
handlerSearch(searchParam);
}
},
});
};
// Table pagination / sorting change handler; antd pages are 1-based, the API is 0-based.
const onTableChange = (
pagination: PaginationConfig,
filters: any,
sorter: any,
// extra: any,
) => {
handlerSearch({
pageIndex: Number(pagination.current) - 1,
pageSize: pagination.pageSize,
terms: searchParam,
sorts: sorter,
});
};
// Imports a parsed template object via the insert effect (used by the Upload flow below).
const uploadProps = (item: any) => {
dispatch({
type: 'noticeTemplate/insert',
payload: item,
callback: (res) => {
if (res.status === 200) {
message.success('导入成功');
handlerSearch(searchParam);
}
},
});
};
/*const uploadProps: UploadProps = {
accept: '.json',
action: '/jetlinks/file/static',
headers: {
'X-Access-Token': getAccessToken(),
},
showUploadList: false,
onChange(info) {
if (info.file.status !== 'uploading') {
// console.log(info.file, info.fileList);
}
if (info.file.status === 'done') {
const fileUrl = info.file.response.result;
request(fileUrl, { method: 'GET' }).then(e => {
dispatch({
type: 'noticeTemplate/insert',
payload: e,
callback: () => {
message.success('导入成功');
handlerSearch(searchParam);
},
});
});
}
if (info.file.status === 'error') {
message.error(`${info.file.name} 导入失败.`);
}
},
};*/
return (
<PageHeaderWrapper title="通知模版">
<div className={styles.filterCardList}>
<Card bordered={false}>
<Form layout="inline">
<StandardFormRow title="组件类型" block style={{ paddingBottom: 11 }}>
<Form.Item>
<TagSelect
// expandable
onChange={(value: any[]) => {
setFilterType(value);
onSearch(value, undefined);
}}
>
{typeList?.map(item => (
<TagSelect.Option key={item.id} value={item.id}>
{item.name}
</TagSelect.Option>
))}
</TagSelect>
</Form.Item>
</StandardFormRow>
<StandardFormRow title="其它选项" grid last>
<Row gutter={16}>
<Col lg={8} md={10} sm={10} xs={24}>
<Form.Item {...formItemLayout} label="配置名称">
<Input
onChange={e => {
const tempValue = e.target.value;
setFilterName(tempValue);
onSearch(undefined, tempValue === '' ? undefined : tempValue);
}}
/>
</Form.Item>
</Col>
</Row>
</StandardFormRow>
</Form>
</Card>
<br />
<Card>
<Button
onClick={() => {
setCurrentItem({});
setSaveVisible(true);
}}
type="primary"
style={{ marginBottom: 16 }}
>
新建
</Button>
<Divider type="vertical" />
<Button
onClick={() => {
downloadObject(noticeTemplate.result?.data, '通知模板');
}}
style={{ marginBottom: 16 }}
>
导出配置
</Button>
<Divider type="vertical" />
{/*<Upload {...uploadProps}>
<Button type="primary" style={{ marginBottom: 16 }}>
导入配置
</Button>
</Upload>*/}
<Upload
action="/jetlinks/file/static"
headers={{
'X-Access-Token': getAccessToken(),
}}
showUploadList={false} accept='.json'
beforeUpload={(file) => {
// Read the selected .json locally and import it via the insert effect.
// NOTE(review): beforeUpload does not return false, so antd still POSTs
// the file to `action` as well — confirm the double submission is intended.
const reader = new FileReader();
reader.readAsText(file);
reader.onload = (result) => {
// NOTE(review): `result` is the load event; target.result is assumed to be
// the file's text (readAsText), though its static type also allows
// ArrayBuffer/null — verify under strict TS.
try {
uploadProps(JSON.parse(result.target.result));
} catch (error) {
message.error('文件格式错误');
}
}
}}
>
<Button>
<Icon type="upload" />导入配置
</Button>
</Upload>
<Table
rowKey="id"
onChange={onTableChange}
loading={loading}
columns={[
{
dataIndex: 'id',
title: 'ID',
defaultSortOrder: 'descend',
},
{
dataIndex: 'name',
title: '模版名称',
},
{
dataIndex: 'type',
title: '通知类型',
},
{
dataIndex: 'provider',
title: '服务商',
},
{
dataIndex: 'option',
title: '操作',
render: (text, record: any) => (
<Fragment>
<a
onClick={() => {
setCurrentItem(record);
setSaveVisible(true);
}}
>
编辑
</a>
<Divider type="vertical" />
<Popconfirm
title="确认删除?"
onConfirm={() => {
remove(record);
}}
>
<a>删除</a>
</Popconfirm>
<Divider type="vertical" />
<a onClick={() => downloadObject(record, '通知模版')}>下载配置</a>
<Divider type="vertical" />
<a
onClick={() => {
setCurrentItem(record);
setDebugVisible(true);
}}
>
调试
</a>
</Fragment>
),
},
]}
dataSource={noticeTemplate.result?.data}
pagination={{
current: noticeTemplate.result?.pageIndex + 1,
total: noticeTemplate.result?.total,
pageSize: noticeTemplate.result?.pageSize,
showQuickJumper: true,
showSizeChanger: true,
pageSizeOptions: ['10', '20', '50', '100'],
showTotal: (total: number) =>
`共 ${total} 条记录 第 ${noticeTemplate.result?.pageIndex + 1}/${Math.ceil(
noticeTemplate.result?.total / noticeTemplate.result?.pageSize,
)}页`,
}}
/>
</Card>
</div>
{saveVisible && (
<Save
data={currentItem}
close={() => setSaveVisible(false)}
save={(item: any) => saveData(item)}
/>
)}
{debugVisible && <Debug data={currentItem} close={() => setDebugVisible(false)} />}
</PageHeaderWrapper>
);
};
// Wire the page to the dva store (noticeTemplate model state plus its pending-effect
// flag) and wrap with antd's Form.create to inject the `form` prop.
export default connect(({ noticeTemplate, loading }: ConnectState) => ({
noticeTemplate,
loading: loading.models.noticeTemplate,
}))(Form.create<Props>()(Template));
import { UniverseDetails } from '@augurproject/sdk/build/state/getter/Universe';
import {
ACCOUNTS,
defaultSeedPath,
fork,
loadSeed,
} from '@augurproject/tools';
import { TestContractAPI } from '@augurproject/tools';
import {
getPayoutNumerators,
makeValidScalarOutcome,
} from '@augurproject/tools/build/flash/fork';
import { TestEthersProvider } from '@augurproject/tools/build/libs/TestEthersProvider';
import { NULL_ADDRESS } from '@augurproject/tools/build/libs/Utils';
import { BigNumber } from 'bignumber.js';
import { formatBytes32String } from 'ethers/utils';
import { makeProvider } from '../../../libs';
import { SDKConfiguration } from '@augurproject/utils';
import { MarketInfo } from "@augurproject/sdk-lite";
describe('State API :: Universe :: ', () => {
let john: TestContractAPI;
let mary: TestContractAPI;
let bob: TestContractAPI;
let baseProvider: TestEthersProvider;
let config: SDKConfiguration;
beforeAll(async () => {
const seed = await loadSeed(defaultSeedPath);
baseProvider = await makeProvider(seed, ACCOUNTS);
config = baseProvider.getConfig();
john = await TestContractAPI.userWrapper(
ACCOUNTS[0],
baseProvider,
config
);
mary = await TestContractAPI.userWrapper(
ACCOUNTS[1],
baseProvider,
config
);
bob = await TestContractAPI.userWrapper(
ACCOUNTS[2],
baseProvider,
config
);
await john.approve();
await mary.approve();
await bob.approve();
});
beforeEach(async () => {
const provider = await baseProvider.fork();
john = await TestContractAPI.userWrapper(ACCOUNTS[0], provider, config);
mary = await TestContractAPI.userWrapper(ACCOUNTS[1], provider, config);
bob = await TestContractAPI.userWrapper(ACCOUNTS[2], provider, config);
});
// TODO Fix the 0x error occurring when multiple fork getter tests run in one file.
test('getForkMigrationTotals : YesNo', async () => {
const universe = john.augur.contracts.universe;
await john.sync();
let migrationTotals = await john.api.route('getForkMigrationTotals', {
universe: universe.address,
});
expect(migrationTotals).toEqual({});
const market = await john.createReasonableYesNoMarket();
await john.sync();
const marketInfo: MarketInfo = (await john.api.route('getMarketsInfo', {
marketIds: [market.address],
}))[0];
await fork(john, market);
const repTokenAddress = await john.augur.contracts.universe.getReputationToken_();
const repToken = john.augur.contracts.reputationTokenFromAddress(
repTokenAddress,
john.augur.config.networkId
);
const invalidNumerators = getPayoutNumerators(marketInfo, 0);
const noNumerators = getPayoutNumerators(marketInfo, 1);
await john.faucetRep(new BigNumber(1e21));
await john.augur.contracts.universe.createChildUniverse(invalidNumerators);
await repToken.migrateOutByPayout(invalidNumerators, new BigNumber(1e21));
await john.faucetRep(new BigNumber(1e21));
await john.augur.contracts.universe.createChildUniverse(noNumerators);
await repToken.migrateOutByPayout(noNumerators, new BigNumber(1e21));
await john.sync();
migrationTotals = await john.api.route('getForkMigrationTotals', {
universe: universe.address,
});
expect(migrationTotals).toMatchObject({
marketId: market.address,
outcomes: [
{
outcomeName: 'Invalid',
outcome: '0',
amount: '1000000000000000000000',
payoutNumerators: ['1000', '0', '0'],
},
{
outcomeName: 'No',
outcome: '1',
amount: '1000000000000000000000',
payoutNumerators: ['0', '1000', '0'],
},
],
});
});
test('getForkMigrationTotals : Categorical', async () => {
const universe = john.augur.contracts.universe;
await john.sync();
let migrationTotals = await john.api.route('getForkMigrationTotals', {
universe: universe.address,
});
expect(migrationTotals).toEqual({});
const market = await john.createReasonableMarket(
['foo', 'bar', 'happiness', 'smile'].map(formatBytes32String)
);
await john.sync();
const marketInfo = (await john.api.route('getMarketsInfo', {
marketIds: [market.address],
}))[0];
await fork(john, market);
const repTokenAddress = await john.augur.contracts.universe.getReputationToken_();
const repToken = john.augur.contracts.reputationTokenFromAddress(
repTokenAddress,
john.augur.config.networkId
);
const invalidNumerators = getPayoutNumerators(marketInfo, 0);
const fooNumerators = getPayoutNumerators(marketInfo, 1);
await john.faucetRep(new BigNumber(1e21));
await john.augur.contracts.universe.createChildUniverse(invalidNumerators);
await repToken.migrateOutByPayout(invalidNumerators, new BigNumber(1e21));
await john.faucetRep(new BigNumber(1e21));
await john.augur.contracts.universe.createChildUniverse(fooNumerators);
await repToken.migrateOutByPayout(fooNumerators, new BigNumber(1e21));
await john.sync();
migrationTotals = await john.api.route('getForkMigrationTotals', {
universe: universe.address,
});
expect(migrationTotals).toEqual({
marketId: market.address,
outcomes: [
{
outcomeName: 'Invalid',
outcome: '0',
isInvalid: true,
amount: '1000000000000000000000',
payoutNumerators: ['1000', '0', '0', '0', '0'],
},
{
outcomeName: 'foo',
outcome: '1',
amount: '1000000000000000000000',
payoutNumerators: ['0', '1000', '0', '0', '0'],
},
],
});
});
test('getForkMigrationTotals : Scalar', async () => {
const universe = john.augur.contracts.universe;
await john.sync();
let migrationTotals = await john.api.route('getForkMigrationTotals', {
universe: universe.address,
});
expect(migrationTotals).toEqual({});
const market = await john.createReasonableScalarMarket();
await john.sync();
const marketInfo = (await john.api.route('getMarketsInfo', {
marketIds: [market.address],
}))[0];
const invalidNumerators = getPayoutNumerators(marketInfo, 'invalid');
const fooOutcome = makeValidScalarOutcome(marketInfo);
const fooNumerators = getPayoutNumerators(marketInfo, fooOutcome);
await fork(john, market);
const repTokenAddress = await john.augur.contracts.universe.getReputationToken_();
const repToken = john.augur.contracts.reputationTokenFromAddress(
repTokenAddress,
john.augur.config.networkId
);
await john.faucetRep(new BigNumber(1e21));
await john.augur.contracts.universe.createChildUniverse(invalidNumerators);
await repToken.migrateOutByPayout(invalidNumerators, new BigNumber(1e21));
await john.faucetRep(new BigNumber(1e21));
await john.augur.contracts.universe.createChildUniverse(fooNumerators);
await repToken.migrateOutByPayout(fooNumerators, new BigNumber(1e21));
await john.sync();
migrationTotals = await john.api.route('getForkMigrationTotals', {
universe: universe.address,
});
expect(migrationTotals).toMatchObject({
marketId: market.address,
outcomes: [
{
outcomeName: 'Invalid',
outcome: '0',
amount: '1000000000000000000000',
isInvalid: true,
payoutNumerators: ['20000', '0', '0'],
},
{
outcomeName: '116000000000000000000',
outcome: '116000000000000000000',
amount: '1000000000000000000000',
payoutNumerators: ['0', '13400', '6600'],
},
],
});
});
// Verifies getUniverseChildren for the genesis universe from several
// perspectives: an account with REP (John), one with no TokenBalanceChanged
// logs (Bob), after creating a market, and finally after a fork creates a
// child universe.
test('getUniverseChildren : Genesis', async () => {
  const genesisUniverse = john.augur.contracts.universe;
  const legacyRep = new BigNumber(11000000).multipliedBy(10 ** 18);
  let johnRep = await john.augur.contracts.reputationToken.balanceOf_(john.account.address);
  let maryRep = new BigNumber(0);
  const bobRep = new BigNumber(0);
  let totalRep = await john.augur.contracts.reputationToken.totalSupply_();
  // Faucet small, distinct amounts so balances are distinguishable below.
  await john.faucetRep(new BigNumber(91));
  johnRep = johnRep.plus(91);
  await mary.faucetRep(new BigNumber(19));
  maryRep = maryRep.plus(19);
  totalRep = totalRep.plus(91).plus(19);
  // Verify from John's perspective.
  console.log("Verify from John's perspective.");
  await john.sync();
  let universeChildren: UniverseDetails = await john.api.route(
    'getUniverseChildren',
    {
      universe: genesisUniverse.address,
      account: john.account.address,
    }
  );
  expect(universeChildren).toMatchObject({
    id: genesisUniverse.address,
    parentUniverseId: NULL_ADDRESS,
    outcomeName: 'Genesis',
    usersRep: johnRep.toFixed(),
    totalRepSupply: totalRep.toFixed(),
    totalOpenInterest: '0',
    numberOfMarkets: 1, // includes warp sync market
    children: [],
  });
  expect(universeChildren.creationTimestamp).toBeGreaterThan(0);
  // Verify from Bob's perspective.
  // Tests case where there aren't any TokenBalanceChanged logs.
  console.log("Verify from Bob's perspective.");
  await john.sync();
  universeChildren = await john.api.route('getUniverseChildren', {
    universe: genesisUniverse.address,
    account: bob.account.address,
  });
  expect(universeChildren).toMatchObject({
    id: genesisUniverse.address,
    parentUniverseId: NULL_ADDRESS,
    outcomeName: 'Genesis',
    usersRep: bobRep.toFixed(), // aka zero
    totalRepSupply: totalRep.toFixed(),
    totalOpenInterest: '0',
    numberOfMarkets: 1, // includes warp sync market
    children: [],
  });
  expect(universeChildren.creationTimestamp).toBeGreaterThan(0);
  // Create a market to see how that affects numberOfMarkets.
  console.log('Create a market to see how that affects numberOfMarkets.');
  const repBond = await genesisUniverse.getOrCacheMarketRepBond_();
  const market = await john.createReasonableScalarMarket();
  johnRep = johnRep.minus(repBond); // market creation escrows the REP bond
  await john.sync();
  universeChildren = await john.api.route('getUniverseChildren', {
    universe: genesisUniverse.address,
    account: john.account.address,
  });
  expect(universeChildren).toMatchObject({
    id: genesisUniverse.address,
    parentUniverseId: NULL_ADDRESS,
    outcomeName: 'Genesis',
    usersRep: johnRep.toFixed(),
    totalRepSupply: totalRep.toFixed(),
    totalOpenInterest: '0',
    numberOfMarkets: 2, // includes warp sync market
    children: [],
  });
  expect(universeChildren.creationTimestamp).toBeGreaterThan(0);
  // Fork to see how that affects the children.
  console.log('Fork to see how that affects the children.');
  const marketInfo = (await john.api.route('getMarketsInfo', {
    marketIds: [market.address],
  }))[0];
  await fork(john, market);
  const repTokenAddress = await john.augur.contracts.universe.getReputationToken_();
  const repToken = john.augur.contracts.reputationTokenFromAddress(
    repTokenAddress,
    john.augur.config.networkId
  );
  // The fork script faucets a lot of REP then uses up a difficult-to-predict amount.
  johnRep = await repToken.balanceOf_(john.account.address);
  totalRep = await repToken.totalSupply_();
  const invalidNumerators = getPayoutNumerators(marketInfo, 'invalid');
  const childUniverseRep = johnRep;
  // Call twice because there's a bug when the first migration meets the goal.
  await john.augur.contracts.universe.createChildUniverse(invalidNumerators);
  await repToken.migrateOutByPayout(invalidNumerators, new BigNumber(1));
  await repToken.migrateOutByPayout(
    invalidNumerators,
    childUniverseRep.minus(1)
  );
  johnRep = johnRep.minus(childUniverseRep);
  totalRep = totalRep.minus(childUniverseRep);
  await john.sync();
  universeChildren = await john.api.route('getUniverseChildren', {
    universe: genesisUniverse.address,
    account: john.account.address,
  });
  expect(universeChildren).toMatchObject({
    id: genesisUniverse.address,
    outcomeName: 'Genesis',
    usersRep: '0', // all migrated out
    totalRepSupply: totalRep.toFixed(),
    totalOpenInterest: '0',
    numberOfMarkets: 2, // includes warp sync market
    parentUniverseId: NULL_ADDRESS,
    children: [
      {
        parentUniverseId: genesisUniverse.address,
        outcomeName: 'Invalid',
        usersRep: childUniverseRep.toFixed(),
        totalRepSupply: childUniverseRep.toFixed(),
        totalOpenInterest: '0',
        numberOfMarkets: 0,
        children: [],
      },
    ],
  });
  expect(universeChildren.creationTimestamp).toBeGreaterThan(0);
  expect(universeChildren.children[0].creationTimestamp).toBeGreaterThan(0);
  expect(universeChildren.children[0].id).not.toEqual(NULL_ADDRESS);
});
}); | the_stack |
import refractor from 'refractor/core';
import {
ApplySchemaAttributes,
assertGet,
command,
CommandFunction,
CreateExtensionPlugin,
extension,
ExtensionTag,
findNodeAtSelection,
findParentNodeOfType,
GetAttributes,
getMatchString,
getStyle,
InputRule,
isElementDomNode,
isNodeActive,
isNodeOfType,
isTextSelection,
keyBinding,
KeyBindingProps,
NamedShortcut,
NodeExtension,
NodeExtensionSpec,
nodeInputRule,
NodeSpecOverride,
OnSetOptionsProps,
PosProps,
ProsemirrorAttributes,
removeNodeAtPosition,
replaceNodeAtPosition,
setBlockType,
toggleBlockItem,
} from '@remirror/core';
import { keydownHandler } from '@remirror/pm/keymap';
import { TextSelection } from '@remirror/pm/state';
import { ExtensionCodeBlockTheme as Theme } from '@remirror/theme';
import { CodeBlockState } from './code-block-plugin';
import type { CodeBlockAttributes, CodeBlockOptions } from './code-block-types';
import {
codeBlockToDOM,
formatCodeBlockFactory,
getLanguage,
getLanguageFromDom,
toggleCodeBlockOptions,
updateNodeAttributes,
} from './code-block-utils';
/**
 * Node extension adding fenced code blocks to the editor, with refractor-based
 * language registration, code-fence input rules, formatting commands, and
 * custom Tab / Backspace / Enter key handling.
 */
@extension<CodeBlockOptions>({
  defaultOptions: {
    supportedLanguages: [],
    toggleName: 'paragraph',
    // Default formatter is the identity — real formatters are user-supplied.
    formatter: ({ source }) => ({ cursorOffset: 0, formatted: source }),
    syntaxTheme: 'a11y_dark',
    defaultLanguage: 'markup',
    defaultWrap: false,
    // See https://github.com/remirror/remirror/issues/624 for the ''
    plainTextClassName: '',
    getLanguageFromDom,
  },
  staticKeys: ['getLanguageFromDom'],
})
export class CodeBlockExtension extends NodeExtension<CodeBlockOptions> {
  get name() {
    return 'codeBlock' as const;
  }

  // Tag the node as block-level code so other extensions can react to it.
  createTags() {
    return [ExtensionTag.Block, ExtensionTag.Code];
  }

  /**
   * Add the languages to the environment if they have not yet been added.
   */
  protected init(): void {
    this.registerLanguages();
  }

  /**
   * Define the `codeBlock` node schema: plain text content, no marks, with
   * `language` and `wrap` attributes and DOM parsing for both GitHub-style
   * `div.highlight` markup and plain `<pre><code>` blocks.
   */
  createNodeSpec(extra: ApplySchemaAttributes, override: NodeSpecOverride): NodeExtensionSpec {
    // Matches GitHub's highlight classes, e.g. `highlight-source-ts`.
    const githubHighlightRegExp = /highlight-(?:text|source)-([\da-z]+)/;
    return {
      content: 'text*',
      marks: '',
      defining: true,
      isolating: true,
      draggable: false,
      ...override,
      // `code: true` is applied after the override so it cannot be disabled.
      code: true,
      attrs: {
        ...extra.defaults(),
        language: { default: this.options.defaultLanguage },
        wrap: { default: this.options.defaultWrap },
      },
      parseDOM: [
        // Add support for github code blocks.
        {
          tag: 'div.highlight',
          preserveWhitespace: 'full',
          getAttrs: (node) => {
            if (!isElementDomNode(node)) {
              return false;
            }
            const codeElement = node.querySelector('pre.code');
            if (!isElementDomNode(codeElement)) {
              return false;
            }
            // `pre-wrap` white-space signals soft wrapping was enabled.
            const wrap = getStyle(codeElement, 'white-space') === 'pre-wrap';
            // NOTE(review): the capture from githubHighlightRegExp can never
            // start with 'language-', so this replace looks like a no-op —
            // confirm before removing.
            const language = node.className
              .match(githubHighlightRegExp)?.[1]
              ?.replace('language-', '');
            return { ...extra.parse(node), language, wrap };
          },
        },
        // Standard `<pre><code>` blocks; language detection is delegated to
        // the configurable `getLanguageFromDom` option.
        {
          tag: 'pre',
          preserveWhitespace: 'full',
          getAttrs: (node) => {
            if (!isElementDomNode(node)) {
              return false;
            }
            const codeElement = node.querySelector('code');
            if (!isElementDomNode(codeElement)) {
              return false;
            }
            const wrap = getStyle(codeElement, 'white-space') === 'pre-wrap';
            const language = this.options.getLanguageFromDom(codeElement, node);
            return { ...extra.parse(node), language, wrap };
          },
        },
        ...(override.parseDOM ?? []),
      ],
      toDOM: (node) => codeBlockToDOM(node, extra),
    };
  }

  /**
   * Add the syntax theme class to the editor.
   */
  createAttributes(): ProsemirrorAttributes {
    return { class: (Theme as any)[this.options.syntaxTheme.toUpperCase()] };
  }

  /**
   * Create an input rule that listens converts the code fence into a code block
   * when typing triple back tick followed by a space.
   */
  createInputRules(): InputRule[] {
    const regexp = /^```([\dA-Za-z]*) $/;
    const getAttributes: GetAttributes = (match) => {
      // Resolve the typed language, falling back to the configured default.
      const language = getLanguage({
        language: getMatchString(match, 1),
        fallback: this.options.defaultLanguage,
      });
      return { language };
    };
    return [
      nodeInputRule({
        regexp,
        type: this.type,
        beforeDispatch: ({ tr, start }) => {
          // Move the cursor inside the freshly created code block.
          const $pos = tr.doc.resolve(start);
          tr.setSelection(new TextSelection($pos));
        },
        getAttributes: getAttributes,
      }),
    ];
  }

  // React to dynamic option changes that require re-registration or restyle.
  protected onSetOptions(props: OnSetOptionsProps<CodeBlockOptions>): void {
    const { changes } = props;
    if (changes.supportedLanguages.changed) {
      // Update the registered languages when language support is dynamically
      // added.
      this.registerLanguages();
    }
    if (changes.syntaxTheme.changed) {
      // Update the attributes when the syntax theme changes to add the new
      // style to the main editor element.
      this.store.updateAttributes();
    }
  }

  /**
   * Create the custom code block plugin which handles the delete key amongst other things.
   */
  createPlugin(): CreateExtensionPlugin<CodeBlockState> {
    const pluginState = new CodeBlockState(this.type, this);
    /**
     * Handles deletions within the plugin state.
     */
    const handler = () => {
      pluginState.setDeleted(true);
      // Return false to allow any lower priority keyboard handlers to run.
      return false;
    };
    return {
      state: {
        init(_, state) {
          return pluginState.init(state);
        },
        apply(tr, _, __, state) {
          return pluginState.apply(tr, state);
        },
      },
      props: {
        // Flag every deletion-style key combo so `decorations` below knows a
        // deletion may be in progress.
        handleKeyDown: keydownHandler({
          Backspace: handler,
          'Mod-Backspace': handler,
          Delete: handler,
          'Mod-Delete': handler,
          'Ctrl-h': handler,
          'Alt-Backspace': handler,
          'Ctrl-d': handler,
          'Ctrl-Alt-Backspace': handler,
          'Alt-Delete': handler,
          'Alt-d': handler,
        }),
        decorations() {
          // Reset the deleted flag each time decorations are recomputed.
          pluginState.setDeleted(false);
          return pluginState.decorationSet;
        },
      },
    };
  }

  /**
   * Call this method to toggle the code block.
   *
   * @remarks
   *
   * ```ts
   * actions.toggleCodeBlock({ language: 'ts' });
   * ```
   *
   * The above makes the current node a codeBlock with the language ts or
   * remove the code block altogether.
   */
  @command(toggleCodeBlockOptions)
  toggleCodeBlock(attributes: Partial<CodeBlockAttributes>): CommandFunction {
    return toggleBlockItem({
      type: this.type,
      toggleType: this.options.toggleName,
      attrs: { language: this.options.defaultLanguage, ...attributes },
    });
  }

  /**
   * Creates a code at the current position.
   *
   * ```ts
   * commands.createCodeBlock({ language: 'js' });
   * ```
   */
  @command()
  createCodeBlock(attributes: CodeBlockAttributes): CommandFunction {
    return setBlockType(this.type, attributes);
  }

  /**
   * Update the code block at the current position. Primarily this is used
   * to change the language.
   *
   * ```ts
   * if (commands.updateCodeBlock.isEnabled()) {
   *   commands.updateCodeBlock({ language: 'markdown' });
   * }
   * ```
   */
  @command()
  updateCodeBlock(attributes: CodeBlockAttributes): CommandFunction {
    return updateNodeAttributes(this.type)(attributes);
  }

  /**
   * Format the code block with the code formatting function passed as an
   * option.
   *
   * Code formatters (like prettier) add a lot to the bundle size and hence
   * it is up to you to provide a formatter which will be run on the entire
   * code block when this method is used.
   *
   * ```ts
   * if (actions.formatCodeBlock.isActive()) {
   *   actions.formatCodeBlockFactory();
   *   // Or with a specific position
   *   actions.formatCodeBlock({ pos: 100 }) // to format a separate code block
   * }
   * ```
   */
  @command()
  formatCodeBlock(props?: Partial<PosProps>): CommandFunction {
    return formatCodeBlockFactory({
      type: this.type,
      formatter: this.options.formatter,
      defaultLanguage: this.options.defaultLanguage,
    })(props);
  }

  /**
   * Insert a literal tab character inside a code block instead of letting
   * Tab move focus; replaces the selected range when it is non-empty.
   */
  @keyBinding({ shortcut: 'Tab' })
  tabKey({ state, dispatch }: KeyBindingProps): boolean {
    const { selection, tr, schema } = state;
    // Check to ensure that this is the correct node.
    const { node } = findNodeAtSelection(selection);
    if (!isNodeOfType({ node, types: this.type })) {
      return false;
    }
    if (selection.empty) {
      tr.insertText('\t');
    } else {
      const { from, to } = selection;
      tr.replaceWith(from, to, schema.text('\t'));
    }
    if (dispatch) {
      dispatch(tr);
    }
    return true;
  }

  /**
   * Handle Backspace at the very start of a code block: remove or replace an
   * empty block, otherwise jump the cursor out to the previous node.
   */
  @keyBinding({ shortcut: 'Backspace' })
  backspaceKey({ dispatch, tr, state }: KeyBindingProps): boolean {
    // If the selection is not empty, return false and let other extension
    // (ie: BaseKeymapExtension) to do the deleting operation.
    if (!tr.selection.empty) {
      return false;
    }
    // Check that this is the correct node.
    const parent = findParentNodeOfType({ types: this.type, selection: tr.selection });
    // Only act when the cursor sits at the very start of the code block.
    if (parent?.start !== tr.selection.from) {
      return false;
    }
    const { pos, node, start } = parent;
    const toggleNode = assertGet(state.schema.nodes, this.options.toggleName);
    if (node.textContent.trim() === '') {
      // eslint-disable-next-line unicorn/consistent-destructuring
      if (tr.doc.lastChild === node && tr.doc.firstChild === node) {
        // The code block is the only node in the doc — swap it for the
        // toggle node rather than leaving an empty document.
        replaceNodeAtPosition({ pos, tr, content: toggleNode.create() });
      } else {
        removeNodeAtPosition({ pos, tr });
      }
    } else if (start > 2) {
      // Jump to the previous node.
      tr.setSelection(TextSelection.create(tr.doc, start - 2));
    } else {
      // There is no content before the codeBlock so simply create a new
      // block and jump into it.
      tr.insert(0, toggleNode.create());
      tr.setSelection(TextSelection.create(tr.doc, 1));
    }
    if (dispatch) {
      dispatch(tr);
    }
    return true;
  }

  /**
   * Convert a typed ```lang fence into a code block when Enter is pressed at
   * the end of the fence text.
   */
  @keyBinding({ shortcut: 'Enter' })
  enterKey({ dispatch, tr }: KeyBindingProps): boolean {
    if (!(isTextSelection(tr.selection) && tr.selection.empty)) {
      return false;
    }
    const { nodeBefore, parent } = tr.selection.$anchor;
    if (!nodeBefore || !nodeBefore.isText || !parent.type.isTextblock) {
      return false;
    }
    const regex = /^```([A-Za-z]*)?$/;
    const { text, nodeSize } = nodeBefore;
    const { textContent } = parent;
    if (!text) {
      return false;
    }
    // Both the text node before the cursor and the whole paragraph must be
    // exactly a fence, so mid-paragraph backticks don't trigger conversion.
    const matchesNodeBefore = text.match(regex);
    const matchesParent = textContent.match(regex);
    if (!matchesNodeBefore || !matchesParent) {
      return false;
    }
    const [, lang] = matchesNodeBefore;
    const language = getLanguage({
      language: lang,
      fallback: this.options.defaultLanguage,
    });
    const pos = tr.selection.$from.before();
    const end = pos + nodeSize + 1; // +1 to account for the extra pos a node takes up
    tr.replaceWith(pos, end, this.type.create({ language }));
    // Set the selection to within the codeBlock
    tr.setSelection(TextSelection.create(tr.doc, pos + 1));
    if (dispatch) {
      dispatch(tr);
    }
    return true;
  }

  /**
   * Run the format command via the named Format shortcut when the cursor is
   * inside a code block.
   */
  @keyBinding({ shortcut: NamedShortcut.Format })
  formatShortcut({ tr }: KeyBindingProps): boolean {
    const commands = this.store.commands;
    if (!isNodeActive({ type: this.type, state: tr })) {
      return false;
    }
    const enabled = commands.formatCodeBlock.isEnabled();
    if (enabled) {
      commands.formatCodeBlock();
    }
    return enabled;
  }

  /**
   * Register passed in languages.
   */
  private registerLanguages() {
    for (const language of this.options.supportedLanguages) {
      refractor.register(language);
    }
  }
}
// Re-export the language resolver so consumers don't need the utils path.
export { getLanguage };

// Register this extension on the global Remirror extension map so
// `codeBlock` is available on typed command/helper chains.
declare global {
  namespace Remirror {
    interface AllExtensions {
      codeBlock: CodeBlockExtension;
    }
  }
}
import { PagedAsyncIterableIterator } from "@azure/core-paging";
import { Jobs } from "../operationsInterfaces";
import * as coreClient from "@azure/core-client";
import * as Mappers from "../models/mappers";
import * as Parameters from "../models/parameters";
import { AzureMediaServices } from "../azureMediaServices";
import {
Job,
JobsListNextOptionalParams,
JobsListOptionalParams,
JobsListResponse,
JobsGetOptionalParams,
JobsGetResponse,
JobsCreateOptionalParams,
JobsCreateResponse,
JobsDeleteOptionalParams,
JobsUpdateOptionalParams,
JobsUpdateResponse,
JobsCancelJobOptionalParams,
JobsListNextResponse
} from "../models";
/// <reference lib="esnext.asynciterable" />
/** Class containing Jobs operations. */
export class JobsImpl implements Jobs {
  private readonly client: AzureMediaServices;

  /**
   * Initialize a new instance of the class Jobs class.
   * @param client Reference to the service client
   */
  constructor(client: AzureMediaServices) {
    this.client = client;
  }

  /**
   * Lists all of the Jobs for the Transform.
   * @param resourceGroupName The name of the resource group within the Azure subscription.
   * @param accountName The Media Services account name.
   * @param transformName The Transform name.
   * @param options The options parameters.
   */
  public list(
    resourceGroupName: string,
    accountName: string,
    transformName: string,
    options?: JobsListOptionalParams
  ): PagedAsyncIterableIterator<Job> {
    const allItems = this.listPagingAll(
      resourceGroupName,
      accountName,
      transformName,
      options
    );
    return {
      next: () => allItems.next(),
      [Symbol.asyncIterator]() {
        return this;
      },
      byPage: () =>
        this.listPagingPage(
          resourceGroupName,
          accountName,
          transformName,
          options
        )
    };
  }

  /**
   * Yields one page of Jobs per iteration, following the OData continuation
   * link until the service stops returning one.
   */
  private async *listPagingPage(
    resourceGroupName: string,
    accountName: string,
    transformName: string,
    options?: JobsListOptionalParams
  ): AsyncIterableIterator<Job[]> {
    let continuationToken: string | undefined;
    do {
      // First iteration hits the list endpoint; later ones follow nextLink.
      const page = continuationToken
        ? await this._listNext(
            resourceGroupName,
            accountName,
            transformName,
            continuationToken,
            options
          )
        : await this._list(
            resourceGroupName,
            accountName,
            transformName,
            options
          );
      continuationToken = page.odataNextLink;
      yield page.value ?? [];
    } while (continuationToken);
  }

  /** Flattens the paged iteration into a stream of individual Jobs. */
  private async *listPagingAll(
    resourceGroupName: string,
    accountName: string,
    transformName: string,
    options?: JobsListOptionalParams
  ): AsyncIterableIterator<Job> {
    for await (const jobsPage of this.listPagingPage(
      resourceGroupName,
      accountName,
      transformName,
      options
    )) {
      yield* jobsPage;
    }
  }

  /**
   * Lists all of the Jobs for the Transform.
   * @param resourceGroupName The name of the resource group within the Azure subscription.
   * @param accountName The Media Services account name.
   * @param transformName The Transform name.
   * @param options The options parameters.
   */
  private _list(
    resourceGroupName: string,
    accountName: string,
    transformName: string,
    options?: JobsListOptionalParams
  ): Promise<JobsListResponse> {
    const operationArguments = {
      resourceGroupName,
      accountName,
      transformName,
      options
    };
    return this.client.sendOperationRequest(
      operationArguments,
      listOperationSpec
    );
  }

  /**
   * Gets a Job.
   * @param resourceGroupName The name of the resource group within the Azure subscription.
   * @param accountName The Media Services account name.
   * @param transformName The Transform name.
   * @param jobName The Job name.
   * @param options The options parameters.
   */
  get(
    resourceGroupName: string,
    accountName: string,
    transformName: string,
    jobName: string,
    options?: JobsGetOptionalParams
  ): Promise<JobsGetResponse> {
    const operationArguments = {
      resourceGroupName,
      accountName,
      transformName,
      jobName,
      options
    };
    return this.client.sendOperationRequest(
      operationArguments,
      getOperationSpec
    );
  }

  /**
   * Creates a Job.
   * @param resourceGroupName The name of the resource group within the Azure subscription.
   * @param accountName The Media Services account name.
   * @param transformName The Transform name.
   * @param jobName The Job name.
   * @param parameters The request parameters
   * @param options The options parameters.
   */
  create(
    resourceGroupName: string,
    accountName: string,
    transformName: string,
    jobName: string,
    parameters: Job,
    options?: JobsCreateOptionalParams
  ): Promise<JobsCreateResponse> {
    const operationArguments = {
      resourceGroupName,
      accountName,
      transformName,
      jobName,
      parameters,
      options
    };
    return this.client.sendOperationRequest(
      operationArguments,
      createOperationSpec
    );
  }

  /**
   * Deletes a Job.
   * @param resourceGroupName The name of the resource group within the Azure subscription.
   * @param accountName The Media Services account name.
   * @param transformName The Transform name.
   * @param jobName The Job name.
   * @param options The options parameters.
   */
  delete(
    resourceGroupName: string,
    accountName: string,
    transformName: string,
    jobName: string,
    options?: JobsDeleteOptionalParams
  ): Promise<void> {
    const operationArguments = {
      resourceGroupName,
      accountName,
      transformName,
      jobName,
      options
    };
    return this.client.sendOperationRequest(
      operationArguments,
      deleteOperationSpec
    );
  }

  /**
   * Update is only supported for description and priority. Updating Priority will take effect when the
   * Job state is Queued or Scheduled and depending on the timing the priority update may be ignored.
   * @param resourceGroupName The name of the resource group within the Azure subscription.
   * @param accountName The Media Services account name.
   * @param transformName The Transform name.
   * @param jobName The Job name.
   * @param parameters The request parameters
   * @param options The options parameters.
   */
  update(
    resourceGroupName: string,
    accountName: string,
    transformName: string,
    jobName: string,
    parameters: Job,
    options?: JobsUpdateOptionalParams
  ): Promise<JobsUpdateResponse> {
    const operationArguments = {
      resourceGroupName,
      accountName,
      transformName,
      jobName,
      parameters,
      options
    };
    return this.client.sendOperationRequest(
      operationArguments,
      updateOperationSpec
    );
  }

  /**
   * Cancel a Job.
   * @param resourceGroupName The name of the resource group within the Azure subscription.
   * @param accountName The Media Services account name.
   * @param transformName The Transform name.
   * @param jobName The Job name.
   * @param options The options parameters.
   */
  cancelJob(
    resourceGroupName: string,
    accountName: string,
    transformName: string,
    jobName: string,
    options?: JobsCancelJobOptionalParams
  ): Promise<void> {
    const operationArguments = {
      resourceGroupName,
      accountName,
      transformName,
      jobName,
      options
    };
    return this.client.sendOperationRequest(
      operationArguments,
      cancelJobOperationSpec
    );
  }

  /**
   * ListNext
   * @param resourceGroupName The name of the resource group within the Azure subscription.
   * @param accountName The Media Services account name.
   * @param transformName The Transform name.
   * @param nextLink The nextLink from the previous successful call to the List method.
   * @param options The options parameters.
   */
  private _listNext(
    resourceGroupName: string,
    accountName: string,
    transformName: string,
    nextLink: string,
    options?: JobsListNextOptionalParams
  ): Promise<JobsListNextResponse> {
    const operationArguments = {
      resourceGroupName,
      accountName,
      transformName,
      nextLink,
      options
    };
    return this.client.sendOperationRequest(
      operationArguments,
      listNextOperationSpec
    );
  }
}
// Operation Specifications
// JSON (non-XML) serializer shared by every spec below.
const serializer = coreClient.createSerializer(Mappers, /* isXml */ false);
// GET: list Jobs under a Transform; supports $filter and $orderby.
const listOperationSpec: coreClient.OperationSpec = {
  path:
    "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Media/mediaServices/{accountName}/transforms/{transformName}/jobs",
  httpMethod: "GET",
  responses: {
    200: {
      bodyMapper: Mappers.JobCollection
    },
    default: {
      bodyMapper: Mappers.ErrorResponse
    }
  },
  queryParameters: [
    Parameters.apiVersion,
    Parameters.filter,
    Parameters.orderby
  ],
  urlParameters: [
    Parameters.$host,
    Parameters.subscriptionId,
    Parameters.resourceGroupName,
    Parameters.accountName,
    Parameters.transformName
  ],
  headerParameters: [Parameters.accept],
  serializer
};
// GET: fetch a single Job by name.
const getOperationSpec: coreClient.OperationSpec = {
  path:
    "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Media/mediaServices/{accountName}/transforms/{transformName}/jobs/{jobName}",
  httpMethod: "GET",
  responses: {
    200: {
      bodyMapper: Mappers.Job
    },
    default: {
      bodyMapper: Mappers.ErrorResponse
    }
  },
  queryParameters: [Parameters.apiVersion],
  urlParameters: [
    Parameters.$host,
    Parameters.subscriptionId,
    Parameters.resourceGroupName,
    Parameters.accountName,
    Parameters.transformName,
    Parameters.jobName
  ],
  headerParameters: [Parameters.accept],
  serializer
};
// PUT: create a Job; the Job payload is sent as the JSON request body.
const createOperationSpec: coreClient.OperationSpec = {
  path:
    "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Media/mediaServices/{accountName}/transforms/{transformName}/jobs/{jobName}",
  httpMethod: "PUT",
  responses: {
    201: {
      bodyMapper: Mappers.Job
    },
    default: {
      bodyMapper: Mappers.ErrorResponse
    }
  },
  requestBody: Parameters.parameters12,
  queryParameters: [Parameters.apiVersion],
  urlParameters: [
    Parameters.$host,
    Parameters.subscriptionId,
    Parameters.resourceGroupName,
    Parameters.accountName,
    Parameters.transformName,
    Parameters.jobName
  ],
  headerParameters: [Parameters.accept, Parameters.contentType],
  mediaType: "json",
  serializer
};
// DELETE: remove a Job; 200 and 204 both count as success with no body.
const deleteOperationSpec: coreClient.OperationSpec = {
  path:
    "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Media/mediaServices/{accountName}/transforms/{transformName}/jobs/{jobName}",
  httpMethod: "DELETE",
  responses: {
    200: {},
    204: {},
    default: {
      bodyMapper: Mappers.ErrorResponse
    }
  },
  queryParameters: [Parameters.apiVersion],
  urlParameters: [
    Parameters.$host,
    Parameters.subscriptionId,
    Parameters.resourceGroupName,
    Parameters.accountName,
    Parameters.transformName,
    Parameters.jobName
  ],
  headerParameters: [Parameters.accept],
  serializer
};
// PATCH: update a Job (shares the same body parameter mapper as create).
const updateOperationSpec: coreClient.OperationSpec = {
  path:
    "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Media/mediaServices/{accountName}/transforms/{transformName}/jobs/{jobName}",
  httpMethod: "PATCH",
  responses: {
    200: {
      bodyMapper: Mappers.Job
    },
    default: {
      bodyMapper: Mappers.ErrorResponse
    }
  },
  requestBody: Parameters.parameters12,
  queryParameters: [Parameters.apiVersion],
  urlParameters: [
    Parameters.$host,
    Parameters.subscriptionId,
    Parameters.resourceGroupName,
    Parameters.accountName,
    Parameters.transformName,
    Parameters.jobName
  ],
  headerParameters: [Parameters.accept, Parameters.contentType],
  mediaType: "json",
  serializer
};
// POST: cancel a running Job; no response body on success.
const cancelJobOperationSpec: coreClient.OperationSpec = {
  path:
    "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Media/mediaServices/{accountName}/transforms/{transformName}/jobs/{jobName}/cancelJob",
  httpMethod: "POST",
  responses: {
    200: {},
    default: {
      bodyMapper: Mappers.ErrorResponse
    }
  },
  queryParameters: [Parameters.apiVersion],
  urlParameters: [
    Parameters.$host,
    Parameters.subscriptionId,
    Parameters.resourceGroupName,
    Parameters.accountName,
    Parameters.transformName,
    Parameters.jobName
  ],
  headerParameters: [Parameters.accept],
  serializer
};
// GET: follow an odataNextLink continuation URL for paging.
const listNextOperationSpec: coreClient.OperationSpec = {
  path: "{nextLink}",
  httpMethod: "GET",
  responses: {
    200: {
      bodyMapper: Mappers.JobCollection
    },
    default: {
      bodyMapper: Mappers.ErrorResponse
    }
  },
  queryParameters: [
    Parameters.apiVersion,
    Parameters.filter,
    Parameters.orderby
  ],
  urlParameters: [
    Parameters.$host,
    Parameters.subscriptionId,
    Parameters.resourceGroupName,
    Parameters.accountName,
    Parameters.nextLink,
    Parameters.transformName
  ],
  headerParameters: [Parameters.accept],
  serializer
};
import * as pulumi from "@pulumi/pulumi";
import * as utilities from "../utilities";
/**
* Provides an Neptune Cluster Resource. A Cluster Resource defines attributes that are
* applied to the entire cluster of Neptune Cluster Instances.
*
* Changes to a Neptune Cluster can occur when you manually change a
* parameter, such as `backupRetentionPeriod`, and are reflected in the next maintenance
* window. Because of this, this provider may report a difference in its planning
* phase because a modification has not yet taken place. You can use the
* `applyImmediately` flag to instruct the service to apply the change immediately
* (see documentation below).
*
* ## Example Usage
*
* ```typescript
* import * as pulumi from "@pulumi/pulumi";
* import * as aws from "@pulumi/aws";
*
* const defaultCluster = new aws.neptune.Cluster("default", {
* applyImmediately: true,
* backupRetentionPeriod: 5,
* clusterIdentifier: "neptune-cluster-demo",
* engine: "neptune",
* iamDatabaseAuthenticationEnabled: true,
* preferredBackupWindow: "07:00-09:00",
* skipFinalSnapshot: true,
* });
* ```
*
* > **Note:** AWS Neptune does not support user name/password–based access control.
* See the AWS [Docs](https://docs.aws.amazon.com/neptune/latest/userguide/limits.html) for more information.
*
* ## Import
*
* `aws_neptune_cluster` can be imported by using the cluster identifier, e.g.
*
* ```sh
* $ pulumi import aws:neptune/cluster:Cluster example my-cluster
* ```
*/
export class Cluster extends pulumi.CustomResource {
/**
* Get an existing Cluster resource's state with the given name, ID, and optional extra
* properties used to qualify the lookup.
*
* @param name The _unique_ name of the resulting resource.
* @param id The _unique_ provider ID of the resource to lookup.
* @param state Any extra arguments used during the lookup.
* @param opts Optional settings to control the behavior of the CustomResource.
*/
public static get(name: string, id: pulumi.Input<pulumi.ID>, state?: ClusterState, opts?: pulumi.CustomResourceOptions): Cluster {
return new Cluster(name, <any>state, { ...opts, id: id });
}
/** @internal */
public static readonly __pulumiType = 'aws:neptune/cluster:Cluster';
/**
* Returns true if the given object is an instance of Cluster. This is designed to work even
* when multiple copies of the Pulumi SDK have been loaded into the same process.
*/
public static isInstance(obj: any): obj is Cluster {
if (obj === undefined || obj === null) {
return false;
}
return obj['__pulumiType'] === Cluster.__pulumiType;
}
/**
* Specifies whether any cluster modifications are applied immediately, or during the next maintenance window. Default is `false`.
*/
public readonly applyImmediately!: pulumi.Output<boolean>;
/**
* The Neptune Cluster Amazon Resource Name (ARN)
*/
public /*out*/ readonly arn!: pulumi.Output<string>;
/**
* A list of EC2 Availability Zones that instances in the Neptune cluster can be created in.
*/
public readonly availabilityZones!: pulumi.Output<string[]>;
/**
* The days to retain backups for. Default `1`
*/
public readonly backupRetentionPeriod!: pulumi.Output<number | undefined>;
/**
* The cluster identifier. If omitted, this provider will assign a random, unique identifier.
*/
public readonly clusterIdentifier!: pulumi.Output<string>;
/**
* Creates a unique cluster identifier beginning with the specified prefix. Conflicts with `clusterIdentifier`.
*/
public readonly clusterIdentifierPrefix!: pulumi.Output<string>;
/**
* List of Neptune Instances that are a part of this cluster
*/
public /*out*/ readonly clusterMembers!: pulumi.Output<string[]>;
/**
* The Neptune Cluster Resource ID
*/
public /*out*/ readonly clusterResourceId!: pulumi.Output<string>;
/**
* If set to true, tags are copied to any snapshot of the DB cluster that is created.
*/
public readonly copyTagsToSnapshot!: pulumi.Output<boolean | undefined>;
/**
* A value that indicates whether the DB cluster has deletion protection enabled.The database can't be deleted when deletion protection is enabled. By default, deletion protection is disabled.
*/
    public readonly deletionProtection!: pulumi.Output<boolean | undefined>;
    /**
     * A list of the log types this DB cluster is configured to export to Cloudwatch Logs. Currently only supports `audit`.
     */
    public readonly enableCloudwatchLogsExports!: pulumi.Output<string[] | undefined>;
    /**
     * The DNS address of the Neptune instance
     */
    public /*out*/ readonly endpoint!: pulumi.Output<string>;
    /**
     * The name of the database engine to be used for this Neptune cluster. Defaults to `neptune`.
     */
    public readonly engine!: pulumi.Output<string | undefined>;
    /**
     * The database engine version.
     */
    public readonly engineVersion!: pulumi.Output<string>;
    /**
     * The name of your final Neptune snapshot when this Neptune cluster is deleted. If omitted, no final snapshot will be made.
     */
    public readonly finalSnapshotIdentifier!: pulumi.Output<string | undefined>;
    /**
     * The Route53 Hosted Zone ID of the endpoint
     */
    public /*out*/ readonly hostedZoneId!: pulumi.Output<string>;
    /**
     * Specifies whether mappings of AWS Identity and Access Management (IAM) accounts to database accounts are enabled.
     */
    public readonly iamDatabaseAuthenticationEnabled!: pulumi.Output<boolean | undefined>;
    /**
     * A List of ARNs for the IAM roles to associate to the Neptune Cluster.
     */
    public readonly iamRoles!: pulumi.Output<string[] | undefined>;
    /**
     * The ARN for the KMS encryption key. When specifying `kmsKeyArn`, `storageEncrypted` needs to be set to true.
     */
    public readonly kmsKeyArn!: pulumi.Output<string>;
    /**
     * A cluster parameter group to associate with the cluster.
     */
    public readonly neptuneClusterParameterGroupName!: pulumi.Output<string | undefined>;
    /**
     * A Neptune subnet group to associate with this Neptune instance.
     */
    public readonly neptuneSubnetGroupName!: pulumi.Output<string>;
    /**
     * The port on which the Neptune accepts connections. Default is `8182`.
     */
    public readonly port!: pulumi.Output<number | undefined>;
    /**
     * The daily time range during which automated backups are created if automated backups are enabled using the BackupRetentionPeriod parameter. Time in UTC. Default: A 30-minute window selected at random from an 8-hour block of time per region. e.g. 04:00-09:00
     */
    public readonly preferredBackupWindow!: pulumi.Output<string>;
    /**
     * The weekly time range during which system maintenance can occur, in (UTC) e.g. wed:04:00-wed:04:30
     */
    public readonly preferredMaintenanceWindow!: pulumi.Output<string>;
    /**
     * A read-only endpoint for the Neptune cluster, automatically load-balanced across replicas
     */
    public /*out*/ readonly readerEndpoint!: pulumi.Output<string>;
    /**
     * ARN of a source Neptune cluster or Neptune instance if this Neptune cluster is to be created as a Read Replica.
     */
    public readonly replicationSourceIdentifier!: pulumi.Output<string | undefined>;
    /**
     * Determines whether a final Neptune snapshot is created before the Neptune cluster is deleted. If true is specified, no Neptune snapshot is created. If false is specified, a Neptune snapshot is created before the Neptune cluster is deleted, using the value from `finalSnapshotIdentifier`. Default is `false`.
     */
    public readonly skipFinalSnapshot!: pulumi.Output<boolean | undefined>;
    /**
     * Specifies whether or not to create this cluster from a snapshot. You can use either the name or ARN when specifying a Neptune cluster snapshot, or the ARN when specifying a Neptune snapshot.
     */
    public readonly snapshotIdentifier!: pulumi.Output<string | undefined>;
    /**
     * Specifies whether the Neptune cluster is encrypted. The default is `false` if not specified.
     */
    public readonly storageEncrypted!: pulumi.Output<boolean | undefined>;
    /**
     * A map of tags to assign to the Neptune cluster. If configured with a provider `defaultTags` configuration block present, tags with matching keys will overwrite those defined at the provider-level.
     */
    public readonly tags!: pulumi.Output<{[key: string]: string} | undefined>;
    /**
     * A map of tags assigned to the resource, including those inherited from the provider.
     */
    public /*out*/ readonly tagsAll!: pulumi.Output<{[key: string]: string}>;
    /**
     * List of VPC security groups to associate with the Cluster
     */
    public readonly vpcSecurityGroupIds!: pulumi.Output<string[]>;
    /**
     * Create a Cluster resource with the given unique name, arguments, and options.
     *
     * @param name The _unique_ name of the resource.
     * @param args The arguments to use to populate this resource's properties.
     * @param opts A bag of options that control this resource's behavior.
     */
    constructor(name: string, args?: ClusterArgs, opts?: pulumi.CustomResourceOptions)
    constructor(name: string, argsOrState?: ClusterArgs | ClusterState, opts?: pulumi.CustomResourceOptions) {
        let inputs: pulumi.Inputs = {};
        opts = opts || {};
        // When an explicit resource ID is present we are adopting/looking up an
        // existing cluster, so the second argument is interpreted as ClusterState.
        if (opts.id) {
            const state = argsOrState as ClusterState | undefined;
            inputs["applyImmediately"] = state ? state.applyImmediately : undefined;
            inputs["arn"] = state ? state.arn : undefined;
            inputs["availabilityZones"] = state ? state.availabilityZones : undefined;
            inputs["backupRetentionPeriod"] = state ? state.backupRetentionPeriod : undefined;
            inputs["clusterIdentifier"] = state ? state.clusterIdentifier : undefined;
            inputs["clusterIdentifierPrefix"] = state ? state.clusterIdentifierPrefix : undefined;
            inputs["clusterMembers"] = state ? state.clusterMembers : undefined;
            inputs["clusterResourceId"] = state ? state.clusterResourceId : undefined;
            inputs["copyTagsToSnapshot"] = state ? state.copyTagsToSnapshot : undefined;
            inputs["deletionProtection"] = state ? state.deletionProtection : undefined;
            inputs["enableCloudwatchLogsExports"] = state ? state.enableCloudwatchLogsExports : undefined;
            inputs["endpoint"] = state ? state.endpoint : undefined;
            inputs["engine"] = state ? state.engine : undefined;
            inputs["engineVersion"] = state ? state.engineVersion : undefined;
            inputs["finalSnapshotIdentifier"] = state ? state.finalSnapshotIdentifier : undefined;
            inputs["hostedZoneId"] = state ? state.hostedZoneId : undefined;
            inputs["iamDatabaseAuthenticationEnabled"] = state ? state.iamDatabaseAuthenticationEnabled : undefined;
            inputs["iamRoles"] = state ? state.iamRoles : undefined;
            inputs["kmsKeyArn"] = state ? state.kmsKeyArn : undefined;
            inputs["neptuneClusterParameterGroupName"] = state ? state.neptuneClusterParameterGroupName : undefined;
            inputs["neptuneSubnetGroupName"] = state ? state.neptuneSubnetGroupName : undefined;
            inputs["port"] = state ? state.port : undefined;
            inputs["preferredBackupWindow"] = state ? state.preferredBackupWindow : undefined;
            inputs["preferredMaintenanceWindow"] = state ? state.preferredMaintenanceWindow : undefined;
            inputs["readerEndpoint"] = state ? state.readerEndpoint : undefined;
            inputs["replicationSourceIdentifier"] = state ? state.replicationSourceIdentifier : undefined;
            inputs["skipFinalSnapshot"] = state ? state.skipFinalSnapshot : undefined;
            inputs["snapshotIdentifier"] = state ? state.snapshotIdentifier : undefined;
            inputs["storageEncrypted"] = state ? state.storageEncrypted : undefined;
            inputs["tags"] = state ? state.tags : undefined;
            inputs["tagsAll"] = state ? state.tagsAll : undefined;
            inputs["vpcSecurityGroupIds"] = state ? state.vpcSecurityGroupIds : undefined;
        } else {
            // Otherwise a new cluster is being created and the second argument is
            // interpreted as ClusterArgs.
            const args = argsOrState as ClusterArgs | undefined;
            inputs["applyImmediately"] = args ? args.applyImmediately : undefined;
            inputs["availabilityZones"] = args ? args.availabilityZones : undefined;
            inputs["backupRetentionPeriod"] = args ? args.backupRetentionPeriod : undefined;
            inputs["clusterIdentifier"] = args ? args.clusterIdentifier : undefined;
            inputs["clusterIdentifierPrefix"] = args ? args.clusterIdentifierPrefix : undefined;
            inputs["copyTagsToSnapshot"] = args ? args.copyTagsToSnapshot : undefined;
            inputs["deletionProtection"] = args ? args.deletionProtection : undefined;
            inputs["enableCloudwatchLogsExports"] = args ? args.enableCloudwatchLogsExports : undefined;
            inputs["engine"] = args ? args.engine : undefined;
            inputs["engineVersion"] = args ? args.engineVersion : undefined;
            inputs["finalSnapshotIdentifier"] = args ? args.finalSnapshotIdentifier : undefined;
            inputs["iamDatabaseAuthenticationEnabled"] = args ? args.iamDatabaseAuthenticationEnabled : undefined;
            inputs["iamRoles"] = args ? args.iamRoles : undefined;
            inputs["kmsKeyArn"] = args ? args.kmsKeyArn : undefined;
            inputs["neptuneClusterParameterGroupName"] = args ? args.neptuneClusterParameterGroupName : undefined;
            inputs["neptuneSubnetGroupName"] = args ? args.neptuneSubnetGroupName : undefined;
            inputs["port"] = args ? args.port : undefined;
            inputs["preferredBackupWindow"] = args ? args.preferredBackupWindow : undefined;
            inputs["preferredMaintenanceWindow"] = args ? args.preferredMaintenanceWindow : undefined;
            inputs["replicationSourceIdentifier"] = args ? args.replicationSourceIdentifier : undefined;
            inputs["skipFinalSnapshot"] = args ? args.skipFinalSnapshot : undefined;
            inputs["snapshotIdentifier"] = args ? args.snapshotIdentifier : undefined;
            inputs["storageEncrypted"] = args ? args.storageEncrypted : undefined;
            inputs["tags"] = args ? args.tags : undefined;
            inputs["vpcSecurityGroupIds"] = args ? args.vpcSecurityGroupIds : undefined;
            // Output-only properties are always seeded as undefined on create;
            // the engine fills them in after the resource is provisioned.
            inputs["arn"] = undefined /*out*/;
            inputs["clusterMembers"] = undefined /*out*/;
            inputs["clusterResourceId"] = undefined /*out*/;
            inputs["endpoint"] = undefined /*out*/;
            inputs["hostedZoneId"] = undefined /*out*/;
            inputs["readerEndpoint"] = undefined /*out*/;
            inputs["tagsAll"] = undefined /*out*/;
        }
        // Default to this provider package's version when the caller gave none.
        if (!opts.version) {
            opts = pulumi.mergeOptions(opts, { version: utilities.getVersion()});
        }
        super(Cluster.__pulumiType, name, inputs, opts);
    }
}
/**
 * Input properties used for looking up and filtering Cluster resources.
 */
export interface ClusterState {
    /**
     * Specifies whether any cluster modifications are applied immediately, or during the next maintenance window. Default is `false`.
     */
    applyImmediately?: pulumi.Input<boolean>;
    /**
     * The Neptune Cluster Amazon Resource Name (ARN)
     */
    arn?: pulumi.Input<string>;
    /**
     * A list of EC2 Availability Zones that instances in the Neptune cluster can be created in.
     */
    availabilityZones?: pulumi.Input<pulumi.Input<string>[]>;
    /**
     * The days to retain backups for. Default `1`
     */
    backupRetentionPeriod?: pulumi.Input<number>;
    /**
     * The cluster identifier. If omitted, this provider will assign a random, unique identifier.
     */
    clusterIdentifier?: pulumi.Input<string>;
    /**
     * Creates a unique cluster identifier beginning with the specified prefix. Conflicts with `clusterIdentifier`.
     */
    clusterIdentifierPrefix?: pulumi.Input<string>;
    /**
     * List of Neptune Instances that are a part of this cluster
     */
    clusterMembers?: pulumi.Input<pulumi.Input<string>[]>;
    /**
     * The Neptune Cluster Resource ID
     */
    clusterResourceId?: pulumi.Input<string>;
    /**
     * If set to true, tags are copied to any snapshot of the DB cluster that is created.
     */
    copyTagsToSnapshot?: pulumi.Input<boolean>;
    /**
     * A value that indicates whether the DB cluster has deletion protection enabled. The database can't be deleted when deletion protection is enabled. By default, deletion protection is disabled.
     */
    deletionProtection?: pulumi.Input<boolean>;
    /**
     * A list of the log types this DB cluster is configured to export to Cloudwatch Logs. Currently only supports `audit`.
     */
    enableCloudwatchLogsExports?: pulumi.Input<pulumi.Input<string>[]>;
    /**
     * The DNS address of the Neptune instance
     */
    endpoint?: pulumi.Input<string>;
    /**
     * The name of the database engine to be used for this Neptune cluster. Defaults to `neptune`.
     */
    engine?: pulumi.Input<string>;
    /**
     * The database engine version.
     */
    engineVersion?: pulumi.Input<string>;
    /**
     * The name of your final Neptune snapshot when this Neptune cluster is deleted. If omitted, no final snapshot will be made.
     */
    finalSnapshotIdentifier?: pulumi.Input<string>;
    /**
     * The Route53 Hosted Zone ID of the endpoint
     */
    hostedZoneId?: pulumi.Input<string>;
    /**
     * Specifies whether mappings of AWS Identity and Access Management (IAM) accounts to database accounts are enabled.
     */
    iamDatabaseAuthenticationEnabled?: pulumi.Input<boolean>;
    /**
     * A List of ARNs for the IAM roles to associate to the Neptune Cluster.
     */
    iamRoles?: pulumi.Input<pulumi.Input<string>[]>;
    /**
     * The ARN for the KMS encryption key. When specifying `kmsKeyArn`, `storageEncrypted` needs to be set to true.
     */
    kmsKeyArn?: pulumi.Input<string>;
    /**
     * A cluster parameter group to associate with the cluster.
     */
    neptuneClusterParameterGroupName?: pulumi.Input<string>;
    /**
     * A Neptune subnet group to associate with this Neptune instance.
     */
    neptuneSubnetGroupName?: pulumi.Input<string>;
    /**
     * The port on which the Neptune accepts connections. Default is `8182`.
     */
    port?: pulumi.Input<number>;
    /**
     * The daily time range during which automated backups are created if automated backups are enabled using the BackupRetentionPeriod parameter. Time in UTC. Default: A 30-minute window selected at random from an 8-hour block of time per region. e.g. 04:00-09:00
     */
    preferredBackupWindow?: pulumi.Input<string>;
    /**
     * The weekly time range during which system maintenance can occur, in (UTC) e.g. wed:04:00-wed:04:30
     */
    preferredMaintenanceWindow?: pulumi.Input<string>;
    /**
     * A read-only endpoint for the Neptune cluster, automatically load-balanced across replicas
     */
    readerEndpoint?: pulumi.Input<string>;
    /**
     * ARN of a source Neptune cluster or Neptune instance if this Neptune cluster is to be created as a Read Replica.
     */
    replicationSourceIdentifier?: pulumi.Input<string>;
    /**
     * Determines whether a final Neptune snapshot is created before the Neptune cluster is deleted. If true is specified, no Neptune snapshot is created. If false is specified, a Neptune snapshot is created before the Neptune cluster is deleted, using the value from `finalSnapshotIdentifier`. Default is `false`.
     */
    skipFinalSnapshot?: pulumi.Input<boolean>;
    /**
     * Specifies whether or not to create this cluster from a snapshot. You can use either the name or ARN when specifying a Neptune cluster snapshot, or the ARN when specifying a Neptune snapshot.
     */
    snapshotIdentifier?: pulumi.Input<string>;
    /**
     * Specifies whether the Neptune cluster is encrypted. The default is `false` if not specified.
     */
    storageEncrypted?: pulumi.Input<boolean>;
    /**
     * A map of tags to assign to the Neptune cluster. If configured with a provider `defaultTags` configuration block present, tags with matching keys will overwrite those defined at the provider-level.
     */
    tags?: pulumi.Input<{[key: string]: pulumi.Input<string>}>;
    /**
     * A map of tags assigned to the resource, including those inherited from the provider.
     */
    tagsAll?: pulumi.Input<{[key: string]: pulumi.Input<string>}>;
    /**
     * List of VPC security groups to associate with the Cluster
     */
    vpcSecurityGroupIds?: pulumi.Input<pulumi.Input<string>[]>;
}
/**
 * The set of arguments for constructing a Cluster resource.
 */
export interface ClusterArgs {
    /**
     * Specifies whether any cluster modifications are applied immediately, or during the next maintenance window. Default is `false`.
     */
    applyImmediately?: pulumi.Input<boolean>;
    /**
     * A list of EC2 Availability Zones that instances in the Neptune cluster can be created in.
     */
    availabilityZones?: pulumi.Input<pulumi.Input<string>[]>;
    /**
     * The days to retain backups for. Default `1`
     */
    backupRetentionPeriod?: pulumi.Input<number>;
    /**
     * The cluster identifier. If omitted, this provider will assign a random, unique identifier.
     */
    clusterIdentifier?: pulumi.Input<string>;
    /**
     * Creates a unique cluster identifier beginning with the specified prefix. Conflicts with `clusterIdentifier`.
     */
    clusterIdentifierPrefix?: pulumi.Input<string>;
    /**
     * If set to true, tags are copied to any snapshot of the DB cluster that is created.
     */
    copyTagsToSnapshot?: pulumi.Input<boolean>;
    /**
     * A value that indicates whether the DB cluster has deletion protection enabled. The database can't be deleted when deletion protection is enabled. By default, deletion protection is disabled.
     */
    deletionProtection?: pulumi.Input<boolean>;
    /**
     * A list of the log types this DB cluster is configured to export to Cloudwatch Logs. Currently only supports `audit`.
     */
    enableCloudwatchLogsExports?: pulumi.Input<pulumi.Input<string>[]>;
    /**
     * The name of the database engine to be used for this Neptune cluster. Defaults to `neptune`.
     */
    engine?: pulumi.Input<string>;
    /**
     * The database engine version.
     */
    engineVersion?: pulumi.Input<string>;
    /**
     * The name of your final Neptune snapshot when this Neptune cluster is deleted. If omitted, no final snapshot will be made.
     */
    finalSnapshotIdentifier?: pulumi.Input<string>;
    /**
     * Specifies whether mappings of AWS Identity and Access Management (IAM) accounts to database accounts are enabled.
     */
    iamDatabaseAuthenticationEnabled?: pulumi.Input<boolean>;
    /**
     * A List of ARNs for the IAM roles to associate to the Neptune Cluster.
     */
    iamRoles?: pulumi.Input<pulumi.Input<string>[]>;
    /**
     * The ARN for the KMS encryption key. When specifying `kmsKeyArn`, `storageEncrypted` needs to be set to true.
     */
    kmsKeyArn?: pulumi.Input<string>;
    /**
     * A cluster parameter group to associate with the cluster.
     */
    neptuneClusterParameterGroupName?: pulumi.Input<string>;
    /**
     * A Neptune subnet group to associate with this Neptune instance.
     */
    neptuneSubnetGroupName?: pulumi.Input<string>;
    /**
     * The port on which the Neptune accepts connections. Default is `8182`.
     */
    port?: pulumi.Input<number>;
    /**
     * The daily time range during which automated backups are created if automated backups are enabled using the BackupRetentionPeriod parameter. Time in UTC. Default: A 30-minute window selected at random from an 8-hour block of time per region. e.g. 04:00-09:00
     */
    preferredBackupWindow?: pulumi.Input<string>;
    /**
     * The weekly time range during which system maintenance can occur, in (UTC) e.g. wed:04:00-wed:04:30
     */
    preferredMaintenanceWindow?: pulumi.Input<string>;
    /**
     * ARN of a source Neptune cluster or Neptune instance if this Neptune cluster is to be created as a Read Replica.
     */
    replicationSourceIdentifier?: pulumi.Input<string>;
    /**
     * Determines whether a final Neptune snapshot is created before the Neptune cluster is deleted. If true is specified, no Neptune snapshot is created. If false is specified, a Neptune snapshot is created before the Neptune cluster is deleted, using the value from `finalSnapshotIdentifier`. Default is `false`.
     */
    skipFinalSnapshot?: pulumi.Input<boolean>;
    /**
     * Specifies whether or not to create this cluster from a snapshot. You can use either the name or ARN when specifying a Neptune cluster snapshot, or the ARN when specifying a Neptune snapshot.
     */
    snapshotIdentifier?: pulumi.Input<string>;
    /**
     * Specifies whether the Neptune cluster is encrypted. The default is `false` if not specified.
     */
    storageEncrypted?: pulumi.Input<boolean>;
    /**
     * A map of tags to assign to the Neptune cluster. If configured with a provider `defaultTags` configuration block present, tags with matching keys will overwrite those defined at the provider-level.
     */
    tags?: pulumi.Input<{[key: string]: pulumi.Input<string>}>;
    /**
     * List of VPC security groups to associate with the Cluster
     */
    vpcSecurityGroupIds?: pulumi.Input<pulumi.Input<string>[]>;
}
import React from 'react';
import classNames from 'classnames';
import PropTypes from 'prop-types';
import MonthFoundation, { MonthAdapter, MonthDayInfo, MonthFoundationProps, MonthFoundationState } from '@douyinfe/semi-foundation/datePicker/monthFoundation';
import { cssClasses, numbers } from '@douyinfe/semi-foundation/datePicker/constants';
import BaseComponent, { BaseProps } from '../_base/baseComponent';
import { isBefore, isAfter, isBetween, isSameDay } from '@douyinfe/semi-foundation/datePicker/_utils/index';
import { noop, stubFalse, isFunction } from 'lodash';
import { parseISO } from 'date-fns';
import { Locale } from '../locale/interface';
const prefixCls = cssClasses.PREFIX;
/** Props consumed by the Month view in addition to the shared foundation props. */
export interface MonthProps extends MonthFoundationProps, BaseProps {
    // External ref for the month grid element; render() falls back to an internal ref.
    forwardRef: React.Ref<any>;
    // DatePicker slice of the locale object, used for weekday i18n.
    locale: Locale['DatePicker'];
    // Per-range-input focus flags consulted when disabling out-of-range days
    // (see getSingleDayStatus).
    focusRecordsRef: React.RefObject<{ rangeStart: boolean; rangeEnd: boolean }>;
}
export type MonthState = MonthFoundationState;
export default class Month extends BaseComponent<MonthProps, MonthState> {
    // Dev-time runtime prop validation.
    static propTypes = {
        month: PropTypes.object,
        selected: PropTypes.object,
        rangeStart: PropTypes.string,
        rangeEnd: PropTypes.string,
        offsetRangeStart: PropTypes.string,
        offsetRangeEnd: PropTypes.string,
        onDayClick: PropTypes.func,
        onDayHover: PropTypes.func,
        weekStartsOn: PropTypes.number,
        disabledDate: PropTypes.func,
        weeksRowNum: PropTypes.number,
        onWeeksRowNumChange: PropTypes.func,
        renderDate: PropTypes.func,
        renderFullDate: PropTypes.func,
        hoverDay: PropTypes.string, // Real-time hover date
        startDateOffset: PropTypes.func,
        endDateOffset: PropTypes.func,
        rangeInputFocus: PropTypes.oneOfType([PropTypes.string, PropTypes.bool]),
        focusRecordsRef: PropTypes.object,
        multiple: PropTypes.bool,
    };
    static defaultProps = {
        month: new Date(),
        // NOTE(review): this Set (and the Date above) is shared across every
        // Month instance that relies on the default — verify it is never mutated.
        selected: new Set(),
        rangeStart: '',
        rangeEnd: '',
        onDayClick: noop,
        onDayHover: noop,
        onWeeksRowNumChange: noop,
        weekStartsOn: numbers.WEEK_START_ON,
        disabledDate: stubFalse,
        weeksRowNum: 0,
    };
    // Internal ref for the grid element, used when no forwardRef prop is given.
    monthRef: React.RefObject<HTMLDivElement>;
    // Foundation holding the framework-agnostic logic; created in componentDidMount.
    foundation: MonthFoundation;
    constructor(props: MonthProps) {
        super(props);
        // weekdays and the month table are filled in by the foundation after mount.
        this.state = {
            weekdays: [],
            month: { weeks: [], monthText: '' },
            todayText: '',
            weeksRowNum: props.weeksRowNum,
        };
        this.monthRef = React.createRef();
    }
get adapter(): MonthAdapter {
return {
...super.adapter,
updateToday: todayText => this.setState({ todayText }),
setWeekDays: weekdays => this.setState({ weekdays }),
setWeeksRowNum: (weeksRowNum, callback) => this.setState({ weeksRowNum }, callback),
updateMonthTable: month => this.setState({ month }),
notifyDayClick: day => this.props.onDayClick(day),
notifyDayHover: day => this.props.onDayHover(day),
notifyWeeksRowNumChange: weeksRowNum => this.props.onWeeksRowNumChange(weeksRowNum),
};
}
    componentDidMount() {
        // The foundation drives all date computation; init() populates the
        // weekday header and month table into state via the adapter.
        this.foundation = new MonthFoundation(this.adapter);
        this.foundation.init();
    }
    componentWillUnmount() {
        this.foundation.destroy();
    }
    componentDidUpdate(prevProps: MonthProps, prevState: MonthState) {
        // Rebuild the month table only when the displayed month changes
        // (reference comparison on the month prop).
        if (prevProps.month !== this.props.month) {
            this.foundation.getMonthTable();
        }
    }
getSingleDayStatus(options: Partial<MonthProps> & { fullDate: string; todayText: string }) {
const { fullDate, todayText, selected, disabledDate, rangeStart, rangeEnd } = options;
const disabledOptions = { rangeStart, rangeEnd };
const isToday = fullDate === todayText;
const isSelected = selected.has(fullDate);
let isDisabled = disabledDate && disabledDate(parseISO(fullDate), disabledOptions);
if (
!isDisabled &&
this.props.rangeInputFocus === 'rangeStart' &&
rangeEnd &&
this.props.focusRecordsRef &&
this.props.focusRecordsRef.current.rangeEnd
) {
// The reason for splitting is that the dateRangeTime format: 'yyyy-MM-dd HH:MM:SS'
isDisabled = isAfter(fullDate, rangeEnd.trim().split(/\s+/)[0]);
}
if (
!isDisabled &&
this.props.rangeInputFocus === 'rangeEnd' &&
rangeStart &&
this.props.focusRecordsRef &&
this.props.focusRecordsRef.current.rangeStart
) {
// The reason for splitting is that the dateRangeTime format: 'yyyy-MM-dd HH:MM:SS'
isDisabled = isBefore(fullDate, rangeStart.trim().split(/\s+/)[0]);
}
return {
isToday, // Today
isSelected, // Selected
isDisabled // Disabled
};
}
    /**
     * Compute range-selection flags for one day cell. Flags that do not apply
     * are deliberately left `undefined` (not `false`) — callers rely only on
     * truthiness, so keep that distinction when touching this method.
     */
    getDateRangeStatus(options: Partial<MonthProps> & { fullDate: string }) {
        const { rangeStart, rangeEnd, fullDate, hoverDay, offsetRangeStart, offsetRangeEnd, rangeInputFocus } = options;
        // If no item is selected, return the empty object directly
        const _isDateRangeAnySelected = Boolean(rangeStart || rangeEnd);
        const _isDateRangeSelected = Boolean(rangeStart && rangeEnd);
        const _isOffsetDateRangeAnyExist = offsetRangeStart || offsetRangeEnd;
        if (!_isDateRangeAnySelected) {
            return ({});
        }
        // The range selects the hover date, and the normal hover is .semi-datepicker-main: hover
        const _isHoverDay = isSameDay(hoverDay, fullDate);
        // When one is selected
        // eslint-disable-next-line one-var
        let _isHoverAfterStart, _isHoverBeforeEnd, isSelectedStart, isSelectedEnd, isHoverDayAroundOneSelected;
        if (rangeStart) {
            isSelectedStart = isSameDay(fullDate, rangeStart);
            // Highlight the preview span between the confirmed start and the hover.
            if (rangeInputFocus === 'rangeEnd') {
                _isHoverAfterStart = isBetween(fullDate, { start: rangeStart, end: hoverDay });
            }
        }
        if (rangeEnd) {
            isSelectedEnd = isSameDay(fullDate, rangeEnd);
            if (rangeInputFocus === 'rangeStart') {
                _isHoverBeforeEnd = isBetween(fullDate, { start: hoverDay, end: rangeEnd });
            }
        }
        // Exactly one endpoint chosen so far.
        if (!_isDateRangeSelected && _isDateRangeAnySelected) {
            isHoverDayAroundOneSelected = _isHoverDay;
        }
        // eslint-disable-next-line one-var
        let isHover;
        // Hover preview is suppressed while an offset (e.g. week) range exists.
        if (!_isOffsetDateRangeAnyExist) {
            isHover = _isHoverAfterStart || _isHoverBeforeEnd || _isHoverDay;
        }
        // Select all
        // eslint-disable-next-line one-var
        let isInRange, isSelectedStartAfterHover, isSelectedEndBeforeHover, isHoverDayInStartSelection, isHoverDayInEndSelection, isHoverDayInRange;
        if (_isDateRangeSelected) {
            isInRange = isBetween(fullDate, { start: rangeStart, end: rangeEnd });
            if (!_isOffsetDateRangeAnyExist) {
                isSelectedStartAfterHover = isSelectedStart && isAfter(rangeStart, hoverDay);
                isSelectedEndBeforeHover = isSelectedEnd && isBefore(rangeEnd, hoverDay);
                isHoverDayInStartSelection = _isHoverDay && rangeInputFocus === 'rangeStart';
                isHoverDayInEndSelection = _isHoverDay && rangeInputFocus === 'rangeEnd';
                isHoverDayInRange = _isHoverDay && isBetween(hoverDay, { start: rangeStart, end: rangeEnd });
            }
        }
        return {
            isHoverDay: _isHoverDay, // Is the current hover date
            isSelectedStart, // Select Start
            isSelectedEnd, // End of selection
            isInRange, // Range within the selected date
            isHover, // Date between selection and hover date
            isSelectedStartAfterHover, // Choose to start behind the hover
            isSelectedEndBeforeHover, // Choose to end in front of the hover
            isHoverDayInRange, // Hover date within range
            isHoverDayInStartSelection, // Hover date when starting Date is selected
            isHoverDayInEndSelection, // Hover date when endDate is selected
            isHoverDayAroundOneSelected, // Hover date and select a date
        };
    }
    /**
     * Compute flags for offset-range (e.g. week-picker) selection for one day
     * cell. Returns {} when no offset range exists; unset flags stay
     * `undefined` on purpose — consumers only test truthiness.
     */
    getOffsetDateStatus(options: Partial<MonthProps> & { fullDate: string }) {
        const { offsetRangeStart, offsetRangeEnd, rangeStart, rangeEnd, fullDate, hoverDay } = options;
        // When there is no offset, return the empty object directly
        const _isOffsetDateRangeNull = !(offsetRangeStart || offsetRangeEnd);
        if (_isOffsetDateRangeNull) {
            return ({});
        }
        // Range Select base date
        const _isInRange = isBetween(fullDate, { start: rangeStart, end: rangeEnd });
        const _isHoverDay = isSameDay(hoverDay, fullDate);
        // NOTE: these two are '' (falsy) rather than false when the bound is empty.
        const _isSelectedStart = rangeStart && isSameDay(fullDate, rangeStart);
        const _isSelectedEnd = rangeEnd && isSameDay(fullDate, rangeEnd);
        const _isDateRangeSelected = Boolean(rangeStart && rangeEnd);
        // Determine whether it is offsetStart or offsetRangeEnd
        const isOffsetRangeStart = isSameDay(fullDate, offsetRangeStart);
        const isOffsetRangeEnd = isSameDay(fullDate, offsetRangeEnd);
        const isHoverDayOffset = _isHoverDay;
        // When selected
        let isHoverInOffsetRange, isInOffsetRange;
        if (_isDateRangeSelected) {
            isHoverInOffsetRange = _isInRange && _isHoverDay;
        }
        // When there is an offset area
        const _isOffsetDateRangeSelected = Boolean(offsetRangeStart && offsetRangeEnd);
        if (_isOffsetDateRangeSelected) {
            isInOffsetRange = (_isSelectedStart || isBetween(fullDate, { start: offsetRangeStart, end: offsetRangeEnd }) || _isSelectedEnd);
        }
        return {
            isOffsetRangeStart, // Week selection start
            isOffsetRangeEnd, // End of week selection
            isHoverInOffsetRange, // Hover in the week selection
            isHoverDayOffset, // Week selection hover day
            isInOffsetRange // Include start and end within the week selection (start and end styles are the same as other dates, so start and end are included)
        };
    }
/**
* get day current status
* @param {Object} fullDate
* @param {Object} options
* @returns {Object}
*/
getDayStatus(currentDay: MonthDayInfo, options: MonthProps & { todayText: string }) {
const { fullDate } = currentDay;
const { hoverDay, rangeStart, rangeEnd, todayText, offsetRangeStart, offsetRangeEnd, disabledDate, selected, rangeInputFocus } = options;
const singleDayStatus = this.getSingleDayStatus({ fullDate, todayText, hoverDay, selected, disabledDate, rangeStart, rangeEnd });
const dateRangeStatus = this.getDateRangeStatus({ fullDate, rangeStart, rangeEnd, hoverDay, offsetRangeStart, offsetRangeEnd, rangeInputFocus, ...singleDayStatus });
const offsetDataStatus = this.getOffsetDateStatus({ offsetRangeStart, offsetRangeEnd, rangeStart, rangeEnd, fullDate, hoverDay, ...singleDayStatus, ...dateRangeStatus });
// this parameter will pass to the user when given renderFullDate function, do not delete or modify its key
const dayStatus = {
...singleDayStatus,
...dateRangeStatus,
...offsetDataStatus,
};
return dayStatus;
}
renderDayOfWeek() {
const { locale } = this.props;
const weekdayCls = classNames(cssClasses.WEEKDAY);
const weekdayItemCls = classNames(`${prefixCls}-weekday-item`);
const { weekdays } = this.state;
// i18n
const weekdaysText = weekdays.map(key => locale.weeks[key]);
return (
<div role="row" className={weekdayCls}>
{weekdaysText.map((E, i) => (
<div role="columnheader" key={E + i} className={weekdayItemCls}>
{E}
</div>
))}
</div>
);
}
renderWeeks() {
const { month } = this.state;
const { weeks } = month;
const { weeksRowNum } = this.props;
let style = {};
if (weeksRowNum) {
const height = weeksRowNum * numbers.WEEK_HEIGHT;
style = { height };
}
const weeksCls = classNames(cssClasses.WEEKS);
return (
<div className={weeksCls} style={style}>
{weeks.map((week, weekIndex) => this.renderWeek(week, weekIndex))}
</div>
);
}
renderWeek(week: MonthDayInfo[], weekIndex: number) {
const weekCls = cssClasses.WEEK;
return (
<div role="row" className={weekCls} key={weekIndex}>
{week.map((day, dayIndex) => this.renderDay(day, dayIndex))}
</div>
);
}
    /**
     * Render a single day cell. Placeholder cells (no fullDate, i.e. padding
     * outside the current month) render an empty, non-focusable gridcell.
     * Otherwise the computed day status is mapped onto state CSS classes,
     * unless the user takes over rendering via renderFullDate.
     */
    renderDay(day: MonthDayInfo, dayIndex: number) {
        const { todayText } = this.state;
        const { renderFullDate, renderDate } = this.props;
        const { fullDate, dayNumber } = day;
        if (!fullDate) {
            return (
                <div role="gridcell" tabIndex={-1} key={(dayNumber as number) + dayIndex} className={cssClasses.DAY}>
                    <span />
                </div>
            );
        }
        const dayStatus = this.getDayStatus(day, { todayText, ...this.props });
        // Map each status flag onto its state class; undefined flags are simply skipped.
        const dayCls = classNames(cssClasses.DAY, {
            [cssClasses.DAY_TODAY]: dayStatus.isToday,
            [cssClasses.DAY_IN_RANGE]: dayStatus.isInRange,
            [cssClasses.DAY_HOVER]: dayStatus.isHover,
            [cssClasses.DAY_SELECTED]: dayStatus.isSelected,
            [cssClasses.DAY_SELECTED_START]: dayStatus.isSelectedStart,
            [cssClasses.DAY_SELECTED_END]: dayStatus.isSelectedEnd,
            [cssClasses.DAY_DISABLED]: dayStatus.isDisabled,
            // offsetDate class
            [cssClasses.DAY_HOVER_DAY]: dayStatus.isHoverDayOffset,
            [cssClasses.DAY_IN_OFFSET_RANGE]: dayStatus.isInOffsetRange,
            [cssClasses.DAY_SELECTED_RANGE_HOVER]: dayStatus.isHoverInOffsetRange,
            [cssClasses.DAY_OFFSET_RANGE_START]: dayStatus.isOffsetRangeStart,
            [cssClasses.DAY_OFFSET_RANGE_END]: dayStatus.isOffsetRangeEnd,
            // range input class
            [cssClasses.DAY_SELECTED_START_AFTER_HOVER]: dayStatus.isSelectedStartAfterHover,
            [cssClasses.DAY_SELECTED_END_BEFORE_HOVER]: dayStatus.isSelectedEndBeforeHover,
            [cssClasses.DAY_HOVER_DAY_BEFORE_RANGE]: dayStatus.isHoverDayInStartSelection,
            [cssClasses.DAY_HOVER_DAY_AFTER_RANGE]: dayStatus.isHoverDayInEndSelection,
            [cssClasses.DAY_HOVER_DAY_AROUND_SINGLE_SELECTED]: dayStatus.isHoverDayAroundOneSelected,
        });
        const dayMainCls = classNames({
            [`${cssClasses.DAY}-main`]: true,
        });
        const fullDateArgs = [dayNumber, fullDate, dayStatus];
        const customRender = isFunction(renderFullDate);
        return (
            <div
                role="gridcell"
                tabIndex={dayStatus.isDisabled ? -1 : 0}
                aria-disabled={dayStatus.isDisabled}
                aria-selected={dayStatus.isSelected}
                aria-label={fullDate}
                className={!customRender ? dayCls : cssClasses.DAY}
                title={fullDate}
                key={(dayNumber as number) + dayIndex}
                onClick={e => !dayStatus.isDisabled && this.foundation.handleClick(day)}
                onMouseEnter={() => this.foundation.handleHover(day)}
                onMouseLeave={() => this.foundation.handleHover()}
            >
                {customRender ? renderFullDate(...fullDateArgs) : (
                    <div className={dayMainCls}>
                        {isFunction(renderDate) ? renderDate(dayNumber, fullDate) : <span>{dayNumber}</span>}
                    </div>
                )}
            </div>
        );
    }
render() {
const { forwardRef, multiple } = this.props;
const weekday = this.renderDayOfWeek();
const weeks = this.renderWeeks();
const monthCls = classNames(cssClasses.MONTH);
const ref = forwardRef || this.monthRef;
return (
<div role="grid" aria-multiselectable={multiple} ref={ref} className={monthCls} >
{weekday}
{weeks}
</div>
);
}
}
import fs from 'fs'
import path from 'path'
import { Plugin } from '../plugin'
import chalk from 'chalk'
import {
FS_PREFIX,
SPECIAL_QUERY_RE,
DEFAULT_EXTENSIONS,
DEFAULT_MAIN_FIELDS,
OPTIMIZABLE_ENTRY_RE
} from '../constants'
import {
isBuiltin,
bareImportRE,
createDebugger,
injectQuery,
isExternalUrl,
isObject,
normalizePath,
fsPathFromId,
ensureVolumeInPath,
resolveFrom,
isDataUrl,
cleanUrl,
slash,
nestedResolveFrom,
isFileReadable,
isTsRequest,
isPossibleTsOutput,
getTsSrcPath
} from '../utils'
import { ViteDevServer, SSROptions } from '..'
import { PartialResolvedId } from 'rollup'
import { resolve as _resolveExports } from 'resolve.exports'
import {
loadPackageData,
PackageCache,
PackageData,
resolvePackageData
} from '../packages'
// special id for paths marked with browser: false
// https://github.com/defunctzombie/package-browser-field-spec#ignore-a-module
export const browserExternalId = '__vite-browser-external'
// Truthy when any DEBUG namespace is set; used to skip debug-only string work.
const isDebug = process.env.DEBUG
// Scoped logger; `onlyWhenFocused` keeps it silent unless this namespace is
// explicitly enabled via DEBUG.
const debug = createDebugger('vite:resolve-details', {
  onlyWhenFocused: true
})
/** User-facing resolve options (presumably mirrors the `resolve` config — confirm). */
export interface ResolveOptions {
  // package.json fields tried when picking a package entry point
  // (DEFAULT_MAIN_FIELDS is imported above as the likely fallback).
  mainFields?: string[]
  // Conditions used for conditional "exports" resolution — TODO confirm
  conditions?: string[]
  // Extensions tried for extension-less imports (see DEFAULT_EXTENSIONS import).
  extensions?: string[]
  // Package names that should always resolve to the same copy — TODO confirm
  dedupe?: string[]
  // When true, symlinked files keep their symlink path instead of the real path.
  preserveSymlinks?: boolean
}
/** Internal superset of ResolveOptions threaded through every helper below. */
export interface InternalResolveOptions extends ResolveOptions {
  root: string
  isBuild: boolean
  isProduction: boolean
  ssrConfig?: SSROptions
  packageCache?: PackageCache
  /**
   * src code mode also attempts the following:
   * - resolving /xxx as URLs
   * - resolving bare imports from optimized deps
   */
  asSrc?: boolean
  // allow resolving a directory to its index file
  tryIndex?: boolean
  // prefix prepended to the basename as a retry when direct resolution fails
  tryPrefix?: string
  // skip reading package.json when resolving a directory
  skipPackageJson?: boolean
  // treat bare-looking specifiers as relative (used e.g. for HTML URLs)
  preferRelative?: boolean
  preserveSymlinks?: boolean
  // true when resolving on behalf of a CJS require (set via @rollup/plugin-commonjs)
  isRequire?: boolean
  // #3040
  // when the importer is a ts module,
  // if the specifier requests a non-existent `.js/jsx/mjs/cjs` file,
  // should also try import from `.ts/tsx/mts/cts` source file as fallback.
  isFromTsImporter?: boolean
  // on resolution failure, retry once with ESM-friendly mainFields/extensions
  tryEsmOnly?: boolean
}
/**
 * Vite's main resolver plugin. resolveId handles, in order: /@fs/ paths,
 * root-relative URLs, relative imports (with browser-field mapping),
 * absolute fs paths, external URLs, data URLs, and bare package imports
 * (optimized deps, browser field, node resolution, node built-ins).
 */
export function resolvePlugin(baseOptions: InternalResolveOptions): Plugin {
  const {
    root,
    isProduction,
    asSrc,
    ssrConfig,
    preferRelative = false
  } = baseOptions
  let server: ViteDevServer | undefined
  const { target: ssrTarget, noExternal: ssrNoExternal } = ssrConfig ?? {}
  return {
    name: 'vite:resolve',
    configureServer(_server) {
      // captured so bare-import resolution below can consult optimizer state
      server = _server
    },
    resolveId(id, importer, resolveOpts) {
      const ssr = resolveOpts?.ssr === true
      // browser-external sentinel ids resolve to themselves; load() stubs them
      if (id.startsWith(browserExternalId)) {
        return id
      }
      // fast path for commonjs proxy modules
      if (/\?commonjs/.test(id) || id === 'commonjsHelpers.js') {
        return
      }
      const targetWeb = !ssr || ssrTarget === 'webworker'
      // this is passed by @rollup/plugin-commonjs
      const isRequire: boolean =
        resolveOpts?.custom?.['node-resolve']?.isRequire ?? false
      const options: InternalResolveOptions = {
        isRequire,
        ...baseOptions,
        isFromTsImporter: isTsRequest(importer ?? '')
      }
      let res: string | PartialResolvedId | undefined
      // explicit fs paths that starts with /@fs/*
      if (asSrc && id.startsWith(FS_PREFIX)) {
        const fsPath = fsPathFromId(id)
        res = tryFsResolve(fsPath, options)
        isDebug && debug(`[@fs] ${chalk.cyan(id)} -> ${chalk.dim(res)}`)
        // always return here even if res doesn't exist since /@fs/ is explicit
        // if the file doesn't exist it should be a 404
        return res || fsPath
      }
      // URL
      // /foo -> /fs-root/foo
      if (asSrc && id.startsWith('/')) {
        const fsPath = path.resolve(root, id.slice(1))
        if ((res = tryFsResolve(fsPath, options))) {
          isDebug && debug(`[url] ${chalk.cyan(id)} -> ${chalk.dim(res)}`)
          return res
        }
      }
      // relative
      if (id.startsWith('.') || (preferRelative && /^\w/.test(id))) {
        const basedir = importer ? path.dirname(importer) : process.cwd()
        const fsPath = path.resolve(basedir, id)
        // handle browser field mapping for relative imports
        const normalizedFsPath = normalizePath(fsPath)
        const pathFromBasedir = normalizedFsPath.slice(basedir.length)
        if (pathFromBasedir.startsWith('/node_modules/')) {
          // normalize direct imports from node_modules to bare imports, so the
          // hashing logic is shared and we avoid duplicated modules #2503
          const bareImport = pathFromBasedir.slice('/node_modules/'.length)
          if (
            (res = tryNodeResolve(
              bareImport,
              importer,
              options,
              targetWeb,
              server,
              ssr
            )) &&
            res.id.startsWith(normalizedFsPath)
          ) {
            return res
          }
        }
        if (
          targetWeb &&
          (res = tryResolveBrowserMapping(fsPath, importer, options, true))
        ) {
          return res
        }
        if ((res = tryFsResolve(fsPath, options))) {
          isDebug && debug(`[relative] ${chalk.cyan(id)} -> ${chalk.dim(res)}`)
          // propagate the importer's package so side-effect info is preserved
          const pkg = importer != null && idToPkgMap.get(importer)
          if (pkg) {
            idToPkgMap.set(res, pkg)
            return {
              id: res,
              moduleSideEffects: pkg.hasSideEffects(res)
            }
          }
          return res
        }
      }
      // absolute fs paths
      if (path.isAbsolute(id) && (res = tryFsResolve(id, options))) {
        isDebug && debug(`[fs] ${chalk.cyan(id)} -> ${chalk.dim(res)}`)
        return res
      }
      // external
      if (isExternalUrl(id)) {
        return {
          id,
          external: true
        }
      }
      // data uri: pass through (this only happens during build and will be
      // handled by dedicated plugin)
      if (isDataUrl(id)) {
        return null
      }
      // bare package imports, perform node resolve
      if (bareImportRE.test(id)) {
        if (
          asSrc &&
          server &&
          !ssr &&
          (res = tryOptimizedResolve(id, server, importer))
        ) {
          return res
        }
        if (
          targetWeb &&
          (res = tryResolveBrowserMapping(id, importer, options, false))
        ) {
          return res
        }
        if (
          (res = tryNodeResolve(id, importer, options, targetWeb, server, ssr))
        ) {
          return res
        }
        // node built-ins.
        // externalize if building for SSR, otherwise redirect to empty module
        if (isBuiltin(id)) {
          if (ssr) {
            if (ssrNoExternal === true) {
              let message = `Cannot bundle Node.js built-in "${id}"`
              if (importer) {
                message += ` imported from "${path.relative(
                  process.cwd(),
                  importer
                )}"`
              }
              message += `. Consider disabling ssr.noExternal or remove the built-in dependency.`
              this.error(message)
            }
            return {
              id,
              external: true
            }
          } else {
            if (!asSrc) {
              debug(
                `externalized node built-in "${id}" to empty module. ` +
                  `(imported by: ${chalk.white.dim(importer)})`
              )
            }
            return isProduction
              ? browserExternalId
              : `${browserExternalId}:${id}`
          }
        }
      }
      isDebug && debug(`[fallthrough] ${chalk.dim(id)}`)
    },
    load(id) {
      // stub out browser-externalized modules: empty in prod, throwing
      // Proxy in dev so accidental access yields a descriptive error
      if (id.startsWith(browserExternalId)) {
        return isProduction
          ? `export default {}`
          : `export default new Proxy({}, {
  get() {
    throw new Error('Module "${id.slice(
      browserExternalId.length + 1
    )}" has been externalized for browser compatibility and cannot be accessed in client code.')
  }
})`
      }
    }
  }
}
/**
 * Try resolving `fsPath` on the file system, honoring ?query/#hash
 * postfixes, implicit extensions and (via tryResolveFile) directory
 * package.json / index resolution. Returns the resolved path with the
 * postfix re-attached, or undefined.
 */
function tryFsResolve(
  fsPath: string,
  options: InternalResolveOptions,
  tryIndex = true,
  targetWeb = true
): string | undefined {
  let file = fsPath
  let postfix = ''
  // split off a ?query or #hash postfix, if present
  let postfixIndex = fsPath.indexOf('?')
  if (postfixIndex < 0) {
    postfixIndex = fsPath.indexOf('#')
  }
  if (postfixIndex > 0) {
    file = fsPath.slice(0, postfixIndex)
    postfix = fsPath.slice(postfixIndex)
  }
  let res: string | undefined
  // if a postfix exists, first try resolving the path with the postfix
  // intact (the ?/# may be part of the actual file name). See #4703.
  if (
    postfix &&
    (res = tryResolveFile(
      fsPath,
      '',
      options,
      false,
      targetWeb,
      options.tryPrefix,
      options.skipPackageJson
    ))
  ) {
    return res
  }
  // exact file (postfix stripped), no index resolution yet
  if (
    (res = tryResolveFile(
      file,
      postfix,
      options,
      false,
      targetWeb,
      options.tryPrefix,
      options.skipPackageJson
    ))
  ) {
    return res
  }
  // try each implicit extension, again with- then without-postfix
  for (const ext of options.extensions || DEFAULT_EXTENSIONS) {
    if (
      postfix &&
      (res = tryResolveFile(
        fsPath + ext,
        '',
        options,
        false,
        targetWeb,
        options.tryPrefix,
        options.skipPackageJson
      ))
    ) {
      return res
    }
    if (
      (res = tryResolveFile(
        file + ext,
        postfix,
        options,
        false,
        targetWeb,
        options.tryPrefix,
        options.skipPackageJson
      ))
    ) {
      return res
    }
  }
  // finally allow directory (package.json / index) resolution
  if (
    postfix &&
    (res = tryResolveFile(
      fsPath,
      '',
      options,
      tryIndex,
      targetWeb,
      options.tryPrefix,
      options.skipPackageJson
    ))
  ) {
    return res
  }
  if (
    (res = tryResolveFile(
      file,
      postfix,
      options,
      tryIndex,
      targetWeb,
      options.tryPrefix,
      options.skipPackageJson
    ))
  ) {
    return res
  }
}
/**
 * Resolve `file` to an actual file on disk. A plain file resolves to its
 * real path (+postfix); a directory (when tryIndex) resolves via its
 * package.json entry or index file. Falls back to the TS source of a
 * compiled output (#3040) and to a tryPrefix-prefixed basename.
 */
function tryResolveFile(
  file: string,
  postfix: string,
  options: InternalResolveOptions,
  tryIndex: boolean,
  targetWeb: boolean,
  tryPrefix?: string,
  skipPackageJson?: boolean
): string | undefined {
  // #2051 if we don't have read permission on a directory, existsSync() still
  // works and will result in massively slow subsequent checks (which are
  // unnecessary in the first place)
  if (isFileReadable(file)) {
    if (!fs.statSync(file).isDirectory()) {
      return getRealPath(file, options.preserveSymlinks) + postfix
    } else if (tryIndex) {
      if (!skipPackageJson) {
        const pkgPath = file + '/package.json'
        try {
          // path points to a node package
          const pkg = loadPackageData(pkgPath, options.preserveSymlinks)
          const resolved = resolvePackageEntry(file, pkg, targetWeb, options)
          return resolved
        } catch (e) {
          // a missing package.json is fine; anything else is a real error
          if (e.code !== 'ENOENT') {
            throw e
          }
        }
      }
      const index = tryFsResolve(file + '/index', options)
      if (index) return index + postfix
    }
  }
  // #3040: a .js/.jsx/... request from a TS importer may point at the
  // compiled output of a TS source file — retry with the TS path
  const tryTsExtension = options.isFromTsImporter && isPossibleTsOutput(file)
  if (tryTsExtension) {
    const tsSrcPath = getTsSrcPath(file)
    return tryResolveFile(
      tsSrcPath,
      postfix,
      options,
      tryIndex,
      targetWeb,
      tryPrefix,
      skipPackageJson
    )
  }
  if (tryPrefix) {
    const prefixed = `${path.dirname(file)}/${tryPrefix}${path.basename(file)}`
    // NOTE(review): skipPackageJson is not forwarded on this retry —
    // confirm this asymmetry with the tryTsExtension branch is intentional
    return tryResolveFile(prefixed, postfix, options, tryIndex, targetWeb)
  }
}
// maps a resolved file path back to the package it belongs to;
// consulted for browser-field mapping and side-effect info of later imports
export const idToPkgMap = new Map<string, PackageData>()
/**
 * Resolve a bare import through node-style package resolution, including
 * "foo > bar"-nested specifiers, dedupe handling, the tryEsmOnly retry,
 * and (in dev) optimized-deps version-query / missing-import bookkeeping.
 */
export function tryNodeResolve(
  id: string,
  importer: string | null | undefined,
  options: InternalResolveOptions,
  targetWeb: boolean,
  server?: ViteDevServer,
  ssr?: boolean
): PartialResolvedId | undefined {
  const { root, dedupe, isBuild, preserveSymlinks, packageCache } = options
  // split id by last '>' for nested selected packages, for example:
  // 'foo > bar > baz' => 'foo > bar' & 'baz'
  // 'foo' => '' & 'foo'
  const lastArrowIndex = id.lastIndexOf('>')
  const nestedRoot = id.substring(0, lastArrowIndex).trim()
  const nestedPath = id.substring(lastArrowIndex + 1).trim()
  // collect every path prefix that could be the package id
  // (e.g. 'a/b/c' -> ['a', 'a/b', 'a/b/c'])
  const possiblePkgIds: string[] = []
  for (let prevSlashIndex = -1; ; ) {
    let slashIndex = nestedPath.indexOf('/', prevSlashIndex + 1)
    if (slashIndex < 0) {
      slashIndex = nestedPath.length
    }
    const part = nestedPath.slice(
      prevSlashIndex + 1,
      (prevSlashIndex = slashIndex)
    )
    if (!part) {
      break
    }
    // Assume path parts with an extension are not package roots, except for the
    // first path part (since periods are sadly allowed in package names).
    // At the same time, skip the first path part if it begins with "@"
    // (since "@foo/bar" should be treated as the top-level path).
    if (possiblePkgIds.length ? path.extname(part) : part[0] === '@') {
      continue
    }
    const possiblePkgId = nestedPath.slice(0, slashIndex)
    possiblePkgIds.push(possiblePkgId)
  }
  // choose the directory to resolve from: project root for deduped
  // packages, otherwise the importer's directory when it exists on disk
  let basedir: string
  if (dedupe?.some((id) => possiblePkgIds.includes(id))) {
    basedir = root
  } else if (
    importer &&
    path.isAbsolute(importer) &&
    fs.existsSync(cleanUrl(importer))
  ) {
    basedir = path.dirname(importer)
  } else {
    basedir = root
  }
  // nested node module, step-by-step resolve to the basedir of the nestedPath
  if (nestedRoot) {
    basedir = nestedResolveFrom(nestedRoot, basedir, preserveSymlinks)
  }
  // deepest prefix that actually resolves to a package wins
  let pkg: PackageData | undefined
  const pkgId = possiblePkgIds.reverse().find((pkgId) => {
    pkg = resolvePackageData(pkgId, basedir, preserveSymlinks, packageCache)!
    return pkg
  })!
  if (!pkg) {
    return
  }
  // entry resolution for 'pkg', deep-import resolution for 'pkg/sub/path'
  let resolveId = resolvePackageEntry
  let unresolvedId = pkgId
  if (unresolvedId !== nestedPath) {
    resolveId = resolveDeepImport
    unresolvedId = '.' + nestedPath.slice(pkgId.length)
  }
  let resolved: string | undefined
  try {
    resolved = resolveId(unresolvedId, pkg, targetWeb, options)
  } catch (err) {
    if (!options.tryEsmOnly) {
      throw err
    }
  }
  // tryEsmOnly: retry once with ESM-friendly defaults and isRequire off
  if (!resolved && options.tryEsmOnly) {
    resolved = resolveId(unresolvedId, pkg, targetWeb, {
      ...options,
      isRequire: false,
      mainFields: DEFAULT_MAIN_FIELDS,
      extensions: DEFAULT_EXTENSIONS
    })
  }
  if (!resolved) {
    return
  }
  // link id to pkg for browser field mapping check
  idToPkgMap.set(resolved, pkg)
  if (isBuild) {
    // Resolve package side effects for build so that rollup can better
    // perform tree-shaking
    return {
      id: resolved,
      moduleSideEffects: pkg.hasSideEffects(resolved)
    }
  } else {
    if (
      !resolved.includes('node_modules') || // linked
      !server || // build
      server._isRunningOptimizer || // optimizing
      !server._optimizeDepsMetadata
    ) {
      return { id: resolved }
    }
    // if we reach here, it's a valid dep import that hasn't been optimized.
    const isJsType = OPTIMIZABLE_ENTRY_RE.test(resolved)
    const exclude = server.config.optimizeDeps?.exclude
    if (
      !isJsType ||
      importer?.includes('node_modules') ||
      exclude?.includes(pkgId) ||
      exclude?.includes(nestedPath) ||
      SPECIAL_QUERY_RE.test(resolved) ||
      ssr
    ) {
      // excluded from optimization
      // Inject a version query to npm deps so that the browser
      // can cache it without re-validation, but only do so for known js types.
      // otherwise we may introduce duplicated modules for externalized files
      // from pre-bundled deps.
      const versionHash = server._optimizeDepsMetadata?.browserHash
      if (versionHash && isJsType) {
        resolved = injectQuery(resolved, `v=${versionHash}`)
      }
    } else {
      // this is a missing import.
      // queue optimize-deps re-run.
      server._registerMissingImport?.(id, resolved, ssr)
    }
    return { id: resolved }
  }
}
/**
 * Check whether `id` (directly, or as a dependency nested under another
 * optimized package like "my-lib > foo") has a pre-bundled file, and
 * return its versioned URL if so.
 */
export function tryOptimizedResolve(
  id: string,
  server: ViteDevServer,
  importer?: string
): string | undefined {
  const cacheDir = server.config.cacheDir
  const depData = server._optimizeDepsMetadata
  if (!cacheDir || !depData) return
  // pre-bundled URL: file path + browser-hash version query
  // (+ es-interop marker when the dep needs CJS interop)
  const getOptimizedUrl = (optimizedData: typeof depData.optimized[string]) => {
    return (
      optimizedData.file +
      `?v=${depData.browserHash}${
        optimizedData.needsInterop ? `&es-interop` : ``
      }`
    )
  }
  // check if id has been optimized
  const isOptimized = depData.optimized[id]
  if (isOptimized) {
    return getOptimizedUrl(isOptimized)
  }
  if (!importer) return
  // further check if id is imported by nested dependency
  let resolvedSrc: string | undefined
  for (const [pkgPath, optimizedData] of Object.entries(depData.optimized)) {
    // check for scenarios, e.g.
    //   pkgPath => "my-lib > foo"
    //   id => "foo"
    // this narrows the need to do a full resolve
    if (!pkgPath.endsWith(id)) continue
    // lazily initialize resolvedSrc
    if (resolvedSrc == null) {
      try {
        // this may throw errors if unable to resolve, e.g. aliased id
        resolvedSrc = normalizePath(resolveFrom(id, path.dirname(importer)))
      } catch {
        // this is best-effort only so swallow errors
        break
      }
    }
    // match by src to correctly identify if id belongs to nested dependency
    if (optimizedData.src === resolvedSrc) {
      return getOptimizedUrl(optimizedData)
    }
  }
}
/**
 * Resolve the entry point ('.') of a package, consulting, in priority
 * order: the "exports" field, the web "browser" field (with a UMD-vs-ESM
 * heuristic against "module"), the configured mainFields, and finally
 * "main" / index.js. Successful results are cached on the package data.
 * Throws (via packageEntryFailure) when no entry can be resolved.
 */
export function resolvePackageEntry(
  id: string,
  { dir, data, setResolvedCache, getResolvedCache }: PackageData,
  targetWeb: boolean,
  options: InternalResolveOptions
): string | undefined {
  const cached = getResolvedCache('.', targetWeb)
  if (cached) {
    return cached
  }
  try {
    let entryPoint: string | undefined | void
    // resolve exports field with highest priority
    // using https://github.com/lukeed/resolve.exports
    if (data.exports) {
      entryPoint = resolveExports(data, '.', options, targetWeb)
    }
    // if exports resolved to .mjs, still resolve other fields.
    // This is because .mjs files can technically import .cjs files which would
    // make them invalid for pure ESM environments - so if other module/browser
    // fields are present, prioritize those instead.
    if (targetWeb && (!entryPoint || entryPoint.endsWith('.mjs'))) {
      // check browser field
      // https://github.com/defunctzombie/package-browser-field-spec
      const browserEntry =
        typeof data.browser === 'string'
          ? data.browser
          : isObject(data.browser) && data.browser['.']
      if (browserEntry) {
        // check if the package also has a "module" field.
        if (typeof data.module === 'string' && data.module !== browserEntry) {
          // if both are present, we may have a problem: some package points both
          // to ESM, with "module" targeting Node.js, while some packages points
          // "module" to browser ESM and "browser" to UMD.
          // the heuristics here is to actually read the browser entry when
          // possible and check for hints of UMD. If it is UMD, prefer "module"
          // instead; Otherwise, assume it's ESM and use it.
          const resolvedBrowserEntry = tryFsResolve(
            path.join(dir, browserEntry),
            options
          )
          if (resolvedBrowserEntry) {
            const content = fs.readFileSync(resolvedBrowserEntry, 'utf-8')
            if (
              (/typeof exports\s*==/.test(content) &&
                /typeof module\s*==/.test(content)) ||
              /module\.exports\s*=/.test(content)
            ) {
              // likely UMD or CJS(!!! e.g. firebase 7.x), prefer module
              entryPoint = data.module
            }
          }
        } else {
          entryPoint = browserEntry
        }
      }
    }
    // fall back to configured mainFields, then "main"/index.js
    if (!entryPoint || entryPoint.endsWith('.mjs')) {
      for (const field of options.mainFields || DEFAULT_MAIN_FIELDS) {
        if (typeof data[field] === 'string') {
          entryPoint = data[field]
          break
        }
      }
    }
    entryPoint = entryPoint || data.main || 'index.js'
    // make sure we don't get scripts when looking for sass
    if (
      options.mainFields?.[0] === 'sass' &&
      !options.extensions?.includes(path.extname(entryPoint))
    ) {
      entryPoint = ''
      options.skipPackageJson = true
    }
    // resolve object browser field in package.json
    const { browser: browserField } = data
    if (targetWeb && isObject(browserField)) {
      entryPoint = mapWithBrowserField(entryPoint, browserField) || entryPoint
    }
    entryPoint = path.join(dir, entryPoint)
    const resolvedEntryPoint = tryFsResolve(entryPoint, options)
    if (resolvedEntryPoint) {
      isDebug &&
        debug(
          `[package entry] ${chalk.cyan(id)} -> ${chalk.dim(
            resolvedEntryPoint
          )}`
        )
      setResolvedCache('.', resolvedEntryPoint, targetWeb)
      return resolvedEntryPoint
    } else {
      packageEntryFailure(id)
    }
  } catch (e) {
    packageEntryFailure(id, e.message)
  }
}
/**
 * Throw a descriptive error for an unresolvable package entry point.
 *
 * @param id - the package id whose entry could not be resolved
 * @param details - optional underlying error message to append
 * @throws always
 */
function packageEntryFailure(id: string, details?: string) {
  const reason = details ? ': ' + details : '.'
  throw new Error(
    `Failed to resolve entry for package "${id}". ` +
      `The package may have incorrect main/module/exports specified in its package.json` +
      reason
  )
}
/**
 * Resolve `key` against a package.json "exports" field via resolve.exports.
 * Condition order: production/development first, then 'module' (unless
 * resolving for require), then any user-configured extra conditions.
 */
function resolveExports(
  pkg: PackageData['data'],
  key: string,
  options: InternalResolveOptions,
  targetWeb: boolean
) {
  const conditions: string[] = [
    options.isProduction ? 'production' : 'development'
  ]
  if (!options.isRequire) {
    conditions.push('module')
  }
  conditions.push(...(options.conditions ?? []))
  return _resolveExports(pkg, key, {
    browser: targetWeb,
    require: options.isRequire,
    conditions
  })
}
/**
 * Resolve a deep import (`./sub/path` relative to a located package),
 * honoring the "exports" field and, for web targets, the "browser" field.
 * Results are memoized per (id, targetWeb) on the package data.
 *
 * @param id - package-relative subpath being requested (e.g. './sub')
 * @returns the resolved absolute path, or undefined when unresolvable
 * @throws when the subpath is not exposed by the package's "exports"
 */
function resolveDeepImport(
  id: string,
  {
    webResolvedImports,
    setResolvedCache,
    getResolvedCache,
    dir,
    data
  }: PackageData,
  targetWeb: boolean,
  options: InternalResolveOptions
): string | undefined {
  const cache = getResolvedCache(id, targetWeb)
  if (cache) {
    return cache
  }
  let relativeId: string | undefined | void = id
  const { exports: exportsField, browser: browserField } = data
  // map relative based on exports data
  if (exportsField) {
    if (isObject(exportsField) && !Array.isArray(exportsField)) {
      relativeId = resolveExports(data, relativeId, options, targetWeb)
    } else {
      // not exposed
      relativeId = undefined
    }
    if (!relativeId) {
      // fix: report the requested subpath (`id`) — `relativeId` is always
      // undefined/empty on this branch, which produced the confusing
      // message "Package subpath 'undefined' is not defined ..."
      throw new Error(
        `Package subpath '${id}' is not defined by "exports" in ` +
          `${path.join(dir, 'package.json')}.`
      )
    }
  } else if (targetWeb && isObject(browserField)) {
    const mapped = mapWithBrowserField(relativeId, browserField)
    if (mapped) {
      relativeId = mapped
    } else if (mapped === false) {
      // explicitly externalized for the browser ("./x": false)
      return (webResolvedImports[id] = browserExternalId)
    }
  }
  if (relativeId) {
    const resolved = tryFsResolve(
      path.join(dir, relativeId),
      options,
      !exportsField, // try index only if no exports field
      targetWeb
    )
    if (resolved) {
      isDebug &&
        debug(`[node/deep-import] ${chalk.cyan(id)} -> ${chalk.dim(resolved)}`)
      setResolvedCache(id, resolved, targetWeb)
      return resolved
    }
  }
}
/**
 * If the importer belongs to a known package with an object "browser"
 * field, map `id` through it. Returns a resolved id (with side-effect
 * info), the browser-external sentinel for `false` mappings, or
 * undefined when no mapping applies.
 */
function tryResolveBrowserMapping(
  id: string,
  importer: string | undefined,
  options: InternalResolveOptions,
  isFilePath: boolean
) {
  let res: string | undefined
  const pkg = importer && idToPkgMap.get(importer)
  if (pkg && isObject(pkg.data.browser)) {
    // browser-field keys are pkg-dir-relative; convert fs paths accordingly
    const mapId = isFilePath ? './' + slash(path.relative(pkg.dir, id)) : id
    const browserMappedPath = mapWithBrowserField(mapId, pkg.data.browser)
    if (browserMappedPath) {
      const fsPath = path.join(pkg.dir, browserMappedPath)
      if ((res = tryFsResolve(fsPath, options))) {
        isDebug &&
          debug(`[browser mapped] ${chalk.cyan(id)} -> ${chalk.dim(res)}`)
        idToPkgMap.set(res, pkg)
        return {
          id: res,
          moduleSideEffects: pkg.hasSideEffects(res)
        }
      }
    } else if (browserMappedPath === false) {
      // mapped to false => explicitly externalized for the browser
      return browserExternalId
    }
  }
}
/**
 * Map a path (relative to the package dir) through the package.json
 * "browser" object, returning the mapped relative path.
 *
 * - Returning `undefined` means there is no browser mapping for this id
 * - Returning `false` means this id is explicitly externalized for browser
 */
function mapWithBrowserField(
  relativePathInPkgDir: string,
  map: Record<string, string | false>
): string | false | undefined {
  const normalized = path.posix.normalize(relativePathInPkgDir)
  for (const key of Object.keys(map)) {
    const normalizedKey = path.posix.normalize(key)
    // a key matches exactly, or modulo a '.js' / '/index.js' suffix
    const matches =
      normalized === normalizedKey ||
      (normalizedKey.endsWith('.js') &&
        normalizedKey.slice(0, -'.js'.length) === normalized) ||
      (normalizedKey.endsWith('/index.js') &&
        normalizedKey.slice(0, -'/index.js'.length) === normalized)
    if (matches) {
      return map[key]
    }
  }
}
// true when `key` ends with `suffix` and stripping it yields exactly `path`
function equalWithoutSuffix(path: string, key: string, suffix: string) {
  if (!key.endsWith(suffix)) {
    return false
  }
  return key.slice(0, -suffix.length) === path
}
function getRealPath(resolved: string, preserveSymlinks?: boolean): string {
resolved = ensureVolumeInPath(resolved)
if (!preserveSymlinks && browserExternalId !== resolved) {
resolved = fs.realpathSync(resolved)
}
return normalizePath(resolved)
} | the_stack |
* Copyright (c) 2018 THL A29 Limited, a Tencent company. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { AbstractClient } from "../../../common/abstract_client"
import { ClientConfig } from "../../../common/interface"
import {
CreateDBInstancesRequest,
SetAutoRenewFlagRequest,
DescribeDatabasesRequest,
DescribeDBXlogsRequest,
DescribeAccountsRequest,
DeleteReadOnlyGroupResponse,
CreateInstancesRequest,
SpecItemInfo,
ModifyDBInstanceReadOnlyGroupResponse,
OpenServerlessDBExtranetAccessRequest,
RenewInstanceResponse,
DeleteServerlessDBInstanceResponse,
ModifyReadOnlyGroupConfigRequest,
AddDBInstanceToReadOnlyGroupRequest,
DescribeProductConfigRequest,
InitDBInstancesResponse,
RenewInstanceRequest,
RebalanceReadOnlyGroupRequest,
DescribeRegionsResponse,
DBInstanceNetInfo,
ZoneInfo,
InquiryPriceCreateDBInstancesRequest,
NormalQueryItem,
Tag,
DescribeDBInstanceAttributeRequest,
ModifyDBInstancesProjectResponse,
ServerlessDBAccount,
ModifyDBInstanceReadOnlyGroupRequest,
AddDBInstanceToReadOnlyGroupResponse,
CreateReadOnlyDBInstanceResponse,
DurationAnalysis,
DescribeOrdersResponse,
InquiryPriceCreateDBInstancesResponse,
ModifySwitchTimePeriodResponse,
DisIsolateDBInstancesRequest,
ServerlessDBInstanceNetInfo,
DescribeDBInstancesRequest,
ModifyAccountRemarkResponse,
UpgradeDBInstanceRequest,
DescribeZonesRequest,
DescribeReadOnlyGroupsResponse,
SetAutoRenewFlagResponse,
SpecInfo,
ResetAccountPasswordResponse,
CloseServerlessDBExtranetAccessRequest,
ReadOnlyGroup,
PgDeal,
DescribeDBErrlogsRequest,
DestroyDBInstanceRequest,
CreateInstancesResponse,
AnalysisItems,
Xlog,
DescribeServerlessDBInstancesRequest,
DescribeDBBackupsResponse,
DescribeRegionsRequest,
DescribeSlowQueryListResponse,
ServerlessDBInstance,
CreateReadOnlyGroupRequest,
CloseServerlessDBExtranetAccessResponse,
RestartDBInstanceRequest,
Detail,
IsolateDBInstancesResponse,
OpenDBExtranetAccessResponse,
InquiryPriceUpgradeDBInstanceRequest,
RebalanceReadOnlyGroupResponse,
ModifyDBInstanceNameRequest,
InquiryPriceRenewDBInstanceResponse,
DescribeSlowQueryAnalysisRequest,
ErrLogDetail,
DescribeServerlessDBInstancesResponse,
IsolateDBInstancesRequest,
InitDBInstancesRequest,
DBInstance,
DescribeProductConfigResponse,
DeleteReadOnlyGroupRequest,
DescribeDBBackupsRequest,
Filter,
DisIsolateDBInstancesResponse,
OpenDBExtranetAccessRequest,
SlowlogDetail,
RemoveDBInstanceFromReadOnlyGroupRequest,
ModifyDBInstanceNameResponse,
CloseDBExtranetAccessResponse,
CreateReadOnlyDBInstanceRequest,
DescribeZonesResponse,
CreateServerlessDBInstanceResponse,
DescribeDatabasesResponse,
DescribeOrdersRequest,
ModifyAccountRemarkRequest,
CloseDBExtranetAccessRequest,
CreateServerlessDBInstanceRequest,
InquiryPriceRenewDBInstanceRequest,
CreateReadOnlyGroupResponse,
DeleteServerlessDBInstanceRequest,
ModifyReadOnlyGroupConfigResponse,
AccountInfo,
DBBackup,
DescribeDBErrlogsResponse,
InquiryPriceUpgradeDBInstanceResponse,
ModifySwitchTimePeriodRequest,
DescribeSlowQueryListRequest,
RegionInfo,
RestartDBInstanceResponse,
DescribeDBInstancesResponse,
RemoveDBInstanceFromReadOnlyGroupResponse,
ResetAccountPasswordRequest,
DescribeSlowQueryAnalysisResponse,
RawSlowQuery,
DescribeReadOnlyGroupsRequest,
DescribeAccountsResponse,
UpgradeDBInstanceResponse,
ModifyDBInstancesProjectRequest,
DescribeDBSlowlogsRequest,
DestroyDBInstanceResponse,
DescribeDBInstanceAttributeResponse,
OpenServerlessDBExtranetAccessResponse,
DescribeDBXlogsResponse,
DescribeDBSlowlogsResponse,
CreateDBInstancesResponse,
} from "./postgres_models"
/**
* postgres client
* @class
*/
export class Client extends AbstractClient {
  constructor(clientConfig: ClientConfig) {
    // bind this client to the postgres endpoint and the 2017-03-12 API version
    super("postgres.tencentcloudapi.com", "2017-03-12", clientConfig)
  }
  /**
   * This API (ModifyDBInstancesProject) is used to transfer instances to another project.
   */
  async ModifyDBInstancesProject(
    req: ModifyDBInstancesProjectRequest,
    cb?: (error: string, rep: ModifyDBInstancesProjectResponse) => void
  ): Promise<ModifyDBInstancesProjectResponse> {
    return this.request("ModifyDBInstancesProject", req, cb)
  }
  /**
   * This API (DescribeOrders) is used to query order information.
   */
  async DescribeOrders(
    req: DescribeOrdersRequest,
    cb?: (error: string, rep: DescribeOrdersResponse) => void
  ): Promise<DescribeOrdersResponse> {
    return this.request("DescribeOrders", req, cb)
  }
  /**
   * This API (DestroyDBInstance) is used to permanently terminate the instance specified
   * by DBInstanceId. The instance data is deleted permanently and cannot be recovered;
   * only instances already in isolation can be terminated.
   */
  async DestroyDBInstance(
    req: DestroyDBInstanceRequest,
    cb?: (error: string, rep: DestroyDBInstanceResponse) => void
  ): Promise<DestroyDBInstanceResponse> {
    return this.request("DestroyDBInstance", req, cb)
  }
  /**
   * This API (DescribeDBBackups) is used to query the backup list of an instance.
   */
  async DescribeDBBackups(
    req: DescribeDBBackupsRequest,
    cb?: (error: string, rep: DescribeDBBackupsResponse) => void
  ): Promise<DescribeDBBackupsResponse> {
    return this.request("DescribeDBBackups", req, cb)
  }
  /**
   * This API (ResetAccountPassword) is used to reset the password of an instance account.
   */
  async ResetAccountPassword(
    req: ResetAccountPasswordRequest,
    cb?: (error: string, rep: ResetAccountPasswordResponse) => void
  ): Promise<ResetAccountPasswordResponse> {
    return this.request("ResetAccountPassword", req, cb)
  }
  /**
   * This API (DescribeDBErrlogs) is used to get error logs.
   */
  async DescribeDBErrlogs(
    req: DescribeDBErrlogsRequest,
    cb?: (error: string, rep: DescribeDBErrlogsResponse) => void
  ): Promise<DescribeDBErrlogsResponse> {
    return this.request("DescribeDBErrlogs", req, cb)
  }
  /**
   * This API (DescribeSlowQueryAnalysis) is used to collect all slow queries in the
   * specified time range, aggregate them by abstracting the SQL statement parameters,
   * and return the list of similar SQL statements.
   */
  async DescribeSlowQueryAnalysis(
    req: DescribeSlowQueryAnalysisRequest,
    cb?: (error: string, rep: DescribeSlowQueryAnalysisResponse) => void
  ): Promise<DescribeSlowQueryAnalysisResponse> {
    return this.request("DescribeSlowQueryAnalysis", req, cb)
  }
  /**
   * After an upgrade completes, this API forces an instance that is waiting
   * for switchover to switch immediately.
   */
  async ModifySwitchTimePeriod(
    req: ModifySwitchTimePeriodRequest,
    cb?: (error: string, rep: ModifySwitchTimePeriodResponse) => void
  ): Promise<ModifySwitchTimePeriodResponse> {
    return this.request("ModifySwitchTimePeriod", req, cb)
  }
  /**
   * This API (InquiryPriceCreateDBInstances) is used to query the price of creating
   * one or more instances.
   */
  async InquiryPriceCreateDBInstances(
    req: InquiryPriceCreateDBInstancesRequest,
    cb?: (error: string, rep: InquiryPriceCreateDBInstancesResponse) => void
  ): Promise<InquiryPriceCreateDBInstancesResponse> {
    return this.request("InquiryPriceCreateDBInstances", req, cb)
  }
  /**
   * This API (OpenDBExtranetAccess) is used to enable public network access.
   */
  async OpenDBExtranetAccess(
    req: OpenDBExtranetAccessRequest,
    cb?: (error: string, rep: OpenDBExtranetAccessResponse) => void
  ): Promise<OpenDBExtranetAccessResponse> {
    return this.request("OpenDBExtranetAccess", req, cb)
  }
  /**
   * This API is used to disable public network access for a ServerlessDB instance.
   */
  async CloseServerlessDBExtranetAccess(
    req: CloseServerlessDBExtranetAccessRequest,
    cb?: (error: string, rep: CloseServerlessDBExtranetAccessResponse) => void
  ): Promise<CloseServerlessDBExtranetAccessResponse> {
    return this.request("CloseServerlessDBExtranetAccess", req, cb)
  }
  /**
   * This API (DeleteReadOnlyGroup) is used to delete the specified read-only group.
   */
  async DeleteReadOnlyGroup(
    req: DeleteReadOnlyGroupRequest,
    cb?: (error: string, rep: DeleteReadOnlyGroupResponse) => void
  ): Promise<DeleteReadOnlyGroupResponse> {
    return this.request("DeleteReadOnlyGroup", req, cb)
  }
  /**
   * This API (ModifyAccountRemark) is used to modify account remarks.
   */
  async ModifyAccountRemark(
    req: ModifyAccountRemarkRequest,
    cb?: (error: string, rep: ModifyAccountRemarkResponse) => void
  ): Promise<ModifyAccountRemarkResponse> {
    return this.request("ModifyAccountRemark", req, cb)
  }
  /**
   * This API (DescribeDBXlogs) is used to get the Xlog list of an instance.
   */
  async DescribeDBXlogs(
    req: DescribeDBXlogsRequest,
    cb?: (error: string, rep: DescribeDBXlogsResponse) => void
  ): Promise<DescribeDBXlogsResponse> {
    return this.request("DescribeDBXlogs", req, cb)
  }
  /**
   * This API (SetAutoRenewFlag) is used to configure auto-renewal.
   */
  async SetAutoRenewFlag(
    req: SetAutoRenewFlagRequest,
    cb?: (error: string, rep: SetAutoRenewFlagResponse) => void
  ): Promise<SetAutoRenewFlagResponse> {
    return this.request("SetAutoRenewFlag", req, cb)
  }
  /**
   * This API (DescribeDBInstanceAttribute) is used to query the details of an instance.
   */
  async DescribeDBInstanceAttribute(
    req: DescribeDBInstanceAttributeRequest,
    cb?: (error: string, rep: DescribeDBInstanceAttributeResponse) => void
  ): Promise<DescribeDBInstanceAttributeResponse> {
    return this.request("DescribeDBInstanceAttribute", req, cb)
  }
  /**
   * This API (ModifyDBInstanceName) is used to rename a PostgreSQL instance.
   */
  async ModifyDBInstanceName(
    req: ModifyDBInstanceNameRequest,
    cb?: (error: string, rep: ModifyDBInstanceNameResponse) => void
  ): Promise<ModifyDBInstanceNameResponse> {
    return this.request("ModifyDBInstanceName", req, cb)
  }
  /**
   * This API (UpgradeDBInstance) is used to upgrade the configuration of an instance.
   */
  async UpgradeDBInstance(
    req: UpgradeDBInstanceRequest,
    cb?: (error: string, rep: UpgradeDBInstanceResponse) => void
  ): Promise<UpgradeDBInstanceResponse> {
    return this.request("UpgradeDBInstance", req, cb)
  }
  /**
   * This API (CreateDBInstances) is used to create one or more PostgreSQL instances.
   * Instances are only delivered, not initialized, by this call.
   */
  async CreateDBInstances(
    req: CreateDBInstancesRequest,
    cb?: (error: string, rep: CreateDBInstancesResponse) => void
  ): Promise<CreateDBInstancesResponse> {
    return this.request("CreateDBInstances", req, cb)
  }
  /**
   * This API (CreateServerlessDBInstance) is used to create a ServerlessDB instance;
   * the instance ID is returned on success.
   */
  async CreateServerlessDBInstance(
    req: CreateServerlessDBInstanceRequest,
    cb?: (error: string, rep: CreateServerlessDBInstanceResponse) => void
  ): Promise<CreateServerlessDBInstanceResponse> {
    return this.request("CreateServerlessDBInstance", req, cb)
  }
  /**
   * This API (DescribeDBInstances) is used to query the details of one or more instances.
   */
  async DescribeDBInstances(
    req: DescribeDBInstancesRequest,
    cb?: (error: string, rep: DescribeDBInstancesResponse) => void
  ): Promise<DescribeDBInstancesResponse> {
    return this.request("DescribeDBInstances", req, cb)
  }
  /**
   * This API (DescribeZones) is used to query the supported availability zones.
   */
  async DescribeZones(
    req?: DescribeZonesRequest,
    cb?: (error: string, rep: DescribeZonesResponse) => void
  ): Promise<DescribeZonesResponse> {
    return this.request("DescribeZones", req, cb)
  }
  /**
   * This API (RestartDBInstance) is used to restart an instance.
   */
  async RestartDBInstance(
    req: RestartDBInstanceRequest,
    cb?: (error: string, rep: RestartDBInstanceResponse) => void
  ): Promise<RestartDBInstanceResponse> {
    return this.request("RestartDBInstance", req, cb)
  }
  /**
   * This API (IsolateDBInstances) is used to isolate instances.
   */
  async IsolateDBInstances(
    req: IsolateDBInstancesRequest,
    cb?: (error: string, rep: IsolateDBInstancesResponse) => void
  ): Promise<IsolateDBInstancesResponse> {
    return this.request("IsolateDBInstances", req, cb)
  }
  /**
   * This API (DeleteServerlessDBInstance) is used to delete a ServerlessDB instance.
   */
  async DeleteServerlessDBInstance(
    req: DeleteServerlessDBInstanceRequest,
    cb?: (error: string, rep: DeleteServerlessDBInstanceResponse) => void
  ): Promise<DeleteServerlessDBInstanceResponse> {
    return this.request("DeleteServerlessDBInstance", req, cb)
  }
  /**
   * This API (InitDBInstances) is used to initialize TencentDB for PostgreSQL instances.
   */
  async InitDBInstances(
    req: InitDBInstancesRequest,
    cb?: (error: string, rep: InitDBInstancesResponse) => void
  ): Promise<InitDBInstancesResponse> {
    return this.request("InitDBInstances", req, cb)
  }
/**
* 本接口(InquiryPriceUpgradeDBInstance)用于查询升级实例的价格。
*/
async InquiryPriceUpgradeDBInstance(
req: InquiryPriceUpgradeDBInstanceRequest,
cb?: (error: string, rep: InquiryPriceUpgradeDBInstanceResponse) => void
): Promise<InquiryPriceUpgradeDBInstanceResponse> {
return this.request("InquiryPriceUpgradeDBInstance", req, cb)
}
/**
* 本接口(RemoveDBInstanceFromReadOnlyGroup)用户将只读实例从只读组中移除
*/
async RemoveDBInstanceFromReadOnlyGroup(
req: RemoveDBInstanceFromReadOnlyGroupRequest,
cb?: (error: string, rep: RemoveDBInstanceFromReadOnlyGroupResponse) => void
): Promise<RemoveDBInstanceFromReadOnlyGroupResponse> {
return this.request("RemoveDBInstanceFromReadOnlyGroup", req, cb)
}
  /**
   * This API (CreateReadOnlyGroup) is used to create a read-only group.
   */
  async CreateReadOnlyGroup(
    req: CreateReadOnlyGroupRequest,
    cb?: (error: string, rep: CreateReadOnlyGroupResponse) => void
  ): Promise<CreateReadOnlyGroupResponse> {
    return this.request("CreateReadOnlyGroup", req, cb)
  }
  /**
   * This API (DescribeRegions) is used to query the purchasable regions. `req` is optional.
   */
  async DescribeRegions(
    req?: DescribeRegionsRequest,
    cb?: (error: string, rep: DescribeRegionsResponse) => void
  ): Promise<DescribeRegionsResponse> {
    return this.request("DescribeRegions", req, cb)
  }
  /**
   * This API (DescribeSlowQueryList) is used to query all slow queries within a specified time range.
   */
  async DescribeSlowQueryList(
    req: DescribeSlowQueryListRequest,
    cb?: (error: string, rep: DescribeSlowQueryListResponse) => void
  ): Promise<DescribeSlowQueryListResponse> {
    return this.request("DescribeSlowQueryList", req, cb)
  }
  /**
   * This API (ModifyDBInstanceReadOnlyGroup) is used to modify the read-only group an instance belongs to.
   */
  async ModifyDBInstanceReadOnlyGroup(
    req: ModifyDBInstanceReadOnlyGroupRequest,
    cb?: (error: string, rep: ModifyDBInstanceReadOnlyGroupResponse) => void
  ): Promise<ModifyDBInstanceReadOnlyGroupResponse> {
    return this.request("ModifyDBInstanceReadOnlyGroup", req, cb)
  }
  /**
   * This API (CloseDBExtranetAccess) is used to disable the public network address of an instance.
   */
  async CloseDBExtranetAccess(
    req: CloseDBExtranetAccessRequest,
    cb?: (error: string, rep: CloseDBExtranetAccessResponse) => void
  ): Promise<CloseDBExtranetAccessResponse> {
    return this.request("CloseDBExtranetAccess", req, cb)
  }
  /**
   * This API (AddDBInstanceToReadOnlyGroup) is used to add a read-only instance to a read-only group.
   */
  async AddDBInstanceToReadOnlyGroup(
    req: AddDBInstanceToReadOnlyGroupRequest,
    cb?: (error: string, rep: AddDBInstanceToReadOnlyGroupResponse) => void
  ): Promise<AddDBInstanceToReadOnlyGroupResponse> {
    return this.request("AddDBInstanceToReadOnlyGroup", req, cb)
  }
  /**
   * This API (InquiryPriceRenewDBInstance) is used to query the price of renewing an instance.
   */
  async InquiryPriceRenewDBInstance(
    req: InquiryPriceRenewDBInstanceRequest,
    cb?: (error: string, rep: InquiryPriceRenewDBInstanceResponse) => void
  ): Promise<InquiryPriceRenewDBInstanceResponse> {
    return this.request("InquiryPriceRenewDBInstance", req, cb)
  }
  /**
   * This API (DescribeAccounts) is used to get the list of accounts of an instance.
   */
  async DescribeAccounts(
    req: DescribeAccountsRequest,
    cb?: (error: string, rep: DescribeAccountsResponse) => void
  ): Promise<DescribeAccountsResponse> {
    return this.request("DescribeAccounts", req, cb)
  }
  /**
   * This API (DisIsolateDBInstances) is used to remove instances from isolation.
   */
  async DisIsolateDBInstances(
    req: DisIsolateDBInstancesRequest,
    cb?: (error: string, rep: DisIsolateDBInstancesResponse) => void
  ): Promise<DisIsolateDBInstancesResponse> {
    return this.request("DisIsolateDBInstances", req, cb)
  }
  /**
   * Queries the details of one or more ServerlessDB instances.
   */
  async DescribeServerlessDBInstances(
    req: DescribeServerlessDBInstancesRequest,
    cb?: (error: string, rep: DescribeServerlessDBInstancesResponse) => void
  ): Promise<DescribeServerlessDBInstancesResponse> {
    return this.request("DescribeServerlessDBInstances", req, cb)
  }
  /**
   * This API (RebalanceReadOnlyGroup) is used to rebalance the load among the instances in an RO group.
   * Note: the RO instances in the group will have their database connections briefly interrupted;
   * make sure the application can reconnect, and use with caution.
   */
  async RebalanceReadOnlyGroup(
    req: RebalanceReadOnlyGroupRequest,
    cb?: (error: string, rep: RebalanceReadOnlyGroupResponse) => void
  ): Promise<RebalanceReadOnlyGroupResponse> {
    return this.request("RebalanceReadOnlyGroup", req, cb)
  }
  /**
   * This API (RenewInstance) is used to renew an instance.
   */
  async RenewInstance(
    req: RenewInstanceRequest,
    cb?: (error: string, rep: RenewInstanceResponse) => void
  ): Promise<RenewInstanceResponse> {
    return this.request("RenewInstance", req, cb)
  }
  /**
   * This API (DescribeDatabases) is used to pull the list of databases.
   */
  async DescribeDatabases(
    req: DescribeDatabasesRequest,
    cb?: (error: string, rep: DescribeDatabasesResponse) => void
  ): Promise<DescribeDatabasesResponse> {
    return this.request("DescribeDatabases", req, cb)
  }
  /**
   * This API (CreateReadOnlyDBInstance) is used to create a read-only instance.
   */
  async CreateReadOnlyDBInstance(
    req: CreateReadOnlyDBInstanceRequest,
    cb?: (error: string, rep: CreateReadOnlyDBInstanceResponse) => void
  ): Promise<CreateReadOnlyDBInstanceResponse> {
    return this.request("CreateReadOnlyDBInstance", req, cb)
  }
  /**
   * This API (DescribeReadOnlyGroups) is used to query the read-only groups of a specified instance.
   */
  async DescribeReadOnlyGroups(
    req: DescribeReadOnlyGroupsRequest,
    cb?: (error: string, rep: DescribeReadOnlyGroupsResponse) => void
  ): Promise<DescribeReadOnlyGroupsResponse> {
    return this.request("DescribeReadOnlyGroups", req, cb)
  }
  /**
   * This API (ModifyReadOnlyGroupConfig) is used to update the configuration of a read-only group.
   */
  async ModifyReadOnlyGroupConfig(
    req: ModifyReadOnlyGroupConfigRequest,
    cb?: (error: string, rep: ModifyReadOnlyGroupConfigResponse) => void
  ): Promise<ModifyReadOnlyGroupConfigResponse> {
    return this.request("ModifyReadOnlyGroupConfig", req, cb)
  }
  /**
   * This API (CreateInstances) is used to create one or more PostgreSQL instances.
   * Instances created through this API need no initialization and can be used directly.
   */
  async CreateInstances(
    req: CreateInstancesRequest,
    cb?: (error: string, rep: CreateInstancesResponse) => void
  ): Promise<CreateInstancesResponse> {
    return this.request("CreateInstances", req, cb)
  }
  /**
   * This API (DescribeProductConfig) is used to query the purchasable specification configurations.
   */
  async DescribeProductConfig(
    req: DescribeProductConfigRequest,
    cb?: (error: string, rep: DescribeProductConfigResponse) => void
  ): Promise<DescribeProductConfigResponse> {
    return this.request("DescribeProductConfig", req, cb)
  }
  /**
   * This API (DescribeDBSlowlogs) is used to get slow query logs.
   * @deprecated Officially deprecated on 2021-09-01; it no longer returns any data.
   * Use DescribeSlowQueryList instead; see https://cloud.tencent.com/document/product/409/60540
   */
  async DescribeDBSlowlogs(
    req: DescribeDBSlowlogsRequest,
    cb?: (error: string, rep: DescribeDBSlowlogsResponse) => void
  ): Promise<DescribeDBSlowlogsResponse> {
    return this.request("DescribeDBSlowlogs", req, cb)
  }
  /**
   * Enables public network access for a ServerlessDB instance.
   */
  async OpenServerlessDBExtranetAccess(
    req: OpenServerlessDBExtranetAccessRequest,
    cb?: (error: string, rep: OpenServerlessDBExtranetAccessResponse) => void
  ): Promise<OpenServerlessDBExtranetAccessResponse> {
    return this.request("OpenServerlessDBExtranetAccess", req, cb)
  }
} | the_stack |
import Extension from "../service/extension";
import {
Options,
EAction,
Site,
SiteSchema,
Plugin,
ButtonOption,
NoticeOptions,
EDownloadClientType,
ESizeUnit,
EDataResultType,
Request,
EButtonType,
ECommonKey,
Dictionary,
EPluginPosition
} from "@/interface/common";
import { APP } from "@/service/api";
import { filters } from "@/service/filters";
import { PathHandler } from "@/service/pathHandler";
import i18n from "i18next";
import { InfoParser } from "@/background/infoParser";
import { PPF } from "@/service/public";
// Augment the global Window type: `Drag` is attached at runtime by a vendor drag-and-drop script.
declare global {
  interface Window {
    Drag: any;
  }
}
/**
 * The plugin's page-level script; it is injected into every page.
 * Builds the floating toolbar, loads site/schema-specific plugins and styles,
 * and relays actions to the extension's background service.
 */
class PTPContent {
  public extension: Extension;
  public options: Options = {
    sites: [],
    clients: []
  };
  // Site definition matched against the current page's host (null-ish until resolved)
  public site: Site = {
    name: ""
  };
  public action = EAction;
  public filters = filters;
  public defaultClient: any;
  public downloadClientType = EDownloadClientType;
  public sizeUnit = ESizeUnit;
  public buttonType = EButtonType;
  public allSiteKey = ECommonKey.allSite;
  public schema: SiteSchema = {};
  // Scripts/styles queued for injection ({ type: "file" | "code", content: string })
  private scripts: any[] = [];
  private styles: any[] = [];
  // Open indeterminate notifications, keyed by NoticeJs id
  private messageItems: Dictionary<any> = {};
  public buttonBar: JQuery = <any>null;
  public droper: JQuery = $(
    "<div style='display:none;' class='pt-plugin-droper'/>"
  );
  private buttons: any[] = [];
  private logo: JQuery = <any>null;
  // Whether the plugin has been re-enabled/restarted (currently unreliable); the onSuspend event cannot fire.
  private backgroundServiceIsStoped = false;
  // Receives the page-level app instance
  public pageApp: any;
  // Current page URL
  public locationURL: string = location.href;
  // Save-path handler
  public pathHandler: PathHandler = new PathHandler();
  // Multi-language (i18n) handler
  public i18n = i18n;
  // Page info parser
  public infoParser: InfoParser = new InfoParser();
  // Selector configuration for the current page
  public pageSelector: any = {};
  // Automatically determine the toolbar position
  public autoPosition: boolean = true;
  // localStorage key holding the current toolbar position
  private positionStorageKey = "";
  constructor() {
    this.extension = new Extension();
    if (this.extension.isExtensionMode) {
      this.readConfig();
      this.initBrowserEvent();
    }
  }
  // Load plugin options from the background service, then bootstrap i18n (which in turn calls init()).
  private readConfig() {
    this.extension.sendRequest(EAction.readConfig, (result: any) => {
      this.options = result;
      this.initI18n();
    });
  }
  private init() {
    this.initPages();
    // Since window events cannot be bound directly, use a timer to watch for URL changes.
    // Mainly for single-page sites: after the address is changed via history.pushState etc.,
    // the plugin icons can be recreated.
    setInterval(() => {
      this.checkLocationURL();
    }, 1000);
  }
  /**
   * Initialize the multi-language (i18n) environment
   */
  private initI18n() {
    this.extension
      .sendRequest(EAction.getCurrentLanguageResource, null, "contentPage")
      .then(resource => {
        // console.log(resource);
        let locale = this.options.locale || "en";
        // Initialize
        i18n.init({
          lng: locale,
          interpolation: {
            prefix: "{",
            suffix: "}"
          },
          resources: {
            [locale]: {
              translation: resource
            }
          }
        });
        this.init();
      });
  }
  /**
   * Get the defined site information for the specified host
   * @param host
   * @return a deep copy of the matched site, or null when no definition matches
   */
  public getSiteFromHost(host: string) {
    APP.debugMode && console.log("getSiteFromHost", host);
    let sites: Site[] = [];
    if (this.options.sites) {
      sites.push(...this.options.sites);
    }
    if (this.options.system && this.options.system.publicSites) {
      sites.push(...this.options.system.publicSites);
    }
    let site = sites.find((item: Site) => {
      let cdn = [item.url].concat(item.cdn);
      return item.host == host || cdn.join("").indexOf(host) > -1;
    });
    if (site) {
      return JSON.parse(JSON.stringify(site));
    }
    return null;
  }
  /**
   * Initialize the additional page content when conditions are met
   */
  private initPages() {
    this.initSiteConfig().then(() => {
      this.initPlugins();
    }).catch(() => {
      APP.debugMode && console.log("initPages 失败");
    });
  }
  /**
   * Initialize the site configuration
   */
  private initSiteConfig(): Promise<any> {
    return new Promise<any>((resolve?: any, reject?: any) => {
      if (!this.options.showToolbarOnContentPage) {
        reject();
        return;
      }
      // Check whether the site owning the current page has been defined
      this.site = this.getSiteFromHost(window.location.hostname);
      if (this.site) {
        // Adapt to multiple domains
        this.site.url = window.location.origin + "/";
      }
      // If the current site is not defined, stop here
      if (this.site && this.site.name) {
        if (typeof this.site.schema === "string") {
          this.schema =
            this.options.system &&
            this.options.system.schemas &&
            this.options.system.schemas.find((item: SiteSchema) => {
              return item.name == this.site.schema;
            });
        } else {
          let site =
            this.options.system &&
            this.options.system.sites &&
            this.options.system.sites.find((item: Site) => {
              return item.host == this.site.host;
            });
          if (site && site.schema && typeof site.schema !== "string") {
            this.schema = site.schema;
            this.schema.siteOnly = true;
          }
        }
        // Wait for the page selectors to finish loading before loading plugin content
        this.initPageSelector().finally(() => {
          resolve();
        });
      } else {
        reject();
      }
    });
  }
  /**
   * Initialize the plugins that match the current page
   */
  private initPlugins() {
    this.positionStorageKey = `pt-plugin-${this.site.host}-position`;
    this.scripts = [];
    this.styles = [];
    // Initialize the plugin button bar
    this.initButtonBar();
    this.initDroper();
    if (this.schema && this.schema.plugins) {
      // Collect the extra scripts required by the current site's schema
      this.schema.plugins.forEach((plugin: Plugin) => {
        let index = plugin.pages.findIndex((page: string) => {
          let fullpath = window.location.href;
          let path = window.location.pathname;
          let indexOf = fullpath.indexOf(page);
          // If the page does not contain it, try it as a regular expression
          if (indexOf === -1) {
            return new RegExp(page, "").test(path);
          }
          return true;
          // return window.location.pathname.indexOf(page) !== -1;
        });
        if (index !== -1) {
          plugin.scripts.forEach((script: string) => {
            let path = script;
            // Check whether it is a relative path
            if (path.substr(0, 1) !== "/") {
              path = this.schema.siteOnly
                ? `sites/${this.site.host}/${script}`
                : `schemas/${this.schema.name}/${script}`;
            }
            this.scripts.push({
              type: "file",
              content: path
            });
          });
        }
      });
    }
    // Get the system-defined site information
    let systemSite =
      this.options.system &&
      this.options.system.sites &&
      this.options.system.sites.find((item: Site) => {
        return item.host == this.site.host;
      });
    if (!this.site.plugins) {
      this.site.plugins = [];
    } else if (this.site.schema !== "publicSite" && systemSite) {
      for (let index = this.site.plugins.length - 1; index >= 0; index--) {
        const item = this.site.plugins[index];
        // Remove non-custom plugins; they are re-added from the system definition below
        if (!item.isCustom) {
          this.site.plugins.splice(index, 1);
        }
      }
    }
    if (systemSite && systemSite.plugins) {
      // Prepend the system-defined plugins so base libraries load first
      this.site.plugins = systemSite.plugins.concat(this.site.plugins);
    }
    // Site-specific scripts
    if (this.site.plugins) {
      let siteConfigPath =
        this.site.schema == "publicSite" ? "publicSites" : "sites";
      if (this.site.path) {
        siteConfigPath += `/${this.site.path}`;
      } else {
        siteConfigPath += `/${this.site.host}`;
      }
      this.site.plugins.forEach((plugin: Plugin) => {
        let index = plugin.pages.findIndex((page: string) => {
          let path = window.location.pathname;
          let indexOf = path.indexOf(page);
          // If the page does not contain it, try it as a regular expression
          if (indexOf === -1) {
            return new RegExp(page, "").test(path);
          }
          return true;
        });
        if (index !== -1) {
          plugin.scripts &&
            plugin.scripts.forEach((script: any) => {
              let path = script;
              // Check whether it is a relative path
              if (path.substr(0, 1) !== "/" && path.substr(0, 4) !== "http") {
                path = `${siteConfigPath}/${script}`;
              }
              // File
              this.scripts.push({
                type: "file",
                content: path
              });
            });
          if (plugin.script) {
            // Inline code
            this.scripts.push({
              type: "code",
              content: plugin.script
            });
          }
          if (plugin.styles) {
            plugin.styles.forEach((style: string) => {
              let path = style;
              if (path.substr(0, 1) !== "/" && path.substr(0, 4) !== "http") {
                path = `${siteConfigPath}/${style}`;
              }
              this.styles.push({
                type: "file",
                content: path
              });
            });
          }
          if (plugin.style) {
            // Inline code
            this.styles.push({
              type: "code",
              content: plugin.style
            });
          }
        }
      });
    }
    if (this.styles && this.styles.length > 0) {
      // NOTE(review): items in this.styles are { type, content } objects, but the callback
      // parameter is annotated as string — presumably APP.applyStyle accepts the object; verify.
      this.styles.forEach((path: string) => {
        APP.applyStyle(path);
      });
    }
    // Add the scripts and execute them
    if (this.scripts && this.scripts.length > 0) {
      this.scripts.forEach((script: any) => {
        APP.addScript(script);
      });
      // Execute all scripts in order
      APP.applyScripts();
    }
    // Notify the background service that a new content page was added
    this.extension.sendRequest(EAction.addContentPage).catch(error => {
      console.log(error);
    });
  }
  /**
   * Invoke a background action
   * @param action the command to execute
   * @param data extra data to pass along
   * @return Promise
   */
  public call(action: EAction, data?: any): Promise<any> {
    return new Promise<any>((resolve?: any, reject?: any) => {
      if (this.backgroundServiceIsStoped) {
        reject({
          msg: i18n.t("backgroundServiceIsStoped") // "The plugin was disabled or restarted; refresh the page and retry"
        });
        return;
      }
      try {
        this.extension
          .sendRequest(action, null, data)
          .then((result: any) => {
            if (result) {
              resolve && resolve(result);
            } else {
              reject && reject();
            }
          })
          .catch((result: any) => {
            reject(result);
          });
      } catch (error) {
        // `${action}` failed to execute; the background service may be unavailable
        this.showNotice(
          i18n.t("actionExecutionFailed", {
            action
          })
        );
        reject(error);
      }
    });
  }
  /**
   * Initialize the button bar
   */
  private initButtonBar() {
    // Remove any previously created plugin button bar
    if ($(".pt-plugin-body").length) {
      $(".pt-plugin-body").remove();
    }
    this.buttonBar = $("<div class='pt-plugin-body'/>").appendTo(document.body);
    // Enable dragging
    if (window.Drag) {
      let dragTitle = $(
        "<div class='pt-plugin-drag-title' title='" + i18n.t("dragTitle") + "'>"
      ).appendTo(this.buttonBar);
      new window.Drag(this.buttonBar.get(0), {
        handle: dragTitle.get(0),
        onStop: (result: any) => {
          console.log(result);
          this.saveButtonBarPosition(result);
        }
      });
      // Double-click resets the position
      dragTitle.on("dblclick", () => {
        this.resetButtonBarPosition();
      });
    }
    this.logo = $(
      "<div class='pt-plugin-logo' title='" + i18n.t("pluginTitle") + "'/>"
    ).appendTo(this.buttonBar);
    this.logo.on("click", () => {
      this.call(EAction.openOptions);
    });
    this.initButtonBarPosition();
    this.buttonBar.hide();
  }
  /**
   * Initialize the toolbar position
   */
  private initButtonBarPosition() {
    let result = window.localStorage.getItem(this.positionStorageKey);
    if (result) {
      try {
        let position = JSON.parse(result);
        this.buttonBar.css({
          top: position.top,
          left: position.left
        });
        this.autoPosition = false;
        return;
      } catch (error) {
        console.log(error);
      }
    }
    this.buttonBar.css({
      top: window.innerHeight / 2,
      left: "unset"
    });
    if (this.options.position == EPluginPosition.left) {
      this.buttonBar.css({
        right: "unset",
        left: "5px"
      });
    }
  }
  /**
   * Reset the toolbar position
   */
  private resetButtonBarPosition() {
    window.localStorage.removeItem(this.positionStorageKey);
    this.autoPosition = true;
    this.initButtonBarPosition();
    this.recalculateButtonBarPosition();
  }
  /**
   * Save the toolbar position
   * @param position
   */
  private saveButtonBarPosition(position: any) {
    window.localStorage.setItem(
      this.positionStorageKey,
      JSON.stringify(position)
    );
    this.autoPosition = false;
  }
  /**
   * Add a button
   * @param options button options
   */
  public addButton(options: ButtonOption) {
    options = Object.assign(
      {
        type: EButtonType.normal
      },
      options
    );
    let line = $("<hr/>").appendTo(this.buttonBar);
    let buttonType = "<a class='pt-plugin-button'/>";
    if (!options.click || options.type == EButtonType.label) {
      buttonType = "<span class='pt-plugin-button'/>";
    }
    let button = $(buttonType)
      .attr({
        title: options.title,
        key: options.key
      })
      .data("line", line);
    let inner = $("<div class='pt-plugin-button-inner'/>").appendTo(button);
    let loading = $("<div class='pt-plugin-loading'/>").appendTo(button);
    let success = $("<div class='action-success'/>")
      .html('<div class="action-success-ani"></div>')
      .appendTo(button);
    if (options.icon) {
      $("<i class='material-icons md-36'/>")
        .html(options.icon)
        .appendTo(inner);
    }
    $("<div/>")
      .html(options.label)
      .appendTo(inner);
    let onSuccess = (result: any) => {
      if (options.type == EButtonType.normal) {
        loading.hide();
      } else {
        inner.hide();
      }
      success.show();
      if (result && result.msg) {
        if (!result.type) {
          result.type = "success";
        }
        this.showNotice(result);
      }
      setTimeout(() => {
        success.hide();
        inner.show();
      }, 2000);
    };
    let onError = (error: any) => {
      if (options.type == EButtonType.normal) {
        loading.hide();
      }
      inner.show();
      this.showNotice({
        msg:
          error ||
          i18n.t("callbackFailed", {
            label: options.label
          }) // "`${options.label}` failed, please retry."
      });
    };
    if (options.click) {
      button.click(event => {
        if (options.type == EButtonType.normal) {
          inner.hide();
          loading.show();
        }
        (<any>options).click(onSuccess, onError, event);
      });
    }
    button.appendTo(this.buttonBar);
    // Was a drop handler specified?
    if (options.onDrop) {
      this.addDroper(button, options.onDrop, onSuccess, onError);
    }
    this.buttons.push(button);
    this.recalculateButtonBarPosition();
  }
  /**
   * Remove the button with the specified key
   * @param key
   */
  public removeButton(key: string) {
    let index = this.buttons.findIndex((button: JQuery) => {
      return button.attr("key") == key;
    });
    if (index != -1) {
      let button = this.buttons[index];
      let line = button.data("line");
      if (line) {
        line.remove();
      }
      button.remove();
      this.buttons.splice(index, 1);
    }
    this.recalculateButtonBarPosition();
  }
  /**
   * Recalculate the toolbar position
   */
  public recalculateButtonBarPosition() {
    if (this.buttons.length > 0) {
      this.buttonBar.show();
    } else {
      this.buttonBar.hide();
    }
    if (!this.autoPosition) {
      return;
    }
    this.buttonBar.css({
      top: window.innerHeight / 2 - <any>this.buttonBar.outerHeight(true) / 2
    });
  }
  /**
   * Show a notification
   * @param options the message options to display
   * @return DOM
   */
  public showNotice(options: NoticeOptions | string) {
    APP.debugMode && console.log(options);
    options = Object.assign(
      {
        type: "error",
        timeout: 5,
        position: "bottomRight",
        progressBar: true,
        width: 320,
        indeterminate: false
      },
      typeof options === "string"
        ? { msg: options }
        : typeof options.msg === "object"
        ? options.msg
        : options
    );
    options.text = options.text || options.msg;
    if (options.timeout) {
      // NOTE(review): presumably converts seconds to NoticeJs's 100ms tick unit — confirm against NoticeJs docs
      options.timeout = options.timeout * 10;
    }
    delete options.msg;
    let notice = new (<any>window)["NoticeJs"](options);
    if (options.indeterminate === true) {
      this.messageItems[notice.id] = notice;
      notice.show();
      return notice;
    }
    return $(notice.show());
  }
  /**
   * Hide and close the specified message
   * @param id
   */
  public hideMessage(id: string) {
    if (this.messageItems[id]) {
      this.messageItems[id].close();
    }
  }
  /**
   * Get the default download directory of the current site
   * @param clientId the client ID; when omitted, the default download client is used
   * @return string the directory; returns an empty string when none is defined
   */
  public getSiteDefaultPath(clientId: string = ""): string {
    if (!clientId) {
      clientId =
        this.site.defaultClientId || <string>this.options.defaultClientId;
    }
    let client = this.options.clients.find((item: any) => {
      return item.id === clientId;
    });
    let path = "";
    if (client && client.paths) {
      for (const host in client.paths) {
        if (this.site.host === host) {
          path = client.paths[host][0];
          break;
        }
      }
    }
    // Replace keywords in the directory before returning
    return this.pathHandler.replacePathKey(path, this.site);
  }
  /**
   * Get the configuration of the specified client
   * @param clientId
   */
  public getClientOptions(clientId: string = "") {
    if (!clientId) {
      clientId =
        this.site.defaultClientId || <string>this.options.defaultClientId;
    }
    let client = this.options.clients.find((item: any) => {
      return item.id === clientId;
    });
    return client;
  }
  /**
   * Initialize the drop target
   */
  public initDroper() {
    if (!this.options.allowDropToSend) return;
    // Add document-level drag events
    document.addEventListener("dragstart", (e: any) => {
      if (e.target.tagName == "A") {
        let data = {
          url: e.target.getAttribute("href"),
          title: e.target.getAttribute("title")
        };
        e.dataTransfer.setData("text/plain", JSON.stringify(data));
      }
    });
    // While dragging over the bar
    this.buttonBar.on("dragover", (e: any) => {
      e.stopPropagation();
      e.preventDefault();
      this.showDroper();
    });
    this.buttonBar.on("dragleave", (e: any) => {
      this.buttonBar.removeClass("pt-plugin-body-over");
    });
    this.buttonBar.on("mouseleave", (e: any) => {
      this.buttonBar.removeClass("pt-plugin-body-over");
      this.hideDroper();
    });
    this.droper.appendTo(this.buttonBar);
    // While dragging over the drop target
    this.droper[0].addEventListener(
      "dragover",
      (e: any) => {
        //console.log(e);
        e.stopPropagation();
        e.preventDefault();
        // e.dataTransfer.dropEffect = "copy";
        // if (e.target.tagName == "A") {
        //   let data = {
        //     url: e.target.getAttribute("href"),
        //     title: e.target.getAttribute("title")
        //   };
        //   e.dataTransfer.setData("text/plain", JSON.stringify(data));
        // }
        this.logo.addClass("pt-plugin-onLoading");
        this.buttonBar.addClass("pt-plugin-body-over");
      },
      false
    );
    // Drop event
    this.droper[0].addEventListener(
      "drop",
      (e: any) => {
        //console.log(e);
        e.stopPropagation();
        e.preventDefault();
        this.hideDroper();
        // Get the raw dropped URL
        try {
          let data = JSON.parse(e.dataTransfer.getData("text/plain"));
          if (data) {
            if (data.url) {
              // IMDb URL
              let IMDbMatch = data.url.match(/imdb\.com\/title\/(tt\d+)/);
              if (IMDbMatch && IMDbMatch.length > 1) {
                this.extension.sendRequest(
                  EAction.openOptions,
                  null,
                  `search-torrent/${IMDbMatch[1]}`
                );
                this.logo.removeClass("pt-plugin-onLoading");
                return;
              }
              if (this.pageApp) {
                this.pageApp
                  .call(EAction.downloadFromDroper, data)
                  .then(() => {
                    this.logo.removeClass("pt-plugin-onLoading");
                  })
                  .catch(() => {
                    this.logo.removeClass("pt-plugin-onLoading");
                  });
              } else {
                this.showNotice({
                  type: EDataResultType.info,
                  msg: i18n.t("notSupported"), // "This operation is not supported on the current page"
                  timeout: 3
                });
                this.logo.removeClass("pt-plugin-onLoading");
              }
            } else {
              this.logo.removeClass("pt-plugin-onLoading");
            }
          }
        } catch (error) {
          this.logo.removeClass("pt-plugin-onLoading");
        }
      },
      false
    );
    // When leaving the drop target
    this.droper.on("dragleave dragend", (e: any) => {
      e.stopPropagation();
      e.preventDefault();
      this.hideDroper();
      this.logo.removeClass("pt-plugin-onLoading");
      this.buttonBar.removeClass("pt-plugin-body-over");
    });
  }
  /**
   * Add a drop target for a button
   * @param parent
   * @param onDrop
   */
  public addDroper(
    parent: any,
    onDrop: Function,
    onSuccess: Function,
    onError: Function
  ) {
    if (!onDrop) {
      return;
    }
    let droper: JQuery = $(
      "<div style='display:none;' class='pt-plugin-droper'/>"
    );
    droper.appendTo(this.buttonBar);
    // While dragging over the drop target
    droper.on("dragover", (e: any) => {
      //console.log(e);
      e.stopPropagation();
      e.preventDefault();
      this.buttonBar.addClass("pt-plugin-body-over");
    });
    // Drop event
    droper.on("drop", (e: any) => {
      console.log(e);
      e.stopPropagation();
      e.preventDefault();
      this.hideDroper();
      // Get the raw dropped URL
      try {
        let data = JSON.parse(
          e.originalEvent.dataTransfer.getData("text/plain")
        );
        if (data && data.url) {
          onDrop.call(this, data, e, onSuccess, onError);
        }
      } catch (error) {
        // On error, try using the plain text content directly
        let data = e.originalEvent.dataTransfer.getData("text/plain");
        if (data) {
          data = {
            url: data
          };
          onDrop.call(this, data, e, onSuccess, onError);
        }
      }
    });
    // When leaving the drop target
    droper.on("dragleave dragend", (e: any) => {
      e.stopPropagation();
      e.preventDefault();
      this.hideDroper();
      this.buttonBar.removeClass("pt-plugin-body-over");
    });
    // Set position
    droper.offset(parent.position());
  }
  // Hide all drop targets
  private hideDroper() {
    $(".pt-plugin-droper").hide();
  }
  // Show all drop targets
  private showDroper() {
    $(".pt-plugin-droper").show();
  }
  // Listen for messages sent from the extension (background/options pages)
  private initBrowserEvent() {
    chrome.runtime.onMessage.addListener(
      (
        message: Request,
        sender: chrome.runtime.MessageSender,
        callback: (response: any) => void
      ) => {
        APP.debugMode && console.log("content.onMessage", message);
        switch (message.action) {
          case EAction.showMessage:
            let notice = this.showNotice(message.data);
            callback && callback(notice);
            break;
          case EAction.hideMessage:
            this.hideMessage(message.data);
            break;
          case EAction.serviceStoped:
            this.backgroundServiceIsStoped = true;
            break;
        }
      }
    );
  }
  /**
   * After the address bar changes, recreate the plugin icons
   */
  public checkLocationURL() {
    if (location.href != this.locationURL) {
      this.locationURL = location.href;
      this.initPages();
    }
  }
  /**
   * Load the page selectors
   */
  private initPageSelector(): Promise<any> {
    return new Promise<any>((resolve?: any, reject?: any) => {
      this.call(EAction.getSiteSelectorConfig, {
        host: this.site.host,
        name: location.pathname
      })
        .then(result => {
          this.pageSelector = result;
          resolve();
        })
        .catch(() => {
          // If there is no selector for the current page, try the common selector
          this.call(EAction.getSiteSelectorConfig, {
            host: this.site.host,
            name: "common"
          })
            .then(result => {
              this.pageSelector = result;
              resolve();
            })
            .catch(() => {
              // No selector available
              resolve();
            });
        });
    });
  }
  /**
   * Get the content of the specified field from the current page or a given DOM node
   * @param fieldName the field name
   * @param content the parent element to search; defaults to body
   * @return null when no content was obtained
   */
  public getFieldValue(fieldName: string = "", content: any = $("body")) {
    let selector: any;
    console.log("getFieldValue", fieldName);
    if (this.pageSelector && this.pageSelector.fields) {
      selector = this.pageSelector.fields[fieldName];
      if (!selector) {
        return null;
      }
    } else {
      return null;
    }
    return this.infoParser.getFieldData(content, selector, this.pageSelector);
  }
}
// Expose the service and the PPF helper on the window object
Object.assign(window, {
  PTService: new PTPContent(),
  PPF
});
import { deriveSharedKey } from '@/connection/deriveSharedKey'
import * as identity from '@/connection/identity'
import {
AcceptInvitationMessage,
ChallengeIdentityMessage,
ClaimIdentityMessage,
ConnectionMessage,
DisconnectMessage,
EncryptedMessage,
ErrorMessage,
isNumberedConnectionMessage,
NumberedConnectionMessage,
ProveIdentityMessage,
SeedMessage,
SyncMessage,
} from '@/connection/message'
import { orderedDelivery } from '@/connection/orderedDelivery'
import {
Condition,
ConnectionContext,
ConnectionParams,
ConnectionState,
isInvitee,
SendFunction,
StateMachineAction,
} from '@/connection/types'
import { Device, getDeviceId, parseDeviceId } from '@/device'
import * as invitations from '@/invitation'
import { Team, TeamSignatureChain } from '@/team'
import { assert, debug, EventEmitter, truncateHashes } from '@/util'
import { arraysAreEqual } from '@/util/arraysAreEqual'
import { syncMessageSummary as syncMessageSummary } from '@/util/testing/messageSummary'
import { asymmetric, Payload, randomKey, symmetric } from '@herbcaudill/crypto'
import {
generateMessage,
headsAreEqual,
initSyncState,
KeyType,
receiveMessage,
redactKeys,
SyncState,
} from 'crdx'
import { assign, createMachine, interpret, Interpreter } from 'xstate'
import { protocolMachine } from './protocolMachine'
const { DEVICE } = KeyType
/**
* Wraps a state machine (using [XState](https://xstate.js.org/docs/)) that
* implements the connection protocol. The XState configuration is in `protocolMachine`.
*/
export class Connection extends EventEmitter {
  // name of the peer; updated when their identity claim arrives (see receiveIdentityClaim)
  private peerUserName: string = '?'
  // transport-provided function used to send serialized messages to the peer
  private sendFn: SendFunction
  private machine: Interpreter<ConnectionContext, ConnectionState, ConnectionMessage>
  // buffer for out-of-order incoming messages, keyed by message index
  private incomingMessageQueue: Record<number, NumberedConnectionMessage> = {}
  // sequential index stamped onto every outgoing message
  private outgoingMessageIndex: number = 0
  private started: boolean = false
  constructor({ sendMessage, context, peerUserName }: ConnectionParams) {
    super()
    if (peerUserName) this.peerUserName = peerUserName
    this.sendFn = sendMessage
    this.log = debug(`lf:auth:connection:${context.device.keys.name}:${this.peerUserName}`)
    // define state machine
    const machineConfig = { actions: this.actions, guards: this.guards }
    const machine = createMachine(protocolMachine, machineConfig).withContext(context)
    // instantiate the machine
    this.machine = interpret(machine)
    // emit and log transitions
    this.machine.onTransition((state, event) => {
      const summary = stateSummary(state.value)
      this.emit('change', summary)
      this.log(`${messageSummary(event)} ⏩ ${summary} `)
    })
  }
  /** Starts (or restarts) the protocol machine. Returns this Protocol object.
   * On first start it kicks off the handshake with REQUEST_IDENTITY and replays any
   * messages received before starting; on subsequent calls it sends RECONNECT instead. */
  public start = (storedMessages: string[] = []) => {
    this.log('starting')
    if (!this.started) {
      this.machine.start()
      this.started = true
      this.sendMessage({ type: 'REQUEST_IDENTITY' })
      // deliver any stored messages we might have received before starting
      storedMessages.forEach(m => {
        this.deliver(m)
      })
    } else {
      this.machine.send({ type: 'RECONNECT' })
    }
    return this
  }
  /** Sends a disconnect message to the peer, tears down listeners, and stops the machine. */
  public stop = () => {
    if (this.started && !this.machine.state.done) {
      const disconnectMessage = { type: 'DISCONNECT' } as DisconnectMessage
      this.sendMessage(disconnectMessage) // send disconnect message to peer
      this.machine.send(disconnectMessage) // send disconnect event to local machine
    }
    this.removeAllListeners()
    this.machine.stop()
    // mark the machine done explicitly so deliver() stops forwarding messages
    this.machine.state.done = true
    this.log('machine stopped: %o', this.machine.state.done)
    return this
  }
  /** Returns the local user's name. */
  get userName() {
    if (!this.started) return '(not started)'
    return 'user' in this.context && this.context.user !== undefined
      ? this.context.user.userName
      : 'userName' in this.context && this.context.userName !== undefined
      ? this.context.userName
      : 'unknown'
  }
  /** Returns the current state of the protocol machine. */
  get state() {
    if (!this.started) return 'disconnected'
    else return this.machine.state.value
  }
  /** Returns the machine's context. Throws when the machine has not been started. */
  get context(): ConnectionContext {
    if (!this.started) throw new Error(`Can't get context; machine not started`)
    return this.machine.state.context
  }
  get user() {
    return this.context.user
  }
  /** Returns the last error encountered by the protocol machine.
   * If no error has occurred, returns undefined.
   */
  get error() {
    return this.context.error
  }
  /** Returns the team that the connection's user is a member of.
   * If the user has not yet joined a team, returns undefined.
   */
  get team() {
    return this.context.team
  }
  /** Returns the connection's session key when we are in a connected state.
   * Otherwise, returns `undefined`.
   */
  get sessionKey() {
    return this.context.sessionKey
  }
  /** Returns the peer's name, falling back to their identity claim or '?'. */
  get peerName() {
    if (!this.started) return '(not started)'
    return this.context.peer?.userName ?? this.context.theirIdentityClaim?.name ?? '?'
  }
  /** Serializes and sends a protocol message to the peer, stamping it with a sequential index. */
  private sendMessage = (message: ConnectionMessage) => {
    // add a sequential index to any outgoing messages
    const index = this.outgoingMessageIndex++
    const messageWithIndex = { ...message, index }
    this.logMessage('out', message, index)
    this.sendFn(JSON.stringify(messageWithIndex))
  }
  /** Sends an encrypted message to the peer we're connected with */
  public send = (message: Payload) => {
    assert(this.context.sessionKey)
    const encryptedMessage = symmetric.encrypt(message, this.context.sessionKey)
    this.sendMessage({ type: 'ENCRYPTED_MESSAGE', payload: encryptedMessage })
  }
  /** Generates a sync message from the chain and previous sync state and sends it
   * to the peer (if any is needed); returns the updated sync state. */
  public sendSyncMessage(chain: TeamSignatureChain, prevSyncState: SyncState = initSyncState()) {
    const [syncState, syncMessage] = generateMessage(chain, prevSyncState)
    // undefined message means we're already synced
    if (syncMessage) {
      this.log('sending sync message', syncMessageSummary(syncMessage))
      this.sendMessage({ type: 'SYNC', payload: syncMessage })
    } else {
      this.log('no sync message to send')
    }
    return syncState
  }
  /** Passes an incoming message from the peer on to this protocol machine, guaranteeing that
   * messages will be delivered in the intended order (according to the `index` field on the message) */
  public async deliver(serializedMessage: string) {
    const message = insistentlyParseJson(serializedMessage)
    assert(
      isNumberedConnectionMessage(message),
      `Can only deliver numbered connection messages; received
      ${JSON.stringify(message, null, 2)}`
    )
    this.logMessage('in', message, message.index)
    const { queue, nextMessages } = orderedDelivery(this.incomingMessageQueue, message)
    // update queue
    this.incomingMessageQueue = queue
    // TODO: detect hang when we've got message N+1 and message N doesn't come in for a while?
    // send any messages that are ready to go out
    for (const m of nextMessages) {
      if (this.started && !this.machine.state.done) {
        this.log(`delivering #${m.index} from ${this.peerName}`)
        this.machine.send(m)
      } else this.log(`stopped, not delivering #${m.index}`)
    }
  }
// ACTIONS
/**
 * Builds an ERROR payload from the (lazily evaluated) message, puts the local
 * machine into its error state, notifies the peer, and returns the payload.
 */
private createError = (message: () => string, details?: any) => {
  const payload = { message: message(), details }
  const error: ErrorMessage = { type: 'ERROR', payload }
  // deliver the error both locally and remotely so both ends fail the same way
  this.machine.send(error)
  this.sendMessage(error)
  return payload
}
/** Returns an xstate assign action that records a connection error on the context.
 * (createError also broadcasts the error to the peer and forces the local error state.) */
private fail = (message: () => string, details?: any) =>
  assign<ConnectionContext, ConnectionMessage>({
    error: () => this.createError(message, details),
  })
/** These are referred to by name in `connectionMachine` (e.g. `actions: 'sendIdentityClaim'`) */
private readonly actions: Record<string, StateMachineAction> = {
sendIdentityClaim: async context => {
const payload: ClaimIdentityMessage['payload'] =
'team' in context
? {
identityClaim: {
type: DEVICE,
name: getDeviceId(context.device),
},
}
: {
proofOfInvitation: this.myProofOfInvitation(context),
deviceKeys: redactKeys(context.device.keys),
// TODO make this more readable
...('user' in context && context.user !== undefined
? { userKeys: redactKeys(context.user.keys) }
: {}),
}
this.log('sending CLAIM_IDENTITY', payload)
this.sendMessage({
type: 'CLAIM_IDENTITY',
payload,
})
},
receiveIdentityClaim: assign({
theirIdentityClaim: (context, event) => {
event = event as ClaimIdentityMessage
if ('identityClaim' in event.payload) {
// update peer user name
const deviceId = event.payload.identityClaim.name
this.peerUserName = parseDeviceId(deviceId).userName
this.log = debug(`lf:auth:connection:${context.device.keys.name}:${this.peerUserName}`)
return event.payload.identityClaim
} else {
return undefined
}
},
theyHaveInvitation: (_, event) => {
event = event as ClaimIdentityMessage
if ('proofOfInvitation' in event.payload) {
return true
} else {
return false
}
},
theirProofOfInvitation: (_, event) => {
event = event as ClaimIdentityMessage
if ('proofOfInvitation' in event.payload) {
return event.payload.proofOfInvitation
} else {
return undefined
}
},
theirUserKeys: (_, event) => {
event = event as ClaimIdentityMessage
if ('userKeys' in event.payload) {
return event.payload.userKeys
} else {
return undefined
}
},
theirDeviceKeys: (context, event) => {
event = event as ClaimIdentityMessage
if ('deviceKeys' in event.payload) {
// update peer user name
const deviceId = event.payload.deviceKeys.name
this.peerUserName = parseDeviceId(deviceId).userName
this.log = debug(`lf:auth:connection:${context.device.keys.name}:${deviceId}`)
return event.payload.deviceKeys
} else {
return undefined
}
},
}),
// handling invitations
acceptInvitation: context => {
assert(context.team)
assert(context.theirProofOfInvitation)
// admit them to the team
if ('theirUserKeys' in context && context.theirUserKeys !== undefined) {
// new member
context.team.admitMember(context.theirProofOfInvitation, context.theirUserKeys)
} else {
// new device for existing member
assert(context.theirDeviceKeys)
const keys = context.theirDeviceKeys
const { userName, deviceName } = parseDeviceId(context.theirDeviceKeys.name)
const device: Device = { userName, deviceName, keys }
context.team.admitDevice(context.theirProofOfInvitation, device)
}
// welcome them by sending the team's signature chain, so they can reconstruct team membership state
this.sendMessage({
type: 'ACCEPT_INVITATION',
payload: { chain: context.team.save() },
} as AcceptInvitationMessage)
},
joinTeam: (context, event) => {
assert(this.context.invitationSeed)
// we've just received the team's signature chain; reconstruct team
const team = this.rehydrateTeam(context, event)
// join the team
if (context.user === undefined) {
// joining as a new device for an existing member
// we get the user's keys from the team and rehydrate our user that way
context.user = team.joinAsDevice(context.userName)
} else {
// joining as a new member
// we add our current device to the team chain
team.joinAsMember()
}
// put the updated team on our context
context.team = team
},
// authenticating
/**
* Looks up the device name (e.g. alice::laptop) on the team chain. Returns an appropriate error if
* - the member is unknown
* - the member is known but has been removed
* - the member does not have a device by that name
* - the member had a device by that name but it was removed
*
* When on the happy path (user and device both in good standing) does nothing.
*/
confirmIdentityExists: (context, event) => {
// if we're not on the team yet, we don't have a way of knowing if the peer is
if (context.team === undefined) return
event = event as ClaimIdentityMessage
// if no identity claim is being made, there's nothing to confirm
if (!('identityClaim' in event.payload)) return
const { identityClaim } = event.payload
if (identityClaim === undefined) return
const deviceId = identityClaim.name
const { userName, deviceName } = parseDeviceId(deviceId)
const identityLookupResult = context.team.lookupIdentity(identityClaim)
const fail = (msg: string) => {
context.error = this.createError(() => msg)
}
switch (identityLookupResult) {
// if a member or a device was removed, we still connect with it in order to sync
case 'MEMBER_REMOVED':
case 'DEVICE_REMOVED':
case 'VALID_DEVICE':
return
case 'MEMBER_UNKNOWN':
return fail(`${userName} is not a member of this team.`)
case 'DEVICE_UNKNOWN':
return fail(`${userName} does not have a device '${deviceName}'.`)
}
},
challengeIdentity: assign({
challenge: context => {
const identityClaim = context.theirIdentityClaim!
const challenge = identity.challenge(identityClaim)
this.sendMessage({
type: 'CHALLENGE_IDENTITY',
payload: { challenge },
} as ChallengeIdentityMessage)
return challenge
},
}),
proveIdentity: (context, event) => {
assert(context.user)
const { challenge } = (event as ChallengeIdentityMessage).payload
const proof = identity.prove(challenge, context.device.keys)
this.sendMessage({
type: 'PROVE_IDENTITY',
payload: { challenge, proof },
} as ProveIdentityMessage)
},
storePeer: assign({
peer: context => {
assert(context.team)
assert(this.peerUserName)
return context.team.members(this.peerUserName, { includeRemoved: true })
},
}),
acceptIdentity: () => {
this.sendMessage({
type: 'ACCEPT_IDENTITY',
payload: {},
})
},
// updating
listenForTeamUpdates: context => {
assert(context.team)
context.team.addListener('updated', ({ head }) => {
if (!this.machine.state.done) {
this.machine.send({ type: 'LOCAL_UPDATE', payload: { head } }) // send update event to local machine
}
})
},
// TODO: when we're syncing with someone who may have been removed, we want to receive any
// additional information they might have, but we don't want to provide them any information
// until we're sure they're still on the team
sendSyncMessage: assign({
syncState: context => {
assert(context.team)
const syncState = this.sendSyncMessage(context.team.chain, context.syncState)
return syncState
},
}),
// Merges an incoming SYNC message into our team chain, emitting 'updated' when
// the chain actually changed; returns the new team + sync state for the context.
receiveSyncMessage: assign((context, event) => {
  assert(context.team)
  let { team } = context // `let`, not `var`: block-scoped and idiomatic TS
  const prevSyncState = context.syncState ?? initSyncState()
  const syncMessage = (event as SyncMessage).payload
  const [newChain, syncState] = receiveMessage(context.team.chain, prevSyncState, syncMessage)
  // only merge (and notify listeners) if the incoming chain moved our head
  if (!headsAreEqual(newChain.head, team.chain.head)) {
    team = context.team.merge(newChain)
    const summary = JSON.stringify({
      head: team.chain.head,
      links: Object.keys(team.chain.links),
    })
    this.log(`received sync message; new chain ${summary}`)
    this.emit('updated')
  }
  return { team, syncState }
}),
// negotiating
generateSeed: assign({ seed: _ => randomKey() }),
sendSeed: context => {
assert(context.user)
assert(context.peer)
assert(context.seed)
const recipientPublicKey = context.peer.keys.encryption
const senderPublicKey = context.user.keys.encryption.publicKey
const senderSecretKey = context.user.keys.encryption.secretKey
this.log(`encrypting %o`, { recipientPublicKey, senderPublicKey })
const encryptedSeed = asymmetric.encrypt({
secret: context.seed,
recipientPublicKey,
senderSecretKey,
})
this.sendMessage({
type: 'SEED',
payload: { encryptedSeed },
})
},
receiveSeed: assign({
theirEncryptedSeed: (_, event) => {
return (event as SeedMessage).payload.encryptedSeed
},
}),
deriveSharedKey: assign({
sessionKey: (context, event) => {
assert(context.user)
assert(context.theirEncryptedSeed)
assert(context.seed)
assert(context.peer)
// we saved our seed in context
const ourSeed = context.seed
// their seed is encrypted and stored in context
const senderPublicKey = context.peer.keys.encryption
const recipientPublicKey = context.user.keys.encryption.publicKey
const recipientSecretKey = context.user.keys.encryption.secretKey
this.log(`decrypting %o`, truncateHashes({ senderPublicKey, recipientPublicKey }))
try {
const theirSeed = asymmetric.decrypt({
cipher: context.theirEncryptedSeed,
senderPublicKey,
recipientSecretKey,
})
// with the two keys, we derive a shared key
return deriveSharedKey(ourSeed, theirSeed)
} catch (e) {
this.log(`decryption failed %o`, truncateHashes({ senderPublicKey, recipientPublicKey }))
throw e
}
},
}),
// communicating
receiveEncryptedMessage: (context, event) => {
assert(context.sessionKey)
const encryptedMessage = (event as EncryptedMessage).payload
const decryptedMessage = symmetric.decrypt(encryptedMessage, context.sessionKey)
this.emit('message', decryptedMessage)
},
// failure
receiveError: assign({
error: (_, event) => (event as ErrorMessage).payload,
}),
rejectIdentityProof: this.fail(() => {
return `${this.userName} can't verify ${this.peerName}'s proof of identity.`
}),
failNeitherIsMember: this.fail(
() => `${this.userName} can't connect with ${this.peerName} because neither one is a member.`
),
rejectInvitation: this.fail(
() => `This invitation didn't work. ${this.context.error?.message}`
),
rejectTeam: this.fail(
() =>
`${this.userName} was admitted to a team by ${this.peerName}, but it isn't the team ${this.userName} was invited to.`
),
failPeerWasRemoved: this.fail(() => `${this.peerName} was removed from the team.`),
failTimeout: this.fail(() => `${this.userName}'s connection to ${this.peerName} timed out.`),
// events for external listeners
onConnected: () => this.emit('connected'),
onJoined: () => this.emit('joined', { team: this.team, user: this.user }),
onUpdated: () => this.emit('updated'),
onDisconnected: (_, event) => this.emit('disconnected', event),
}
// GUARDS
/** These are referred to by name in `connectionMachine` (e.g. `cond: 'iHaveInvitation'`) */
private readonly guards: Record<string, Condition> = {
  //
  // INVITATIONS
  iHaveInvitation: context => isInvitee(context) && !isMember(context),
  theyHaveInvitation: context => context.theyHaveInvitation === true,
  bothHaveInvitation: (...args) =>
    this.guards.iHaveInvitation(...args) && this.guards.theyHaveInvitation(...args),
  invitationProofIsValid: context => {
    assert(context.team)
    assert(context.theirProofOfInvitation)
    const validation = context.team.validateInvitation(context.theirProofOfInvitation)
    this.log(`invitation validation: %o`, validation)
    if (validation.isValid === true) {
      return true
    } else {
      // surface the validation error so the failure actions can report it
      this.context.error = validation.error
      return false
    }
  },
  joinedTheRightTeam: (context, event) => {
    // Make sure my invitation exists on the signature chain of the team I'm about to join.
    // This check prevents an attack in which a fake team pretends to accept my invitation.
    const team = this.rehydrateTeam(context, event)
    return team.hasInvitation(this.myProofOfInvitation(context))
  },
  // IDENTITY
  peerWasRemoved: context => {
    assert(context.team)
    assert(context.device)
    const { team, device } = context
    const { userName, deviceName } = device
    const memberWasRemoved = team.memberWasRemoved(userName)
    const deviceWasRemoved = team.deviceWasRemoved(userName, deviceName)
    return memberWasRemoved || deviceWasRemoved
  },
  weWereRemoved: context => {
    assert(context.team)
    // fix: assert context.user before dereferencing it, matching the style of
    // the sibling guards (previously an undefined user caused a raw TypeError)
    assert(context.user)
    return !context.team.has(context.user.userName)
  },
  identityProofIsValid: (context, event) => {
    assert(context.team)
    const { challenge, proof } = (event as ProveIdentityMessage).payload
    return context.team.verifyIdentityProof(challenge, proof)
  },
  // SYNCHRONIZATION
  headsAreEqual: (context, event) => {
    assert(context.team)
    const ourHead = context.team.chain.head
    const lastCommonHead = context.syncState?.lastCommonHead
    return arraysAreEqual(ourHead, lastCommonHead)
  },
  headsAreDifferent: (...args) => {
    return !this.guards.headsAreEqual(...args)
  },
}
// helpers
/** Logs an incoming/outgoing message with a direction arrow and its sequence number. */
private logMessage = (direction: 'in' | 'out', message: ConnectionMessage, index: number) => {
  const arrow = direction === 'in' ? '<-' : '->'
  // fix: removed a leftover `debugger` statement (it halts execution whenever
  // devtools are attached); keep the diagnostic as a loud log line instead
  if (index === undefined) this.log(`WARNING: message has no index: ${messageSummary(message)}`)
  this.log(`${this.userName}${arrow}${this.peerName} #${index} ${messageSummary(message)}`)
}
/** Reconstructs a Team from the signature chain carried by an ACCEPT_INVITATION message. */
private rehydrateTeam = (context: ConnectionContext, event: ConnectionMessage) => {
  return new Team({
    source: (event as AcceptInvitationMessage).payload.chain,
    // NOTE(review): context.user is non-null-asserted here — confirm all callers guarantee it
    context: { user: context.user!, device: context.device },
  })
}
/** Generates our proof of invitation from the seed we were invited with. */
private myProofOfInvitation = (context: ConnectionContext) => {
  const { invitationSeed } = context
  assert(invitationSeed)
  return invitations.generateProof(invitationSeed)
}
}
// for debugging
// Renders a connection message as a one-line summary for log output.
const messageSummary = (message: ConnectionMessage) =>
  message.type === 'SYNC'
    ? `SYNC ${syncMessageSummary(message.payload)}`
    : // @ts-ignore
      `${message.type} ${message.payload?.head?.slice(0, 5) || message.payload?.message || ''}`
/** Type guard: true when the given state value is a plain string.
 * (Typed `unknown` + predicate instead of `any`, so callers get narrowing.) */
const isString = (state: unknown): state is string => typeof state === 'string'
/**
 * Compresses an xstate state value into a short string for logging,
 * e.g. `{a: {b: 'c'}}` -> `a:b:c`. Terminal 'done' states are omitted.
 */
const stateSummary = (state: any = 'disconnected'): string =>
  isString(state)
    ? state === 'done'
      ? ''
      : state
    : Object.keys(state)
        .map(key => `${key}:${stateSummary(state[key])}`)
        .filter(s => s.length)
        .join(',')
// We count as a member once a Team instance is present on the context.
const isMember = (context: ConnectionContext) => context.team !== undefined
/**
 * Parses JSON that may have been stringified more than once in transit (a JSON
 * string containing a JSON string, etc.): keeps unwrapping until the result is
 * no longer a string. Throws SyntaxError if any layer is not valid JSON.
 */
const insistentlyParseJson = (json: any) => {
  let result = json
  while (typeof result === 'string') {
    result = JSON.parse(result)
  }
  return result
}
/// <reference types="./markdown-it-footnote" />
/// <reference types="./markdown-it-table-of-contents" />
/// <reference types="./markdown-it-container-pandoc" />
import express from 'express';
import markdown from 'markdown-it';
import * as Url from 'url';
import * as user from '../util/user';
import * as userUtils from '../util/user';
import * as model from '../model/user';
import * as organization from '../model/organization';
import * as device from '../model/device';
import * as blogModel from '../model/blog';
import * as nlpModelsModel from '../model/nlp_models';
import * as db from '../util/db';
import TrainingServer from '../util/training_server';
import * as iv from '../util/input_validation';
import { makeRandom } from '../util/random';
import { BadRequestError } from '../util/errors';
import * as EngineManager from '../almond/enginemanagerclient';
import * as Config from '../config';
const router = express.Router();
// page sizes for the user list and the device review queue
const USERS_PER_PAGE = 50;
const DEVICES_PER_PAGE = 50;
/**
 * Annotates each user row with `isRunning`/`engineId` by asking the engine
 * manager for that user's process id. Resolves to the same (mutated) array.
 */
async function renderUserList(users : Array<model.Row & { isRunning ?: boolean, engineId ?: number|string|null }>) {
    const engineManager = EngineManager.get();
    await Promise.all(users.map(async (u) => {
        if (!engineManager)
            return;
        const pid = await engineManager.getProcessId(u.id);
        // pid === -1 means no engine process is running for this user
        const running = pid !== -1;
        u.isRunning = running;
        u.engineId = running ? pid : null;
    }));
    return users;
}
// every /admin route requires an authenticated session
router.use(user.requireLogIn);
// admin landing page; any administrative role may view it
router.get('/', user.requireAnyRole(user.Role.ALL_ADMIN), (req, res, next) => {
    res.render('admin_portal', { page_title: req._("Thingpedia - Administration"),
                                 csrfToken: req.csrfToken() });
});
// full user list, paged and sortable (ADMIN only)
router.get('/users', user.requireRole(user.Role.ADMIN),
    iv.validateGET({ page: '?integer', sort: /^$|^(id|username|human_name|registration_time|lastlog_time)\/(asc|desc)$/ }), (req, res, next) => {
    // clamp the page number to 0 when absent or negative
    let page : number;
    if (req.query.page === undefined)
        page = 0;
    else
        page = parseInt(req.query.page);
    if (page < 0)
        page = 0;
    const sort = req.query.sort || 'id/asc';
    db.withClient((dbClient) => {
        // fetch one extra row so the template can tell whether a next page exists
        return model.getAll(dbClient, page * USERS_PER_PAGE, USERS_PER_PAGE + 1, sort);
    }).then(renderUserList).then((users) => {
        res.render('admin_user_list', { page_title: req._("Genie - Administration"),
                                        csrfToken: req.csrfToken(),
                                        users: users,
                                        page_num: page,
                                        sort: sort,
                                        search: '',
                                        USERS_PER_PAGE });
    }).catch(next);
});
// search users: a numeric query is treated as a user-ID lookup, anything else as a name search
router.get('/users/search', user.requireRole(user.Role.ADMIN), iv.validateGET({ q: 'string' }), (req, res, next) => {
    db.withClient((dbClient) => {
        if (Number.isInteger(+req.query.q))
            return Promise.all([model.get(dbClient, +req.query.q)]);
        else
            return model.getSearch(dbClient, req.query.q);
    }).then(renderUserList).then((users) => {
        res.render('admin_user_list', { page_title: req._("Genie - User List"),
                                        csrfToken: req.csrfToken(),
                                        users: users,
                                        page_num: 0,
                                        // fix: the validated query parameter is `q`, not `search`;
                                        // `req.query.search` was always undefined here, so the
                                        // template never saw the search term that was used
                                        search: req.query.q,
                                        USERS_PER_PAGE });
    }).catch(next);
});
// stop every running user engine
router.post('/users/kill/all', user.requireRole(user.Role.ADMIN), (req, res, next) => {
    const engineManager = EngineManager.get();
    Promise.resolve().then(() => {
        return engineManager.killAllUsers();
    }).then(() => {
        res.redirect(303, '/admin/users');
    }).catch(next);
});
// stop one user's engine, then return to that user's search page
router.post('/users/kill/:id', user.requireRole(user.Role.ADMIN), (req, res, next) => {
    const engineManager = EngineManager.get();
    engineManager.killUser(parseInt(req.params.id)).then(() => {
        res.redirect(303, '/admin/users/search?q=' + req.params.id);
    }).catch(next);
});
// start a user's engine if it is not already running
router.post('/users/start/:id', user.requireRole(user.Role.ADMIN), (req, res, next) => {
    const engineManager = EngineManager.get();
    const id = parseInt(req.params.id);
    engineManager.isRunning(id).then((isRunning) => {
        if (isRunning)
            return Promise.resolve();
        else
            return engineManager.startUser(id);
    }).then(() => {
        res.redirect(303, '/admin/users/search?q=' + req.params.id);
    }).catch(next);
});
/**
 * Renders the NL training dashboard: the current training job queue plus
 * evaluation metrics for every trained model, keyed as "<tag>/<language>".
 */
async function getTraining(req : express.Request, res : express.Response) {
    const [jobs, models] = await db.withClient((dbClient) => Promise.all([
        TrainingServer.get().getJobQueue(dbClient),
        nlpModelsModel.getTrained(dbClient)
    ]));
    // metrics are stored as JSON strings; decode the ones that are present
    const metrics : Record<string, unknown> = {};
    for (const m of models) {
        if (m.metrics)
            metrics[m.tag + '/' + m.language] = JSON.parse(m.metrics);
    }
    res.render('admin_training', { page_title: req._("Thingpedia - Administration - Natural Language Training"),
                                   csrfToken: req.csrfToken(),
                                   metrics,
                                   jobs });
}
// NL training routes exist only when this server embeds LUInet
if (Config.WITH_LUINET === 'embedded') {
    // training dashboard
    router.get('/training', user.requireRole(user.Role.NLP_ADMIN), (req, res, next) => {
        getTraining(req, res).catch(next);
    });
    // queue a new training job, then re-render the dashboard
    router.post('/training', user.requireRole(user.Role.NLP_ADMIN), iv.validatePOST({ language: 'string', job_type: 'string' }), (req, res, next) => {
        TrainingServer.get().queue(req.body.language, null, req.body.job_type).then(() => {
            return getTraining(req, res);
        }).catch(next);
    });
    // cancel a queued or running training job
    router.post('/training/kill', user.requireRole(user.Role.NLP_ADMIN), iv.validatePOST({ job_id: 'integer' }), (req, res, next) => {
        TrainingServer.get().kill(Number(req.body.job_id)).then(() => {
            return res.redirect(303, '/admin/training');
        }).catch(next);
    });
}
// delete a user account; admins cannot delete themselves
router.post('/users/delete/:id', user.requireRole(user.Role.ADMIN), (req, res, next) => {
    const id = Number(req.params.id);
    if (req.user!.id === id) {
        res.render('error', { page_title: req._("Thingpedia - Error"),
                              message: req._("You cannot delete yourself") });
        return;
    }
    db.withTransaction((dbClient) => {
        // tear down the engine first so nothing is running for the row we delete
        return EngineManager.get().deleteUser(id).then(() => {
            return model.delete(dbClient, id);
        });
    }).then(() => {
        res.redirect(303, '/admin/users');
    }).catch(next);
});
// raise a user's developer status by one step; a user with no org gets a fresh
// single-member org and becomes its admin, which requires an engine restart
router.post('/users/promote/:id', user.requireRole(user.Role.ADMIN), (req, res, next) => {
    const id = Number(req.params.id);
    db.withTransaction(async (dbClient) => {
        const user = await model.get(dbClient, id);
        // already at the top of the ladder: nothing to do
        if (user.developer_status >= userUtils.DeveloperStatus.ORG_ADMIN)
            return false;
        if (user.developer_org === null) {
            // no org yet: create an empty one and make this user its admin
            const org = await organization.create(dbClient, {
                name: '',
                comment: '',
                id_hash: makeRandom(8),
                developer_key: makeRandom()
            });
            await userUtils.makeDeveloper(dbClient, user.id, org.id, userUtils.DeveloperStatus.ORG_ADMIN);
            return true;
        } else {
            await model.update(dbClient, user.id, { developer_status: user.developer_status + 1 });
            return false;
        }
    }).then((needsRestart) => {
        // joining a new org changes the developer key, so the engine must restart
        if (needsRestart)
            return EngineManager.get().restartUser(id);
        else
            return Promise.resolve();
    }).then(() => {
        res.redirect(303, '/admin/users/search?q=' + id);
    }).catch(next);
});
// lower a user's developer status by one step; admins cannot demote themselves
router.post('/users/demote/:id', user.requireRole(user.Role.ADMIN), (req, res, next) => {
    const id = Number(req.params.id);
    if (req.user!.id === id) {
        res.render('error', { page_title: req._("Thingpedia - Error"),
                              message: req._("You cannot demote yourself") });
        return;
    }
    db.withTransaction(async (dbClient) => {
        const user = await model.get(dbClient, id);
        // already a plain user: nothing to do
        if (user.developer_status <= 0)
            return;
        await model.update(dbClient, id, { developer_status: user.developer_status - 1 });
    }).then(() => {
        res.redirect(303, '/admin/users/search?q=' + req.params.id);
    }).catch(next);
});
// strip a user of developer credentials entirely (org membership + status)
router.post('/users/revoke-developer/:id', user.requireRole(user.Role.ADMIN), (req, res, next) => {
    const id = Number(req.params.id);
    if (req.user!.id === id) {
        res.render('error', { page_title: req._("Thingpedia - Error"),
                              message: req._("You cannot revoke your own dev credentials yourself") });
        return;
    }
    db.withTransaction(async (dbClient) => {
        // check the user exists
        await model.get(dbClient, id);
        await userUtils.makeDeveloper(dbClient, id, null, userUtils.DeveloperStatus.USER);
    }).then(() => EngineManager.get().restartUserWithoutCache(id)).then(() => {
        res.redirect(303, '/admin/users/search?q=' + id);
    }).catch(next);
});
// device review queue is only available when Thingpedia is embedded
if (Config.WITH_THINGPEDIA === 'embedded') {
    router.get('/review-queue', user.requireRole(user.Role.THINGPEDIA_ADMIN), iv.validateGET({ page: '?integer' }), (req, res, next) => {
        // clamp the page number to 0 when absent or negative
        let page : number;
        if (req.query.page === undefined)
            page = 0;
        else
            page = parseInt(req.query.page);
        if (page < 0)
            page = 0;
        db.withClient((dbClient) => {
            // fetch one extra row so the template can tell whether a next page exists
            return device.getReviewQueue(dbClient, page * DEVICES_PER_PAGE, DEVICES_PER_PAGE + 1);
        }).then((devices) => {
            res.render('admin_review_queue', { page_title: req._("Genie - Administration"),
                                               csrfToken: req.csrfToken(),
                                               devices: devices,
                                               page_num: page,
                                               DEVICES_PER_PAGE });
        }).catch(next);
    });
}
// paged list of developer organizations (20 per page, +1 row for next-page detection)
router.get('/organizations', user.requireRole(user.Role.THINGPEDIA_ADMIN), iv.validateGET({ page: '?integer' }), (req, res, next) => {
    let page : number;
    if (req.query.page === undefined)
        page = 0;
    else
        page = parseInt(req.query.page);
    if (page < 0)
        page = 0;
    db.withClient((dbClient) => {
        return organization.getAll(dbClient, page * 20, 21);
    }).then((rows) => {
        res.render('admin_org_list', { page_title: req._("Genie - Developer Organizations"),
                                       csrfToken: req.csrfToken(),
                                       page_num: page,
                                       organizations: rows,
                                       search: '' });
    }).catch(next);
});
// fuzzy search over organizations; an empty query falls back to the full list
router.get('/organizations/search', user.requireRole(user.Role.THINGPEDIA_ADMIN), iv.validateGET({ q: 'string' }), (req, res, next) => {
    if (!req.query.q) {
        res.redirect(303, '/admin/organizations');
        return;
    }
    db.withClient((dbClient) => {
        return organization.getByFuzzySearch(dbClient, req.query.q);
    }).then((rows) => {
        // NOTE(review): page_num -1 presumably disables pagination in the template — confirm
        res.render('admin_org_list', { page_title: req._("Genie - Developer Organizations"),
                                       csrfToken: req.csrfToken(),
                                       page_num: -1,
                                       organizations: rows,
                                       search: req.query.q });
    }).catch(next);
});
// organization detail page: metadata, member list, and owned devices
router.get('/organizations/details/:id', user.requireRole(user.Role.THINGPEDIA_ADMIN), (req, res, next) => {
    db.withClient((dbClient) => {
        return Promise.all([
            organization.get(dbClient, Number(req.params.id)),
            organization.getMembers(dbClient, Number(req.params.id)),
            device.getByOwner(dbClient, Number(req.params.id))
        ]);
    }).then(([org, users, devices]) => {
        res.render('admin_org_details', { page_title: req._("Genie - Developer Organization"),
                                          csrfToken: req.csrfToken(),
                                          org: org,
                                          members: users,
                                          devices: devices });
    }).catch(next);
});
// add an existing user to an organization, optionally as a developer
router.post('/organizations/add-member', user.requireRole(user.Role.THINGPEDIA_ADMIN),
    iv.validatePOST({ id: 'integer', as_developer: 'boolean', username: 'string' }), (req, res, next) => {
    db.withTransaction(async (dbClient) => {
        const [user] = await model.getByName(dbClient, req.body.username);
        if (!user)
            throw new BadRequestError(req._("No such user %s").format(req.body.username));
        // refuse to silently move a user between organizations
        if (user.developer_org !== null && user.developer_org !== Number(req.body.id))
            throw new BadRequestError(req._("%s is already a member of another developer organization.").format(req.body.username));
        const targetStatus = req.body.as_developer ? userUtils.DeveloperStatus.DEVELOPER : userUtils.DeveloperStatus.USER;
        await userUtils.makeDeveloper(dbClient, user.id, Number(req.body.id), targetStatus);
        return user.id;
    }).then(async (userId) => {
        // restart the user's engine so the new developer key takes effect
        await EngineManager.get().restartUser(userId);
        // fix: always respond — the transaction either throws (handled by
        // .catch(next)) or resolves with a numeric id, so the previous
        // `if (userId !== null)` guard could only leave the request hanging
        // with no response if it was ever false
        res.redirect(303, '/admin/organizations/details/' + req.body.id);
    }).catch(next);
});
// rename an organization and/or update its internal comment
router.post('/organizations/set-name', user.requireRole(user.Role.THINGPEDIA_ADMIN), iv.validatePOST({ id: 'integer', name: 'string', comment: '?string' }), (req, res, next) => {
    db.withTransaction((dbClient) => {
        return organization.update(dbClient, Number(req.body.id), { name: req.body.name, comment: req.body.comment });
    }).then(() => {
        res.redirect(303, '/admin/organizations/details/' + req.body.id);
    }).catch(next);
});
const BLOG_POSTS_PER_PAGE = 10;
// paged blog archive for editors
router.get('/blog', user.requireRole(user.Role.BLOG_EDITOR), iv.validateGET({ page: '?integer' }), (req, res, next) => {
    // clamp the page number to 0 when absent or negative
    let page : number;
    if (req.query.page === undefined)
        page = 0;
    else
        page = parseInt(req.query.page);
    if (page < 0)
        page = 0;
    db.withClient((dbClient) => {
        // one extra row so the template can tell whether a next page exists
        return blogModel.getAll(dbClient, page * BLOG_POSTS_PER_PAGE, BLOG_POSTS_PER_PAGE+1);
    }).then((posts) => {
        return res.render('admin_blog_archive', {
            page_title: req._("Genie - Blog Archive"),
            posts
        });
    }).catch(next);
});
// edit form for an existing post
router.get('/blog/update/:id', user.requireRole(user.Role.BLOG_EDITOR), (req, res, next) => {
    db.withClient((dbClient) => {
        return blogModel.getForEdit(dbClient, Number(req.params.id));
    }).then((post) => {
        return res.render('blog_create_or_edit', {
            page_title: req._("Genie - Blog Editor"),
            create: false,
            post,
            messages: req.flash('admin-blog-message'),
        });
    }).catch(next);
});
// blank form for a new post
router.get('/blog/create', user.requireRole(user.Role.BLOG_EDITOR), (req, res, next) => {
    res.render('blog_create_or_edit', {
        page_title: req._("Genie - Blog Editor"),
        create: true,
        messages: [],
        post: {
            title: '',
            blurb: '',
            image: '',
            source: ''
        }
    });
});
/**
 * Converts a title into a URL slug: trimmed, lowercased, whitespace runs
 * collapsed to '-', then everything outside [a-z0-9-] is dropped (after
 * URI-encoding, so multi-byte characters leave only their hex digits behind).
 */
function slugify(s : string) {
    const dashed = String(s).trim().toLowerCase().replace(/\s+/g, '-');
    const encoded = encodeURIComponent(dashed);
    return encoded.replace(/[^a-z0-9-]/g, '');
}
import mdAnchor from 'markdown-it-anchor';
import mdHighlight from 'markdown-it-highlightjs';
import mdContainerPandoc from 'markdown-it-container-pandoc';
import mdFootnote from 'markdown-it-footnote';
import mdTOC from 'markdown-it-table-of-contents';
// save an edited post: re-render the markdown, regenerate the slug, store everything
router.post('/blog/update', user.requireRole(user.Role.BLOG_EDITOR),
    iv.validatePOST({ id: 'integer', title: 'string', image: 'string', blurb: 'string', source: 'string' }), (req, res, next) => {
    const md = new markdown({ html: true });
    // render tables with the bootstrap "table" class
    md.renderer.rules.table_open = (tokens, idx) => {
        return '<table class="table">';
    };
    md.use(mdAnchor);
    md.use(mdHighlight);
    md.use(mdContainerPandoc);
    md.use(mdFootnote);
    md.use(mdTOC, { includeLevel: [2,3] });
    // make the cover image url absolute against this server's origin
    const image = Url.resolve(Config.SERVER_ORIGIN, req.body.image);
    const rendered = md.render(req.body.source);
    const slug = slugify(req.body.title);
    db.withClient((dbClient) => {
        return blogModel.update(dbClient, Number(req.body.id), {
            title: req.body.title,
            blurb: req.body.blurb,
            image: image,
            source: req.body.source,
            slug: slug,
            body: rendered,
        });
    }).then(() => {
        req.flash('admin-blog-message', req._("Saved"));
        return res.redirect(303, '/admin/blog/update/' + req.body.id);
    }).catch(next);
});
// create a new post: render the markdown, generate the slug, store everything
router.post('/blog/create', user.requireRole(user.Role.BLOG_EDITOR),
    iv.validatePOST({ title: 'string', image: 'string', blurb: 'string', source: 'string' }), (req, res, next) => {
    const md = new markdown({ html: true });
    // render tables with the bootstrap "table" class
    md.renderer.rules.table_open = (tokens, idx) => {
        return '<table class="table">';
    };
    md.use(mdAnchor);
    md.use(mdHighlight);
    md.use(mdContainerPandoc);
    md.use(mdFootnote);
    md.use(mdTOC, { includeLevel: [2,3] });
    // make the cover image url absolute against this server's origin
    const image = Url.resolve(Config.SERVER_ORIGIN, req.body.image);
    const rendered = md.render(req.body.source);
    const slug = slugify(req.body.title);
    db.withClient((dbClient) => {
        return blogModel.create<db.Optional<blogModel.Row, blogModel.OptionalFields>>(dbClient, {
            author: req.user!.id,
            title: req.body.title,
            image: image,
            blurb: req.body.blurb,
            source: req.body.source,
            slug: slug,
            body: rendered,
        });
    }).then((post) => {
        req.flash('admin-blog-message', req._("Saved"));
        return res.redirect(303, '/admin/blog/update/' + post.id);
    }).catch(next);
});
// publish a draft post
router.post('/blog/publish', user.requireRole(user.Role.BLOG_EDITOR), iv.validatePOST({ id: 'integer' }), (req, res, next) => {
    db.withClient((dbClient) => {
        return blogModel.publish(dbClient, Number(req.body.id));
    }).then(() => {
        return res.redirect(303, '/admin/blog');
    }).catch(next);
});
// take a published post back to draft state
router.post('/blog/unpublish', user.requireRole(user.Role.BLOG_EDITOR), iv.validatePOST({ id: 'integer' }), (req, res, next) => {
    db.withClient((dbClient) => {
        return blogModel.unpublish(dbClient, Number(req.body.id));
    }).then(() => {
        return res.redirect(303, '/admin/blog');
    }).catch(next);
});
// permanently delete a post
router.post('/blog/delete', user.requireRole(user.Role.BLOG_EDITOR), iv.validatePOST({ id: 'integer' }), (req, res, next) => {
    db.withClient((dbClient) => {
        return blogModel.delete(dbClient, Number(req.body.id));
    }).then(() => {
        return res.redirect(303, '/admin/blog');
    }).catch(next);
});
export default router; | the_stack |
import * as url from 'url';
import * as path from 'path';
import { Protocol as Crdp } from 'devtools-protocol';
import { logger } from 'vscode-debugadapter';
import * as utils from '../utils';
import { ITarget } from './chromeConnection';
import { IPathMapping } from '../debugAdapterInterfaces';
import { pathToRegex } from '../utils';
import { isInternalRemotePath } from '../remoteMapper';
import { Socket } from 'net';
import * as errors from '../errors';
/**
 * Applies `pathMapping` to the path component of a target url (which must
 * start with '/'). Longer mapping keys win over shorter ones; returns '' when
 * nothing matches or the inputs are unusable.
 */
export function applyPathMappingsToTargetUrlPath(scriptUrlPath: string, pathMapping: IPathMapping): string {
    // nothing to do without a mapping or a rooted url path
    if (!pathMapping || !scriptUrlPath || !scriptUrlPath.startsWith('/')) {
        return '';
    }
    // try the most specific (longest) patterns first
    const patterns = Object.keys(pathMapping).sort((a, b) => b.length - a.length);
    for (const rawPattern of patterns) {
        // an empty pattern matches nothing; use '/' to match the root
        if (!rawPattern) {
            continue;
        }
        const mappingRHS = pathMapping[rawPattern];
        let pattern = rawPattern;
        if (pattern[0] !== '/') {
            logger.log(`PathMapping keys should be absolute: ${pattern}`);
            pattern = '/' + pattern;
        }
        if (pathMappingPatternMatchesPath(pattern, scriptUrlPath)) {
            return toClientPath(pattern, mappingRHS, scriptUrlPath);
        }
    }
    return '';
}
/**
 * True when `scriptPath` equals `pattern` or lives underneath it.
 * A trailing '/' is enforced on the pattern so that '/foo' does not match
 * '/foobar/something'.
 */
function pathMappingPatternMatchesPath(pattern: string, scriptPath: string): boolean {
    if (pattern === scriptPath)
        return true;
    const prefix = pattern.endsWith('/') ? pattern : pattern + '/';
    return scriptPath.startsWith(prefix);
}
/**
 * Applies `pathMapping` to a full target url. file: urls, bare paths, and
 * anything that does not parse into a protocol+pathname are skipped ('').
 */
export function applyPathMappingsToTargetUrl(scriptUrl: string, pathMapping: IPathMapping): string {
    const parsed = url.parse(scriptUrl);
    // skip file: URLs and paths, and invalid things
    if (!parsed.protocol || parsed.protocol.startsWith('file') || !parsed.pathname)
        return '';
    return applyPathMappingsToTargetUrlPath(parsed.pathname, pathMapping);
}
/**
 * Maps a matched url path onto the client filesystem: the mapping's RHS joined
 * with whatever remained of the path after the matched pattern (URI-decoded).
 */
function toClientPath(pattern: string, mappingRHS: string, scriptPath: string): string {
    const remainder = decodeURIComponent(scriptPath.substring(pattern.length));
    if (!remainder)
        return mappingRHS;
    return utils.properJoin(mappingRHS, remainder);
}
/**
 * Maps a url from target to an absolute local path, if it exists.
 * If not given an absolute path (with file: prefix), searches the current working directory for a matching file.
 * http://localhost/scripts/code.js => d:/app/scripts/code.js
 * file:///d:/scripts/code.js => d:/scripts/code.js
 */
export async function targetUrlToClientPath(aUrl: string, pathMapping: IPathMapping): Promise<string> {
    if (!aUrl) {
        return '';
    }
    // If the url is an absolute path to a file that exists, return it without file:///.
    // A remote absolute url (cordova) will still need the logic below.
    const canonicalUrl = utils.canonicalizeUrl(aUrl);
    if (utils.isFileUrl(aUrl)) {
        if (await utils.exists(canonicalUrl)) {
            return canonicalUrl;
        }
        // the file: url may also denote a network (UNC) path; try that form too
        const networkPath = utils.fileUrlToNetworkPath(aUrl);
        if (networkPath !== aUrl && await utils.exists(networkPath)) {
            return networkPath;
        }
    }
    // Search the filesystem under the webRoot for the file that best matches the given url
    let pathName = url.parse(canonicalUrl).pathname;
    if (!pathName || pathName === '/') {
        return '';
    }
    // Dealing with the path portion of either a url or an absolute path to remote file.
    const pathParts = pathName
        .replace(/^\//, '') // Strip leading /
        .split(/[\/\\]/);
    // try progressively shorter suffixes of the path until a mapping resolves
    // to an internal remote path or to a file that exists on disk
    while (pathParts.length > 0) {
        const joinedPath = '/' + pathParts.join('/');
        const clientPath = applyPathMappingsToTargetUrlPath(joinedPath, pathMapping);
        if (isInternalRemotePath(clientPath)) {
            return clientPath;
        } else if (clientPath && await utils.exists(clientPath)) {
            return utils.canonicalizeUrl(clientPath);
        }
        pathParts.shift();
    }
    return '';
}
/**
 * Convert a RemoteObject to a value+variableHandleRef for the client.
 * TODO - Delete after Microsoft/vscode#12019!!
 */
export function remoteObjectToValue(object: Crdp.Runtime.RemoteObject, stringify = true): { value: string, variableHandleRef?: string } {
    let value = '';
    let variableHandleRef: string;

    if (!object) {
        return { value, variableHandleRef };
    }

    switch (object.type) {
        case 'object':
            if (object.subtype === 'null') {
                value = 'null';
            } else {
                // Non-null object: hand out a variable reference so the client can ask for its props
                variableHandleRef = object.objectId;
                value = object.description;
            }
            break;

        case 'undefined':
            value = 'undefined';
            break;

        case 'function': {
            // Abbreviate the function source after the body opening / the arrow.
            const firstBraceIdx = object.description.indexOf('{');
            if (firstBraceIdx >= 0) {
                value = object.description.substring(0, firstBraceIdx) + '{ … }';
            } else {
                const firstArrowIdx = object.description.indexOf('=>');
                value = firstArrowIdx >= 0 ?
                    object.description.substring(0, firstArrowIdx + 2) + ' …' :
                    object.description;
            }
            break;
        }

        default:
            // The value is a primitive value, or something that has a description (not object, primitive, or undefined). And force to be string
            if (typeof object.value === 'undefined') {
                value = object.description;
            } else if (object.type === 'number') {
                // .value is truncated, so use .description, the full string representation
                // Should be like '3' or 'Infinity'.
                value = object.description;
            } else {
                value = stringify ? JSON.stringify(object.value) : object.value;
            }
    }

    return { value, variableHandleRef };
}
/**
 * Returns the targets from the given list that match the targetUrl, which may have * wildcards.
 * Ignores the protocol and is case-insensitive.
 */
export function getMatchingTargets(targets: ITarget[], targetUrlPattern: string): ITarget[] {
    // Normalize a url for comparison: lowercase, no file:/// or protocol prefix,
    // no trailing slash.
    const standardizeMatch = (aUrl: string) => {
        let result = aUrl.toLowerCase();
        if (utils.isFileUrl(result)) {
            // Strip file:///, if present
            result = utils.fileUrlToPath(result);
        } else if (utils.isURL(result) && result.indexOf('://') >= 0) {
            // Strip the protocol, if present
            result = result.substr(result.indexOf('://') + 3);
        }

        // Remove optional trailing /
        return result.endsWith('/') ? result.substr(0, result.length - 1) : result;
    };

    // Escape regex special chars (except the wildcard), then turn '*' into '.*'.
    const standardizedPattern = utils.escapeRegexSpecialChars(standardizeMatch(targetUrlPattern), '/*').replace(/\*/g, '.*');
    const targetUrlRegex = new RegExp('^' + standardizedPattern + '$', 'g');

    return targets.filter(target => !!standardizeMatch(target.url).match(targetUrlRegex));
}
const PROTO_NAME = '__proto__';
const NUM_REGEX = /^[0-9]+$/;

/**
 * Orders variable names for display: plain names alphabetically first, then
 * numeric indexes in numeric order, with __proto__ always last.
 */
export function compareVariableNames(var1: string, var2: string): number {
    // __proto__ sorts to the very end
    if (var1 === PROTO_NAME) {
        return 1;
    }
    if (var2 === PROTO_NAME) {
        return -1;
    }

    const isNum1 = NUM_REGEX.test(var1);
    const isNum2 = NUM_REGEX.test(var2);

    if (isNum1 !== isNum2) {
        // Names come before numbers
        return isNum1 ? 1 : -1;
    }
    if (isNum1) {
        // Both numeric: compare as numbers
        return parseInt(var1, 10) - parseInt(var2, 10);
    }

    // Both names: compare as strings
    return var1.localeCompare(var2);
}
/**
 * Builds a CallArgument referring to the same value as the given RemoteObject,
 * copying only the fields CallArgument carries.
 */
export function remoteObjectToCallArgument(object: Crdp.Runtime.RemoteObject): Crdp.Runtime.CallArgument {
    const { objectId, unserializableValue, value } = object;
    return { objectId, unserializableValue, value };
}
/**
 * .exception is not present in Node < 6.6 - TODO this would be part of a generic solution for handling
 * protocol differences in the future.
 * This includes the error message and full stack
 */
export function descriptionFromExceptionDetails(exceptionDetails: Crdp.Runtime.ExceptionDetails): string {
    const exception = exceptionDetails.exception;

    // Prefer the exception object's description; if a raw value was thrown,
    // synthesize an Error-style message. Fall back to the plain text field.
    const description = exception
        ? exception.description || 'Error: ' + exception.value
        : exceptionDetails.text;

    return description || '';
}
/**
 * Get just the error message from the exception details - the first line without the full stack
 */
export function errorMessageFromExceptionDetails(exceptionDetails: Crdp.Runtime.ExceptionDetails): string {
    const description = descriptionFromExceptionDetails(exceptionDetails);
    const newlineIdx = description.indexOf('\n');
    return newlineIdx >= 0 ? description.substr(0, newlineIdx) : description;
}
/**
 * Builds an evaluate-able expression for a child property: dot access for
 * identifier-like names, bracket access for numeric indexes, and quoted
 * bracket access for everything else.
 */
export function getEvaluateName(parentEvaluateName: string, name: string): string {
    if (!parentEvaluateName) {
        return name;
    }

    const isIdentifier = /^[a-zA-Z_$][a-zA-Z_$0-9]*$/.test(name);
    const isIndex = /^\d+$/.test(name);
    const accessor = isIdentifier
        ? '.' + name
        : isIndex
            ? `[${name}]`
            : `[${JSON.stringify(name)}]`;

    return parentEvaluateName + accessor;
}
/**
 * Picks the last location whose column is at or before the requested column,
 * falling back to the first location when none qualifies.
 * (lineNumber is currently unused but kept for interface compatibility.)
 */
export function selectBreakpointLocation(lineNumber: number, columnNumber: number, locations: Crdp.Debugger.BreakLocation[]): Crdp.Debugger.BreakLocation {
    // Walk a reversed copy so the original array is left untouched.
    for (const location of [...locations].reverse()) {
        if (location.columnNumber <= columnNumber) {
            return location;
        }
    }

    return locations[0];
}
export const EVAL_NAME_PREFIX = 'VM';

/** True for script paths synthesized for eval code (names beginning with "VM"). */
export function isEvalScript(scriptPath: string): boolean {
    return scriptPath.indexOf(EVAL_NAME_PREFIX) === 0;
}
/* Constructs the regex for files to enable break on load
For example, for a file index.js the regex will match urls containing index.js, index.ts, abc/index.ts, index.bin.js etc
It won't match index100.js, indexabc.ts etc */
export function getUrlRegexForBreakOnLoad(url: string): string {
// Strip the directory, then the extension: /a/b/index.bin.js -> "index.bin" -> "index"? No:
// path.parse twice removes only the last extension each time, so "index.bin.js" -> "index.bin".
const fileNameWithoutFullPath = path.parse(url).base;
const fileNameWithoutExtension = path.parse(fileNameWithoutFullPath).name;
const escapedFileName = pathToRegex(fileNameWithoutExtension);
// Match <anything>/<name> followed by a non-name character (or end of string).
// NOTE(review): the class [^A-z^0-9] uses the range A-z, which also spans the
// punctuation [ \ ] ^ _ ` lying between 'Z' and 'a' in ASCII, and the inner '^'
// is a literal. If the intent is "not a letter or digit", the precise form is
// [^A-Za-z0-9] - confirm before changing, since it alters matching of names
// followed by '_' and similar characters.
return '.*[\\\\\\/]' + escapedFileName + '([^A-z^0-9].*)?$';
}
/**
 * Checks if a given tcp port is currently in use (more accurately, is there a server socket accepting connections on that port)
 * @param port The port to check
 * @param host Optional host, defaults to 127.0.0.1
 * @param timeout Timeout for the socket connect attempt
 * @returns True if a server socket is listening on the given port, false otherwise
 */
export async function isPortInUse(port: number, host = '127.0.0.1', timeout = 400): Promise<boolean> {
    // Attempt a plain client connection: success means something is listening.
    return new Promise<boolean>((resolve, _reject) => {
        const socket = new Socket();

        // Resolve with the outcome, then tear the socket down; the promise
        // never rejects - timeouts and errors simply mean "not in use".
        const finish = (inUse: boolean) => () => {
            resolve(inUse);
            socket.removeAllListeners();
            socket.destroy();
        };

        socket.setTimeout(timeout);
        socket.once('connect', finish(true));
        socket.once('timeout', finish(false));
        socket.once('error', finish(false));
        socket.connect(port, host);
    });
}
/**
 * Get the port on which chrome was launched when passed "--remote-debugging-port=0"
 * @param userDataDir The profile data directory for the Chrome instance to check
 * @throws If reading the port failed for any reason
 */
export async function getLaunchedPort(userDataDir: string): Promise<number> {
// Chrome writes the chosen port to this well-known file inside the profile dir.
const activePortFilePath = path.join(userDataDir, 'DevToolsActivePort');
try {
const activePortArgs = await utils.readFileP(activePortFilePath, 'utf-8');
const [ portStr ] = activePortArgs.split('\n'); // chrome uses \n regardless of platform in this file
const port = parseInt(portStr, 10);
// Returning a rejected promise (rather than throwing) deliberately bypasses the
// catch below: a `return` of a rejected promise completes the try block normally,
// so invalid contents reject with the specific activePortFileContentsInvalid
// error instead of being re-wrapped by failedToReadPortFromUserDataDir.
if (isNaN(port)) return Promise.reject(errors.activePortFileContentsInvalid(activePortFilePath, activePortArgs));
return port;
} catch (err) {
// Any read failure (missing file, permissions, ...) is surfaced as one error type.
return Promise.reject(errors.failedToReadPortFromUserDataDir(userDataDir, err));
}
}
import "mocha";
import * as expect from "expect";
import { CambridgeZ88Core } from "../../../src/renderer/machines/cambridge-z88/CambridgeZ88Core";
import {
DefaultCambridgeZ88StateManager,
loadWaModule,
SilentAudioRenderer,
} from "../helpers";
import { setEngineDependencies } from "../../../src/renderer/machines/core/vm-engine-dependencies";
// --- The machine instance under test; created in before(), re-initialized in beforeEach().
let machine: CambridgeZ88Core;
// --- Set up the virtual machine engine service with the dependencies the
// --- Cambridge Z88 core requires (WA module loader, audio, state manager).
setEngineDependencies({
waModuleLoader: (n) => loadWaModule(n),
sampleRateGetter: () => 48000,
audioRendererFactory: () => new SilentAudioRenderer(),
cz88StateManager: new DefaultCambridgeZ88StateManager(),
});
describe("Cambridge Z88 - Memory write", function () {
before(async () => {
machine = new CambridgeZ88Core({
baseClockFrequency: 3_276_800,
tactsInFrame: 16384,
firmware: [new Uint8Array(32768)],
});
await machine.setupMachine();
});
beforeEach(async () => {
await machine.setupMachine();
});
const addresses: number[] = [
0x0000, 0x1234, 0x1fff, 0x2000, 0x2345, 0x2fff, 0x3000, 0x3456, 0x3fff,
0x4000, 0x5678, 0x5fff, 0x6000, 0x6789, 0x7fff, 0x8000, 0x89ab, 0x9fff,
0xa000, 0xbcde, 0xbfff, 0xc000, 0xcdef, 0xdfff, 0xe000, 0xef01, 0xffff,
];
addresses.forEach((addr) => {
it(`ROM (${addr}) cannot be written`, () => {
machine.reset();
machine.api.setZ88RndSeed(0);
machine.api.setZ88ChipMask(0, 0x1f); // Slot 0 ROM 512K
machine.api.setZ88ChipMask(1, 0x1f); // Slot 1 RAM 512K
machine.api.setZ88ChipMask(2, 0x3f); // Slot 2 RAM 1M
machine.api.setZ88ChipMask(3, 0x3f); // Slot 3 RAM 1M
machine.api.setZ88ChipMask(4, 0x3f); // Slot 4 RAM 1M
machine.api.setZ88SlotMask(3, false); // Chip 4 is RAM
machine.api.testWriteCz88Memory(addr, 0x23);
const value = machine.api.testReadCz88Memory(addr);
expect(value).toBe(0);
});
});
addresses.forEach((addr) => {
it(`RAMS turned on (${addr})`, () => {
machine.reset();
machine.api.setZ88RndSeed(0);
machine.api.setZ88ChipMask(0, 0x1f); // Slot 0 ROM 512K
machine.api.setZ88ChipMask(1, 0x1f); // Slot 1 RAM 512K
machine.api.setZ88ChipMask(2, 0x3f); // Slot 2 RAM 1M
machine.api.setZ88ChipMask(3, 0x3f); // Slot 3 RAM 1M
machine.api.setZ88ChipMask(4, 0x3f); // Slot 4 RAM 1M
machine.api.writePortCz88(0xb0, 0x04); // Set COM.RAMS
machine.api.setZ88SlotMask(3, false); // Chip 4 is RAM
machine.api.testWriteCz88Memory(addr, 0x23);
const value = machine.api.testReadCz88Memory(addr);
if (addr <= 0x1fff) {
expect(value).toBe(0x23);
} else {
expect(value).toBe(0);
}
});
});
addresses.forEach((addr) => {
it(`Internal RAM (${addr}) can be written`, () => {
machine.reset();
machine.api.setZ88RndSeed(0);
machine.api.setZ88ChipMask(0, 0x1f); // Slot 0 ROM 512K
machine.api.setZ88ChipMask(1, 0x1f); // Slot 1 RAM 512K
machine.api.setZ88ChipMask(2, 0x3f); // Slot 2 RAM 1M
machine.api.setZ88ChipMask(3, 0x3f); // Slot 3 RAM 1M
machine.api.setZ88ChipMask(4, 0x3f); // Slot 4 RAM 1M
machine.api.setZ88SlotMask(3, false); // Chip 4 is RAM
machine.api.writePortCz88(0xd1, 0x20);
machine.api.testWriteCz88Memory(addr, 0x23);
const value = machine.api.testReadCz88Memory(addr);
if ((addr & 0xc000) === 0x4000) {
// RAM area
expect(value).toBe(0x23);
} else {
// ROM area
expect(value).toBe(0);
}
});
});
addresses.forEach((addr) => {
it(`Card 1 RAM (${addr}) can be written`, () => {
machine.reset();
machine.api.setZ88RndSeed(0);
machine.api.setZ88ChipMask(0, 0x1f); // Slot 0 ROM 512K
machine.api.setZ88ChipMask(1, 0x1f); // Slot 1 RAM 512K
machine.api.setZ88ChipMask(2, 0x3f); // Slot 2 RAM 1M
machine.api.setZ88ChipMask(3, 0x3f); // Slot 3 RAM 1M
machine.api.setZ88ChipMask(4, 0x3f); // Slot 4 RAM 1M
machine.api.setZ88SlotMask(3, false); // Chip 4 is RAM
machine.api.writePortCz88(0xd1, 0x40);
machine.api.testWriteCz88Memory(addr, 0x23);
const value = machine.api.testReadCz88Memory(addr);
if ((addr & 0xc000) === 0x4000) {
// RAM area
expect(value).toBe(0x23);
} else {
// ROM area
expect(value).toBe(0);
}
});
});
addresses.forEach((addr) => {
it(`Card 2 RAM (${addr}) can be written`, () => {
machine.reset();
machine.api.setZ88RndSeed(0);
machine.api.setZ88ChipMask(0, 0x1f); // Slot 0 ROM 512K
machine.api.setZ88ChipMask(1, 0x1f); // Slot 1 RAM 512K
machine.api.setZ88ChipMask(2, 0x3f); // Slot 2 RAM 1M
machine.api.setZ88ChipMask(3, 0x3f); // Slot 3 RAM 1M
machine.api.setZ88ChipMask(4, 0x3f); // Slot 4 RAM 1M
machine.api.setZ88SlotMask(3, false); // Chip 4 is RAM
machine.api.writePortCz88(0xd2, 0x80);
machine.api.testWriteCz88Memory(addr, 0x23);
const value = machine.api.testReadCz88Memory(addr);
if ((addr & 0xc000) === 0x8000) {
// RAM area
expect(value).toBe(0x23);
} else {
// ROM area
expect(value).toBe(0);
}
});
});
addresses.forEach((addr) => {
it(`Card 3 RAM (${addr}) can be written`, () => {
machine.reset();
machine.api.setZ88RndSeed(0);
machine.api.setZ88ChipMask(0, 0x1f); // Slot 0 ROM 512K
machine.api.setZ88ChipMask(1, 0x1f); // Slot 1 RAM 512K
machine.api.setZ88ChipMask(2, 0x3f); // Slot 2 RAM 1M
machine.api.setZ88ChipMask(3, 0x3f); // Slot 3 RAM 1M
machine.api.setZ88ChipMask(4, 0x3f); // Slot 4 RAM 1M
machine.api.setZ88SlotMask(3, false); // Chip 4 is RAM
machine.api.writePortCz88(0xd3, 0xc0);
machine.api.testWriteCz88Memory(addr, 0x23);
const value = machine.api.testReadCz88Memory(addr);
if ((addr & 0xc000) === 0xc000) {
// RAM area
expect(value).toBe(0x23);
} else {
// ROM area
expect(value).toBe(0);
}
});
});
addresses.forEach((addr) => {
it(`Card 3 RAM in segment 2 (${addr}) can be written`, () => {
machine.reset();
machine.api.setZ88RndSeed(0);
machine.api.setZ88ChipMask(0, 0x1f); // Slot 0 ROM 512K
machine.api.setZ88ChipMask(1, 0x1f); // Slot 1 RAM 512K
machine.api.setZ88ChipMask(2, 0x3f); // Slot 2 RAM 1M
machine.api.setZ88ChipMask(3, 0x3f); // Slot 3 RAM 1M
machine.api.setZ88ChipMask(4, 0x3f); // Slot 4 RAM 1M
machine.api.setZ88SlotMask(3, false); // Chip 4 is RAM
machine.api.writePortCz88(0xd3, 0x80);
machine.api.testWriteCz88Memory(addr, 0x23);
const value = machine.api.testReadCz88Memory(addr);
if ((addr & 0xc000) === 0xc000) {
// RAM area
expect(value).toBe(0x23);
} else {
// ROM area
expect(value).toBe(0);
}
});
});
addresses.forEach((addr) => {
it(`Card 3 EPROM (${addr}) cannot be written`, () => {
machine.reset();
machine.api.setZ88RndSeed(0);
machine.api.setZ88ChipMask(0, 0x1f); // Slot 0 ROM 512K
machine.api.setZ88ChipMask(1, 0x1f); // Slot 1 RAM 512K
machine.api.setZ88ChipMask(2, 0x3f); // Slot 2 RAM 1M
machine.api.setZ88ChipMask(3, 0x3f); // Slot 3 RAM 1M
machine.api.setZ88ChipMask(4, 0x3f); // Slot 4 RAM 1M
machine.api.setZ88SlotMask(3, true); // Chip 4 is ROM
machine.api.writePortCz88(0xd3, 0xc0);
machine.api.testWriteCz88Memory(addr, 0x23);
const value = machine.api.testReadCz88Memory(addr);
expect(value).toBe(0);
});
});
addresses.forEach((addr) => {
it(`Multiple paged-in RAM (${addr}) can be written`, () => {
machine.reset();
machine.api.setZ88RndSeed(0);
machine.api.setZ88ChipMask(0, 0x1f); // Slot 0 ROM 512K
machine.api.setZ88ChipMask(1, 0x1f); // Slot 1 RAM 512K
machine.api.setZ88ChipMask(2, 0x3f); // Slot 2 RAM 1M
machine.api.setZ88ChipMask(3, 0x3f); // Slot 3 RAM 1M
machine.api.setZ88ChipMask(4, 0x3f); // Slot 4 RAM 1M
machine.api.setZ88SlotMask(3, false); // Chip 4 is RAM
machine.api.writePortCz88(0xd2, 0x80);
machine.api.writePortCz88(0xd3, 0xc0);
machine.api.testWriteCz88Memory(addr, 0x23);
const value = machine.api.testReadCz88Memory(addr);
if ((addr & 0xc000) >= 0x8000) {
// RAM area
expect(value).toBe(0x23);
} else {
// ROM area
expect(value).toBe(0);
}
});
});
const repeatingAddresses: number[] = [0x4000, 0x4567, 0x5f00];
const sizeMasks: number[] = [0x01, 0x03, 0x07, 0x0f, 0x1f, 0x3f];
sizeMasks.forEach((size) => {
repeatingAddresses.forEach((addr) => {
it(`Write/read repeats in internal RAM ${size}/(${addr})`, () => {
machine.reset();
machine.api.setZ88RndSeed(0);
machine.api.setZ88ChipMask(0, 0x1f); // Slot 0 ROM 512K
machine.api.setZ88ChipMask(1, 0x1f); // Slot 1 RAM 512K
machine.api.setZ88ChipMask(2, size); // Passed size
machine.api.setZ88ChipMask(3, 0x3f); // Slot 3 RAM 1M
machine.api.setZ88ChipMask(4, 0x3f); // Slot 4 RAM 1M
machine.api.setZ88SlotMask(3, false); // Chip 4 is RAM
// Even pages
for (let i = 0x40; i < 0x80; i += size + 1) {
machine.api.writePortCz88(0xd1, i);
machine.api.testWriteCz88Memory(addr, 0x23);
const value = machine.api.testReadCz88Memory(addr);
expect(value).toBe(0x23);
for (let j = 0x40 + (size + 1); j < 0x80; j += size + 1) {
machine.api.writePortCz88(0xd1, j);
const value = machine.api.testReadCz88Memory(addr);
expect(value).toBe(0x23);
for (let k = j + 2; k < j + size + 1; k += 2) {
machine.api.writePortCz88(0xd1, k);
const value = machine.api.testReadCz88Memory(addr);
expect(value).toBe(0x00);
}
}
}
// Odd pages
for (let i = 0x41; i < 0x80; i += size + 1) {
machine.api.writePortCz88(0xd1, i);
machine.api.testWriteCz88Memory(addr, 0x23);
const value = machine.api.testReadCz88Memory(addr);
expect(value).toBe(0x23);
for (let j = 0x41 + (size + 1); j < 0x80; j += size + 1) {
machine.api.writePortCz88(0xd1, j);
const value = machine.api.testReadCz88Memory(addr);
expect(value).toBe(0x23);
for (let k = j + 2; k < j + size + 1; k += 2) {
machine.api.writePortCz88(0xd1, k);
const value = machine.api.testReadCz88Memory(addr);
expect(value).toBe(0x00);
}
}
}
});
});
});
}); | the_stack |
import { BPI } from '../bpi/bpi';
import { expect } from 'chai';
import { Workstep } from '../bpi/components/workgroup/workstep';
import { Workgroup } from '../bpi/components/workgroup/workgroup';
import { BpiMessage } from '../bpi/components/messaging/bpiMessage';
import { MockMessagingComponent } from '../bpi/components/messaging/messaging';
import { WorkgroupComponent } from '../bpi/components/workgroup/workgroup.service';
import { IdentityComponent } from '../bpi/components/identity/identity';
import { Order } from '../domain-objects/order';
describe('BPI, Workgroup and Worflow setup', () => {
it('Given beggining of time, Alice initiates a new BPI with her organization as owner and an inital agreement state object, BPI created with inherent owner and a global agreement', () => {
const bpiInstance = new BPI("AL1", "AliceOrganisation", ["555333"], new IdentityComponent(), new MockMessagingComponent(), new WorkgroupComponent());
expect(bpiInstance.identityComponent.getOwnerOrganization().id).to.be.equal("AL1");
expect(bpiInstance.identityComponent.getOwnerOrganization().name).to.be.equal("AliceOrganisation");
expect(bpiInstance.storageComponent.getAgreementState().productIds.length).to.be.equal(1);
expect(bpiInstance.storageComponent.getAgreementState().productIds[0]).to.be.equal("555333");
expect(bpiInstance.storageComponent.getAgreementState().orders).to.be.an("array").that.is.empty;
expect(bpiInstance.storageComponent.getAgreementState().proofs).to.be.an("array").that.is.empty;
});
it('Given freshly instantiated BPI, Alice creates a workgroup, workgroup is added to the BPI and available in the list of workgroups', () => {
const bpiInstance = new BPI("AL1", "AliceOrganisation", ["555333"], new IdentityComponent(), new MockMessagingComponent(), new WorkgroupComponent());
const exchangeOrdersWorkgroup = bpiInstance.addWorkgroup("AB1", "ABOrder", []);
expect(bpiInstance.getWorkgroups().length).to.be.equal(1);
expect(bpiInstance.getWorkgroups()[0].id).to.be.equal(exchangeOrdersWorkgroup.id);
expect(bpiInstance.getWorkgroups()[0].name).to.be.equal(exchangeOrdersWorkgroup.name);
expect(bpiInstance.getWorkgroups()[0].participants.length).to.be.equal(1);
expect(bpiInstance.getWorkgroups()[0].participants[0].id).to.be.equal("AL1");
});
it('Given newly created workgroup, Alice creates a workstep, workstep is added to the workgroup and is visible in the list of worksteps for a given workgroup', () => {
const bpiInstance = new BPI("AL1", "AliceOrganisation", ["555333"], new IdentityComponent(), new MockMessagingComponent(), new WorkgroupComponent());
const exchangeOrdersWorkgroup = bpiInstance.addWorkgroup("AB1", "ABOrder", []);
const workStep = new Workstep("W1", "WRKSTP1");
workStep.setBusinessLogicToExecute(bpiInstance.storageComponent.getAgreementState().addOrder);
exchangeOrdersWorkgroup.addWorkstep(workStep);
expect(exchangeOrdersWorkgroup.worksteps.length).to.be.equal(1);
expect(exchangeOrdersWorkgroup.worksteps.length).to.be.above(0);
expect(exchangeOrdersWorkgroup.worksteps[0]).to.be.equal(workStep);
expect(exchangeOrdersWorkgroup.worksteps[0].id).to.be.equal("W1");
expect(exchangeOrdersWorkgroup.worksteps[0].name).to.be.equal("WRKSTP1");
});
it('Given a prepared workgroup with a workstep, Alice invites Bob, BPI stores the invitation and invitee email and this information is available for querying', () => {
const bpiInstance = new BPI("AL1", "AliceOrganisation", ["555333"], new IdentityComponent(), new MockMessagingComponent(), new WorkgroupComponent());
const exchangeOrdersWorkgroup = bpiInstance.addWorkgroup("AB1", "ABOrder", []);
const workStep = new Workstep("W1", "WRKSTP1");
workStep.setBusinessLogicToExecute(bpiInstance.storageComponent.getAgreementState().addOrder);
exchangeOrdersWorkgroup.addWorkstep(workStep);
bpiInstance.inviteToWorkgroup("BI1", "BobsInvite", bpiInstance.identityComponent.getOwnerOrganization(), "bob@bob.com", exchangeOrdersWorkgroup.id, bpiInstance.storageComponent.getAgreementState());
const bobsInvitation = bpiInstance.getInvitationById("BI1");
expect(bobsInvitation.id).to.be.equal("BI1");
expect(bobsInvitation.name).to.be.equal("BobsInvite");
expect(bobsInvitation.recipient).to.be.equal("bob@bob.com");
expect(bobsInvitation.sender).to.be.equal(bpiInstance.identityComponent.getOwnerOrganization());
expect(bobsInvitation.agreement.productIds).to.be.equal(bpiInstance.storageComponent.getAgreementState().productIds);
});
it('Given a sent invitation, Bob queries list of received invitations, can see invitation details from Alice', () => {
const bpiInstance = new BPI("AL1", "AliceOrganisation", ["555333"], new IdentityComponent(), new MockMessagingComponent(), new WorkgroupComponent());
const exchangeOrdersWorkgroup = bpiInstance.addWorkgroup("AB1", "ABOrder", []);
const workStep = new Workstep("W1", "WRKSTP1");
workStep.setBusinessLogicToExecute(bpiInstance.storageComponent.getAgreementState().addOrder);
exchangeOrdersWorkgroup.addWorkstep(workStep);
bpiInstance.inviteToWorkgroup("BI1", "BobsInvite", bpiInstance.identityComponent.getOwnerOrganization(), "bob@bob.com", exchangeOrdersWorkgroup.id, bpiInstance.storageComponent.getAgreementState());
const bobsInvitations = bpiInstance.getReceivedInvitationsByEmail("bob@bob.com");
expect(bobsInvitations[0].id).to.be.equal("BI1");
expect(bobsInvitations[0].name).to.be.equal("BobsInvite");
expect(bobsInvitations[0].recipient).to.be.equal("bob@bob.com");
expect(bobsInvitations[0].sender).to.be.equal(bpiInstance.identityComponent.getOwnerOrganization());
expect(bobsInvitations[0].agreement.productIds).to.be.equal(bpiInstance.storageComponent.getAgreementState().productIds);
});
it('Given a received invitation, Bob accepts by singing the agreement, Bob is added as a subject to the Bpi, to the collection of workgroup participants and proof is stored in the collection of proofs for the workgroup', () => {
const bpiInstance = new BPI("AL1", "AliceOrganisation", ["555333"], new IdentityComponent(), new MockMessagingComponent(), new WorkgroupComponent());
const exchangeOrdersWorkgroup = bpiInstance.addWorkgroup("AB1", "ABOrder", []);
const workStep = new Workstep("W1", "WRKSTP1");
workStep.setBusinessLogicToExecute(bpiInstance.storageComponent.getAgreementState().addOrder);
exchangeOrdersWorkgroup.addWorkstep(workStep);
bpiInstance.inviteToWorkgroup("BI1", "BobsInvite", bpiInstance.identityComponent.getOwnerOrganization(), "bob@bob.com", exchangeOrdersWorkgroup.id, bpiInstance.storageComponent.getAgreementState());
const bobsInvitation = bpiInstance.getReceivedInvitationsByEmail("bob@bob.com");
//signed invitation "triggers" Bpi to create dummy proof and add Bob to orgs and workgroup participants
bpiInstance.signInvitation(bobsInvitation[0].id, "bobsSignature", "BO1", "BobOrganisation");
const workgroup = bpiInstance.getWorkgroupById(exchangeOrdersWorkgroup.id);
expect(bpiInstance.getOrganizationById("BO1")).to.not.be.undefined;
expect(workgroup.participants.length).to.be.equal(2);
expect(workgroup.participants[1].id).to.be.equal("BO1");
expect(workgroup.participants[1].name).to.be.equal("BobOrganisation");
expect(bpiInstance.storageComponent.getAgreementState().proofs.length).to.be.equal(1);
expect(bpiInstance.storageComponent.getAgreementState().proofs[0].length).to.be.above(0);
});
it('Given accepted invite, Alice queries the list of sent invitations, and can verify the proof aginst the Bpi', () => {
const bpiInstance = new BPI("AL1", "AliceOrganisation", ["555333"], new IdentityComponent(), new MockMessagingComponent(), new WorkgroupComponent());
const exchangeOrdersWorkgroup = bpiInstance.addWorkgroup("AB1", "ABOrder", []);
const workStep = new Workstep("W1", "WRKSTP1");
workStep.setBusinessLogicToExecute(bpiInstance.storageComponent.getAgreementState().addOrder);
exchangeOrdersWorkgroup.addWorkstep(workStep);
bpiInstance.inviteToWorkgroup("BI1", "BobsInvite", bpiInstance.identityComponent.getOwnerOrganization(), "bob@bob.com", exchangeOrdersWorkgroup.id, bpiInstance.storageComponent.getAgreementState());
const bobsInvitation = bpiInstance.getReceivedInvitationsByEmail("bob@bob.com");
bpiInstance.signInvitation(bobsInvitation[0].id, "bobsSignature", "BO1", "BobOrganisation");
const invQuedByAlice = bpiInstance.getInvitationById("BI1");
const proofVerificationResult = bpiInstance.verifyProof(invQuedByAlice.agreement.proofs[0]);
expect(proofVerificationResult).to.be.true;
});
});
describe('Exchanging business objects', () => {
it('Given verified proof, Alice sends request for the order that is valid, the request is verified against the agreement, the proof and order is sent to Bob', () => {
const [bpiInstance, exchangeOrdersWorkgroup, workstep] = setupOrderExchangeWorkgroupWithACleanAgreementState();
const orderBusinessObject = new Order("0001", "Purchase", 30, "555333");
const orgAlice = bpiInstance.identityComponent.getOrganizationById("AL1");
const orgBob = bpiInstance.identityComponent.getOrganizationById("BO1");
const addOrderMessage = new BpiMessage("M1", "STORE", orgAlice, orgBob, exchangeOrdersWorkgroup.id, workstep.id, orderBusinessObject);
// Alice sends to BPI for agreement update, validation and proof generation
const proof = bpiInstance.postMessage(addOrderMessage);
addOrderMessage.setExecutionProof(proof);
// Alice sends to BPI to message to Bob
const orderMessageAdded = createInfoMessageFromStore(addOrderMessage);
bpiInstance.postMessage(orderMessageAdded);
// Bob receives\queries messages and fetches the message from Alice
const receivedMessage = bpiInstance.getMessages(orgBob);
// Bob verifies the state against the BPI
var verificationResult = bpiInstance.verifyProof(receivedMessage[0].executionProof)
expect(bpiInstance.storageComponent.getAgreementState().orders.length).to.be.equal(1);
expect(bpiInstance.storageComponent.getAgreementState().orders[0].acceptanceStatus).to.be.equal("pending");
expect(bpiInstance.storageComponent.getAgreementState().proofs.length).to.be.equal(2);
expect(verificationResult).to.be.true;
});
it('Given newly setup workgroup between Alice and Bob, Alice sends request for the order that is invalid, the request is verified against the agreement, error response is sent back to Alice', () => {
const [bpiInstance, exchangeOrdersWorkgroup, workstep] = setupOrderExchangeWorkgroupWithACleanAgreementState();
const orderBusinessObject = new Order("0001", "Purchase", 15, "555333");
const orgAlice = bpiInstance.identityComponent.getOrganizationById("AL1");
const orgBob = bpiInstance.identityComponent.getOrganizationById("BO1");
const addOrderMessage = new BpiMessage("M1", "STORE", orgAlice, orgBob, exchangeOrdersWorkgroup.id, workstep.id, orderBusinessObject);
const proof = bpiInstance.postMessage(addOrderMessage);
expect(proof).to.be.equal("err: workstep execution failed to satisfy the agreement.");
});
it('Given Bob receives a positive result, Bob performs acceptance, the acceptance is returned to Alice', () => {
const [bpiInstance, exchangeOrdersWorkgroup, workstep] = setupOrderExchangeWorkgroupWithACleanAgreementState();
const orderBusinessObject = new Order("0001", "Purchase", 30, "555333");
const orgAlice = bpiInstance.identityComponent.getOrganizationById("AL1");
const orgBob = bpiInstance.identityComponent.getOrganizationById("BO1");
const addOrderMessage = new BpiMessage("M1", "STORE", orgAlice, orgBob, exchangeOrdersWorkgroup.id, workstep.id, orderBusinessObject);
// Alice sends to BPI for agreement update, validation and proof generation
let proof = bpiInstance.postMessage(addOrderMessage);
addOrderMessage.setExecutionProof(proof);
const orderMessageAdded = createInfoMessageFromStore(addOrderMessage);
bpiInstance.postMessage(orderMessageAdded);
// Create workStep2, set workStep's business logic, and add work step to workgroup
const workstep2 = new Workstep("W2", "WRKSTP2");
workstep2.setBusinessLogicToExecute(bpiInstance.storageComponent.getAgreementState().acceptOrder);
exchangeOrdersWorkgroup.addWorkstep(workstep2);
// Bob sends to BPI for agreement update, vlaidation, and proof generation
const acceptOrderMessage = new BpiMessage("M2", "STORE", orgBob, orgAlice, exchangeOrdersWorkgroup.id, workstep2.id, orderBusinessObject);
proof = bpiInstance.postMessage(acceptOrderMessage);
acceptOrderMessage.setExecutionProof(proof);
const orderMessageAccepted = createInfoMessageFromStore(acceptOrderMessage);
bpiInstance.postMessage(orderMessageAccepted);
// Alice receives\queries messages and fetches the message from Bob
const receivedMessage = bpiInstance.getMessages(orgAlice);
// Alice verifies the state against the BPI
const verificationResult = bpiInstance.verifyProof(receivedMessage[0].executionProof);
expect(bpiInstance.storageComponent.getAgreementState().orders.length).to.be.equal(1);
expect(bpiInstance.storageComponent.getAgreementState().orders[0].acceptanceStatus).to.be.equal("accepted");
expect(bpiInstance.storageComponent.getAgreementState().proofs.length).to.be.equal(3);
expect(bpiInstance.storageComponent.getAgreementState().proofs[2]).to.be.equal(proof);
expect(verificationResult).to.be.true;
});
function setupOrderExchangeWorkgroupWithACleanAgreementState(): [BPI, Workgroup, Workstep] {
const bpiInstance = new BPI("AL1", "AliceOrganisation", ["555333"], new IdentityComponent(), new MockMessagingComponent(), new WorkgroupComponent());
const exchangeOrdersWorkgroup = bpiInstance.addWorkgroup("AB1", "ABOrder", []);
const workStep = new Workstep("W1", "WRKSTP1");
workStep.setBusinessLogicToExecute(bpiInstance.storageComponent.getAgreementState().addOrder);
exchangeOrdersWorkgroup.addWorkstep(workStep);
bpiInstance.inviteToWorkgroup("BI1", "BobsInvite", bpiInstance.identityComponent.getOwnerOrganization(), "bob@bob.com", exchangeOrdersWorkgroup.id, bpiInstance.storageComponent.getAgreementState());
const bobsInvitation = bpiInstance.getReceivedInvitationsByEmail("bob@bob.com");
bpiInstance.signInvitation(bobsInvitation[0].id, "bobsSignature", "BO1", "BobOrganisation");
return [bpiInstance, exchangeOrdersWorkgroup, workStep];
}
/**
 * Clones a STORE message and re-tags the copy as an INFO message,
 * leaving the original message untouched.
 *
 * @param storeMessage message to clone
 * @returns a shallow copy with type set to "INFO"
 */
function createInfoMessageFromStore(storeMessage: BpiMessage): BpiMessage {
  const clone = Object.assign({} as BpiMessage, storeMessage);
  clone.type = "INFO";
  return clone;
}
}); | the_stack |
import * as msRest from "@azure/ms-rest-js";
import * as msRestAzure from "@azure/ms-rest-azure-js";
import * as Models from "../models";
import * as Mappers from "../models/zonesMappers";
import * as Parameters from "../models/parameters";
import { DnsManagementClientContext } from "../dnsManagementClientContext";
/** Class representing a Zones. */
export class Zones {
  // Shared service-client context (credentials, subscription id, pipeline).
  private readonly client: DnsManagementClientContext;

  /**
   * Create a Zones.
   * @param {DnsManagementClientContext} client Reference to the service client.
   */
  constructor(client: DnsManagementClientContext) {
    this.client = client;
  }

  /**
   * Creates or updates a DNS zone. Does not modify DNS records within the zone.
   * @param resourceGroupName The name of the resource group.
   * @param zoneName The name of the DNS zone (without a terminating dot).
   * @param parameters Parameters supplied to the CreateOrUpdate operation.
   * @param [options] The optional parameters
   * @returns Promise<Models.ZonesCreateOrUpdateResponse>
   */
  createOrUpdate(resourceGroupName: string, zoneName: string, parameters: Models.Zone, options?: Models.ZonesCreateOrUpdateOptionalParams): Promise<Models.ZonesCreateOrUpdateResponse>;
  /**
   * @param resourceGroupName The name of the resource group.
   * @param zoneName The name of the DNS zone (without a terminating dot).
   * @param parameters Parameters supplied to the CreateOrUpdate operation.
   * @param callback The callback
   */
  createOrUpdate(resourceGroupName: string, zoneName: string, parameters: Models.Zone, callback: msRest.ServiceCallback<Models.Zone>): void;
  /**
   * @param resourceGroupName The name of the resource group.
   * @param zoneName The name of the DNS zone (without a terminating dot).
   * @param parameters Parameters supplied to the CreateOrUpdate operation.
   * @param options The optional parameters
   * @param callback The callback
   */
  createOrUpdate(resourceGroupName: string, zoneName: string, parameters: Models.Zone, options: Models.ZonesCreateOrUpdateOptionalParams, callback: msRest.ServiceCallback<Models.Zone>): void;
  // Implementation signature: accepts either the promise or callback form;
  // sendOperationRequest invokes the callback when one is supplied.
  createOrUpdate(resourceGroupName: string, zoneName: string, parameters: Models.Zone, options?: Models.ZonesCreateOrUpdateOptionalParams | msRest.ServiceCallback<Models.Zone>, callback?: msRest.ServiceCallback<Models.Zone>): Promise<Models.ZonesCreateOrUpdateResponse> {
    return this.client.sendOperationRequest(
      {
        resourceGroupName,
        zoneName,
        parameters,
        options
      },
      createOrUpdateOperationSpec,
      callback) as Promise<Models.ZonesCreateOrUpdateResponse>;
  }

  /**
   * Deletes a DNS zone. WARNING: All DNS records in the zone will also be deleted. This operation
   * cannot be undone.
   * @param resourceGroupName The name of the resource group.
   * @param zoneName The name of the DNS zone (without a terminating dot).
   * @param [options] The optional parameters
   * @returns Promise<msRest.RestResponse>
   */
  deleteMethod(resourceGroupName: string, zoneName: string, options?: Models.ZonesDeleteMethodOptionalParams): Promise<msRest.RestResponse> {
    // Convenience wrapper: starts the long-running delete and blocks until
    // the poller reports completion.
    return this.beginDeleteMethod(resourceGroupName,zoneName,options)
      .then(lroPoller => lroPoller.pollUntilFinished());
  }

  /**
   * Gets a DNS zone. Retrieves the zone properties, but not the record sets within the zone.
   * @param resourceGroupName The name of the resource group.
   * @param zoneName The name of the DNS zone (without a terminating dot).
   * @param [options] The optional parameters
   * @returns Promise<Models.ZonesGetResponse>
   */
  get(resourceGroupName: string, zoneName: string, options?: msRest.RequestOptionsBase): Promise<Models.ZonesGetResponse>;
  /**
   * @param resourceGroupName The name of the resource group.
   * @param zoneName The name of the DNS zone (without a terminating dot).
   * @param callback The callback
   */
  get(resourceGroupName: string, zoneName: string, callback: msRest.ServiceCallback<Models.Zone>): void;
  /**
   * @param resourceGroupName The name of the resource group.
   * @param zoneName The name of the DNS zone (without a terminating dot).
   * @param options The optional parameters
   * @param callback The callback
   */
  get(resourceGroupName: string, zoneName: string, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.Zone>): void;
  get(resourceGroupName: string, zoneName: string, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.Zone>, callback?: msRest.ServiceCallback<Models.Zone>): Promise<Models.ZonesGetResponse> {
    return this.client.sendOperationRequest(
      {
        resourceGroupName,
        zoneName,
        options
      },
      getOperationSpec,
      callback) as Promise<Models.ZonesGetResponse>;
  }

  /**
   * Updates a DNS zone. Does not modify DNS records within the zone.
   * @param resourceGroupName The name of the resource group.
   * @param zoneName The name of the DNS zone (without a terminating dot).
   * @param parameters Parameters supplied to the Update operation.
   * @param [options] The optional parameters
   * @returns Promise<Models.ZonesUpdateResponse>
   */
  update(resourceGroupName: string, zoneName: string, parameters: Models.ZoneUpdate, options?: Models.ZonesUpdateOptionalParams): Promise<Models.ZonesUpdateResponse>;
  /**
   * @param resourceGroupName The name of the resource group.
   * @param zoneName The name of the DNS zone (without a terminating dot).
   * @param parameters Parameters supplied to the Update operation.
   * @param callback The callback
   */
  update(resourceGroupName: string, zoneName: string, parameters: Models.ZoneUpdate, callback: msRest.ServiceCallback<Models.Zone>): void;
  /**
   * @param resourceGroupName The name of the resource group.
   * @param zoneName The name of the DNS zone (without a terminating dot).
   * @param parameters Parameters supplied to the Update operation.
   * @param options The optional parameters
   * @param callback The callback
   */
  update(resourceGroupName: string, zoneName: string, parameters: Models.ZoneUpdate, options: Models.ZonesUpdateOptionalParams, callback: msRest.ServiceCallback<Models.Zone>): void;
  update(resourceGroupName: string, zoneName: string, parameters: Models.ZoneUpdate, options?: Models.ZonesUpdateOptionalParams | msRest.ServiceCallback<Models.Zone>, callback?: msRest.ServiceCallback<Models.Zone>): Promise<Models.ZonesUpdateResponse> {
    return this.client.sendOperationRequest(
      {
        resourceGroupName,
        zoneName,
        parameters,
        options
      },
      updateOperationSpec,
      callback) as Promise<Models.ZonesUpdateResponse>;
  }

  /**
   * Lists the DNS zones within a resource group.
   * @param resourceGroupName The name of the resource group.
   * @param [options] The optional parameters
   * @returns Promise<Models.ZonesListByResourceGroupResponse>
   */
  listByResourceGroup(resourceGroupName: string, options?: Models.ZonesListByResourceGroupOptionalParams): Promise<Models.ZonesListByResourceGroupResponse>;
  /**
   * @param resourceGroupName The name of the resource group.
   * @param callback The callback
   */
  listByResourceGroup(resourceGroupName: string, callback: msRest.ServiceCallback<Models.ZoneListResult>): void;
  /**
   * @param resourceGroupName The name of the resource group.
   * @param options The optional parameters
   * @param callback The callback
   */
  listByResourceGroup(resourceGroupName: string, options: Models.ZonesListByResourceGroupOptionalParams, callback: msRest.ServiceCallback<Models.ZoneListResult>): void;
  listByResourceGroup(resourceGroupName: string, options?: Models.ZonesListByResourceGroupOptionalParams | msRest.ServiceCallback<Models.ZoneListResult>, callback?: msRest.ServiceCallback<Models.ZoneListResult>): Promise<Models.ZonesListByResourceGroupResponse> {
    return this.client.sendOperationRequest(
      {
        resourceGroupName,
        options
      },
      listByResourceGroupOperationSpec,
      callback) as Promise<Models.ZonesListByResourceGroupResponse>;
  }

  /**
   * Lists the DNS zones in all resource groups in a subscription.
   * @param [options] The optional parameters
   * @returns Promise<Models.ZonesListResponse>
   */
  list(options?: Models.ZonesListOptionalParams): Promise<Models.ZonesListResponse>;
  /**
   * @param callback The callback
   */
  list(callback: msRest.ServiceCallback<Models.ZoneListResult>): void;
  /**
   * @param options The optional parameters
   * @param callback The callback
   */
  list(options: Models.ZonesListOptionalParams, callback: msRest.ServiceCallback<Models.ZoneListResult>): void;
  list(options?: Models.ZonesListOptionalParams | msRest.ServiceCallback<Models.ZoneListResult>, callback?: msRest.ServiceCallback<Models.ZoneListResult>): Promise<Models.ZonesListResponse> {
    return this.client.sendOperationRequest(
      {
        options
      },
      listOperationSpec,
      callback) as Promise<Models.ZonesListResponse>;
  }

  /**
   * Deletes a DNS zone. WARNING: All DNS records in the zone will also be deleted. This operation
   * cannot be undone.
   * @param resourceGroupName The name of the resource group.
   * @param zoneName The name of the DNS zone (without a terminating dot).
   * @param [options] The optional parameters
   * @returns Promise<msRestAzure.LROPoller> poller the caller can use to track
   * or resume the long-running delete; see deleteMethod for the blocking form.
   */
  beginDeleteMethod(resourceGroupName: string, zoneName: string, options?: Models.ZonesBeginDeleteMethodOptionalParams): Promise<msRestAzure.LROPoller> {
    return this.client.sendLRORequest(
      {
        resourceGroupName,
        zoneName,
        options
      },
      beginDeleteMethodOperationSpec,
      options);
  }

  /**
   * Lists the DNS zones within a resource group.
   * @param nextPageLink The NextLink from the previous successful call to List operation.
   * @param [options] The optional parameters
   * @returns Promise<Models.ZonesListByResourceGroupNextResponse>
   */
  listByResourceGroupNext(nextPageLink: string, options?: msRest.RequestOptionsBase): Promise<Models.ZonesListByResourceGroupNextResponse>;
  /**
   * @param nextPageLink The NextLink from the previous successful call to List operation.
   * @param callback The callback
   */
  listByResourceGroupNext(nextPageLink: string, callback: msRest.ServiceCallback<Models.ZoneListResult>): void;
  /**
   * @param nextPageLink The NextLink from the previous successful call to List operation.
   * @param options The optional parameters
   * @param callback The callback
   */
  listByResourceGroupNext(nextPageLink: string, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.ZoneListResult>): void;
  listByResourceGroupNext(nextPageLink: string, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.ZoneListResult>, callback?: msRest.ServiceCallback<Models.ZoneListResult>): Promise<Models.ZonesListByResourceGroupNextResponse> {
    return this.client.sendOperationRequest(
      {
        nextPageLink,
        options
      },
      listByResourceGroupNextOperationSpec,
      callback) as Promise<Models.ZonesListByResourceGroupNextResponse>;
  }

  /**
   * Lists the DNS zones in all resource groups in a subscription.
   * @param nextPageLink The NextLink from the previous successful call to List operation.
   * @param [options] The optional parameters
   * @returns Promise<Models.ZonesListNextResponse>
   */
  listNext(nextPageLink: string, options?: msRest.RequestOptionsBase): Promise<Models.ZonesListNextResponse>;
  /**
   * @param nextPageLink The NextLink from the previous successful call to List operation.
   * @param callback The callback
   */
  listNext(nextPageLink: string, callback: msRest.ServiceCallback<Models.ZoneListResult>): void;
  /**
   * @param nextPageLink The NextLink from the previous successful call to List operation.
   * @param options The optional parameters
   * @param callback The callback
   */
  listNext(nextPageLink: string, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.ZoneListResult>): void;
  listNext(nextPageLink: string, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.ZoneListResult>, callback?: msRest.ServiceCallback<Models.ZoneListResult>): Promise<Models.ZonesListNextResponse> {
    return this.client.sendOperationRequest(
      {
        nextPageLink,
        options
      },
      listNextOperationSpec,
      callback) as Promise<Models.ZonesListNextResponse>;
  }
}
// Operation Specifications
// Declarative request/response descriptions consumed by sendOperationRequest;
// the shared serializer maps JSON wire payloads to/from the Mappers models.
const serializer = new msRest.Serializer(Mappers);

// PUT a zone; supports If-Match / If-None-Match concurrency headers.
const createOrUpdateOperationSpec: msRest.OperationSpec = {
  httpMethod: "PUT",
  path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/dnsZones/{zoneName}",
  urlParameters: [
    Parameters.resourceGroupName,
    Parameters.zoneName,
    Parameters.subscriptionId
  ],
  queryParameters: [
    Parameters.apiVersion
  ],
  headerParameters: [
    Parameters.ifMatch,
    Parameters.ifNoneMatch,
    Parameters.acceptLanguage
  ],
  requestBody: {
    parameterPath: "parameters",
    mapper: {
      ...Mappers.Zone,
      required: true
    }
  },
  responses: {
    200: {
      bodyMapper: Mappers.Zone
    },
    201: {
      bodyMapper: Mappers.Zone
    },
    default: {
      bodyMapper: Mappers.CloudError
    }
  },
  serializer
};

// GET a single zone's properties.
const getOperationSpec: msRest.OperationSpec = {
  httpMethod: "GET",
  path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/dnsZones/{zoneName}",
  urlParameters: [
    Parameters.resourceGroupName,
    Parameters.zoneName,
    Parameters.subscriptionId
  ],
  queryParameters: [
    Parameters.apiVersion
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {
      bodyMapper: Mappers.Zone
    },
    default: {
      bodyMapper: Mappers.CloudError
    }
  },
  serializer
};

// PATCH a zone (tags etc.); supports If-Match concurrency header.
const updateOperationSpec: msRest.OperationSpec = {
  httpMethod: "PATCH",
  path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/dnsZones/{zoneName}",
  urlParameters: [
    Parameters.resourceGroupName,
    Parameters.zoneName,
    Parameters.subscriptionId
  ],
  queryParameters: [
    Parameters.apiVersion
  ],
  headerParameters: [
    Parameters.ifMatch,
    Parameters.acceptLanguage
  ],
  requestBody: {
    parameterPath: "parameters",
    mapper: {
      ...Mappers.ZoneUpdate,
      required: true
    }
  },
  responses: {
    200: {
      bodyMapper: Mappers.Zone
    },
    default: {
      bodyMapper: Mappers.CloudError
    }
  },
  serializer
};

// GET zones in one resource group; `top` limits the page size.
const listByResourceGroupOperationSpec: msRest.OperationSpec = {
  httpMethod: "GET",
  path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/dnsZones",
  urlParameters: [
    Parameters.resourceGroupName,
    Parameters.subscriptionId
  ],
  queryParameters: [
    Parameters.top,
    Parameters.apiVersion
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {
      bodyMapper: Mappers.ZoneListResult
    },
    default: {
      bodyMapper: Mappers.CloudError
    }
  },
  serializer
};

// GET zones across the whole subscription.
const listOperationSpec: msRest.OperationSpec = {
  httpMethod: "GET",
  path: "subscriptions/{subscriptionId}/providers/Microsoft.Network/dnszones",
  urlParameters: [
    Parameters.subscriptionId
  ],
  queryParameters: [
    Parameters.top,
    Parameters.apiVersion
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {
      bodyMapper: Mappers.ZoneListResult
    },
    default: {
      bodyMapper: Mappers.CloudError
    }
  },
  serializer
};

// DELETE a zone: long-running operation, hence the empty 200/202/204 bodies.
const beginDeleteMethodOperationSpec: msRest.OperationSpec = {
  httpMethod: "DELETE",
  path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/dnsZones/{zoneName}",
  urlParameters: [
    Parameters.resourceGroupName,
    Parameters.zoneName,
    Parameters.subscriptionId
  ],
  queryParameters: [
    Parameters.apiVersion
  ],
  headerParameters: [
    Parameters.ifMatch,
    Parameters.acceptLanguage
  ],
  responses: {
    200: {},
    202: {},
    204: {},
    default: {
      bodyMapper: Mappers.CloudError
    }
  },
  serializer
};

// Follows the opaque nextLink URL returned by listByResourceGroup paging.
const listByResourceGroupNextOperationSpec: msRest.OperationSpec = {
  httpMethod: "GET",
  baseUrl: "https://management.azure.com",
  path: "{nextLink}",
  urlParameters: [
    Parameters.nextPageLink
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {
      bodyMapper: Mappers.ZoneListResult
    },
    default: {
      bodyMapper: Mappers.CloudError
    }
  },
  serializer
};

// Follows the opaque nextLink URL returned by list paging.
const listNextOperationSpec: msRest.OperationSpec = {
  httpMethod: "GET",
  baseUrl: "https://management.azure.com",
  path: "{nextLink}",
  urlParameters: [
    Parameters.nextPageLink
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {
      bodyMapper: Mappers.ZoneListResult
    },
    default: {
      bodyMapper: Mappers.CloudError
    }
  },
  serializer
};
import { ChallengeRegistry } from "@connext/contracts";
import {
AppInstanceJson,
Bytes32,
ChallengeUpdatedEventPayload,
ConditionalTransactionCommitmentJSON,
IBackupService,
IStoreService,
ILoggerService,
MinimalTransaction,
SetStateCommitmentJSON,
StateChannelJSON,
StateProgressedEventPayload,
STORE_SCHEMA_VERSION,
StoredAppChallenge,
WithdrawalMonitorObject,
StoredAppChallengeStatus,
JsonRpcProvider,
Contract,
ChallengeEvents,
} from "@connext/types";
import { toBN, nullLogger, getSignerAddressFromPublicIdentifier, stringify } from "@connext/utils";
import pWaterfall from "p-waterfall";
import { utils } from "ethers";
import { storeKeys } from "./constants";
import { KeyValueStorage } from "./types";
// Storage key prefixes used to namespace entries in the key/value store.
// STORE_SCHEMA_VERSION is renamed locally to avoid clashing with the
// STORE_SCHEMA_VERSION constant imported from @connext/types.
const {
  BLOCK_PROCESSED,
  CHALLENGE,
  CHALLENGE_UPDATED_EVENT,
  CHANNEL,
  CONDITIONAL_COMMITMENT,
  SET_STATE_COMMITMENT,
  SETUP_COMMITMENT,
  STATE_PROGRESSED_EVENT,
  STORE,
  STORE_SCHEMA_VERSION: STORE_SCHEMA_VERSION_KEY,
  WITHDRAWAL_COMMITMENT,
} = storeKeys;
// ethers ABI coder; used by addOnchainAction to decode on-chain action data.
const { defaultAbiCoder } = utils;
/**
 * Rebuilds a raw channel JSON blob into a StateChannelJSON, re-creating the
 * [id, value] tuple arrays for proposals and app instances. Falsy
 * (null/undefined) arrays are passed through unchanged.
 */
const properlyConvertChannelNullVals = (json: any): StateChannelJSON => {
  const proposals = json.proposedAppInstances
    ? json.proposedAppInstances.map(([id, proposal]) => [id, proposal])
    : json.proposedAppInstances;
  const apps = json.appInstances
    ? json.appInstances.map(([id, app]) => [id, app])
    : json.appInstances;
  return { ...json, proposedAppInstances: proposals, appInstances: apps };
};
/**
* This class wraps a general key value storage service to become an `IStoreService`
*/
export class StoreService implements IStoreService {
  // Queue of pending store operations; presumably drained by `execute` to
  // serialize reads/writes against the underlying storage — TODO confirm
  // (the `execute` implementation is outside this view).
  private deferred: ((store: any) => Promise<any>)[] = [];

  /**
   * @param storage underlying key/value storage backend
   * @param backupService optional remote backup used by restore()
   * @param log logger; defaults to a no-op logger
   */
  constructor(
    private readonly storage: KeyValueStorage,
    private readonly backupService?: IBackupService,
    private readonly log: ILoggerService = nullLogger,
  ) {}
////////////////////////////////////////
//// Admin methods
init(): Promise<void> {
return this.storage.init();
}
/**
 * Removes every key held by the underlying storage (sequentially), then
 * resets the aggregate store entry to an empty object.
 */
async clear(): Promise<void> {
  const allKeys = await this.storage.getKeys();
  for (const storageKey of allKeys) {
    await this.storage.removeItem(storageKey);
  }
  return this.storage.setItem(STORE, {});
}
close(): Promise<void> {
return this.storage.close();
}
/**
 * Wipes local state, then repopulates the store from the backup service.
 * Throws if no backup service was configured (the local state is already
 * cleared at that point). Logs and does nothing if the backup yields no
 * data or no entry under the STORE path.
 */
async restore(): Promise<void> {
  await this.clear();
  if (!this.backupService) {
    throw new Error(`No backup provided, store cleared`);
  }
  const restoredPairs = await this.backupService.restore();
  if (!restoredPairs) {
    this.log.warn(`Warning: unable to restore data from the backup service. Doing nothing.`);
    return;
  }
  return this.execute(async () => {
    const storePair = restoredPairs.find((pair) => pair && pair.path === STORE);
    if (!storePair) {
      this.log.warn(`Warning: Restored store has the wrong prefix. Doing nothing.`);
      return;
    }
    return this.saveStore(storePair.value);
  });
}
////////////////////////////////////////
//// Misc Getters
/**
 * Returns all stored challenges that are still active, i.e. whose status is
 * neither NO_CHALLENGE nor CONDITIONAL_SENT.
 *
 * @returns list of active StoredAppChallenge records
 */
async getActiveChallenges(): Promise<StoredAppChallenge[]> {
  // get all stored challenges; CHALLENGE_UPDATED_EVENT keys share the
  // CHALLENGE prefix, so exclude them explicitly
  const keys = await this.storage.getKeys();
  const challengeKeys = keys.filter(
    (key) =>
      key.includes(CHALLENGE) && !key.includes(CHALLENGE_UPDATED_EVENT),
  );
  const inactiveStatuses = [
    StoredAppChallengeStatus.NO_CHALLENGE,
    StoredAppChallengeStatus.CONDITIONAL_SENT,
  ];
  const challenges = await Promise.all(challengeKeys.map((key) => this.storage.getItem(key)));
  // FIX: the original used `!inactiveStatuses.find(...)`, which tests the
  // *found value* for truthiness; if an inactive status were ever a falsy
  // value the filter would mis-classify it. `.some` yields a boolean.
  return challenges.filter(
    (challenge) => !!challenge && !inactiveStatuses.some((status) => status === challenge.status),
  );
}
async getFreeBalance(multisigAddress: string): Promise<AppInstanceJson | undefined> {
const channel = await this.getStateChannel(multisigAddress);
if (!channel || !channel.freeBalanceAppInstance) {
return undefined;
}
return channel.freeBalanceAppInstance;
}
/**
 * Returns the highest block number recorded by updateLatestProcessedBlock,
 * or 0 when nothing has been recorded yet.
 */
async getLatestProcessedBlock(): Promise<number> {
  const stored = await this.storage.getItem(this.storage.getKey(BLOCK_PROCESSED));
  return stored ? parseInt(`${stored}`) : 0;
}
////////////////////////////////////////
//// Misc Setters
/**
 * Syncs the local record of an app with on-chain challenge state: reads the
 * challenge registry, finds the newest StateProgressed event, verifies the
 * on-chain action applies directly on top of our recorded nonce, and saves
 * the decoded action plus a single-signed set-state commitment via
 * updateAppInstance.
 *
 * @param appIdentityHash identity hash of the app to sync
 * @param provider JSON-RPC provider used for registry reads and log queries
 * @throws if the channel/app/commitment cannot be found, if no
 *   StateProgressed events are found in the scanned block window, if the
 *   on-chain nonce is not exactly ours + 1, or if the app has no action
 *   encoding
 */
async addOnchainAction(appIdentityHash: Bytes32, provider: JsonRpcProvider): Promise<void> {
  // fetch existing data (renamed callback param to avoid shadowing `store`)
  const store = await this.execute((s) => s);
  const channel = await this.getStateChannelByAppIdentityHash(appIdentityHash);
  if (!channel) {
    throw new Error(`Could not find channel for app ${appIdentityHash}`);
  }
  const ourAppEntry = channel.appInstances.find(([id]) => id === appIdentityHash);
  if (!ourAppEntry) {
    // FIX: removed stray trailing "}" from the original error message
    throw new Error(`Could not find app with identity hash ${appIdentityHash}`);
  }
  const ourApp = ourAppEntry[1];
  const ourLatestSetState = this.getLatestSetStateCommitment(store, appIdentityHash);
  if (!ourApp || !ourLatestSetState) {
    throw new Error(`No record of channel or app associated with ${appIdentityHash}`);
  }
  // fetch onchain data
  const registry = new Contract(
    ourLatestSetState.challengeRegistryAddress,
    ChallengeRegistry.abi,
    provider,
  );
  const onchainChallenge = await registry.getAppChallenge(appIdentityHash);
  if (onchainChallenge.versionNumber.eq(ourLatestSetState.versionNumber)) {
    // already in sync, nothing to do
    return;
  }
  // only need state progressed events because challenge should contain
  // all relevant information from challenge updated events
  const fromBlock = (await provider.getBlockNumber()) - 8640; // last 24h — assumes ~10s blocks, TODO confirm
  const rawProgressedLogs = await provider.getLogs({
    // TODO: filter state progressed by appID
    ...registry.filters[ChallengeEvents.StateProgressed](),
    fromBlock: fromBlock < 0 ? 0 : fromBlock,
  });
  const onchainProgressedLogs = rawProgressedLogs.map((log) => {
    const {
      identityHash,
      action,
      versionNumber,
      timeout,
      turnTaker,
      signature,
    } = registry.interface.parseLog(log).args;
    return { identityHash, action, versionNumber, timeout, turnTaker, signature };
  });
  // FIX: guard the empty case explicitly — the original destructured
  // sort(...)[0] and would throw an opaque TypeError here.
  if (onchainProgressedLogs.length === 0) {
    throw new Error(`No StateProgressed events found for app ${appIdentityHash}`);
  }
  // get the expected final state from the onchain data (highest version number)
  const {
    action: encodedAction,
    versionNumber,
    timeout,
    turnTaker,
    signature,
  } = onchainProgressedLogs.sort((a, b) => b.versionNumber.sub(a.versionNumber).toNumber())[0];
  // ensure action from event can be applied on top of our app
  if (!versionNumber.eq(ourApp.latestVersionNumber + 1)) {
    throw new Error(
      `Action cannot be applied directly onto our record of app. Record has nonce of ${
        ourApp.latestVersionNumber
      }, and action results in nonce ${versionNumber.toString()}`,
    );
  }
  // generate set state commitment + update app instance
  // we CANNOT generate any signatures here, and instead will save the
  // app as a single signed update. (i.e. as in the take-action protocol
  // for the initiator). This means there will NOT be an app instance
  // saved at the same nonce as the most recent single signed set-state
  // commitment
  const appSigners = [ourApp.initiatorIdentifier, ourApp.responderIdentifier].map(
    getSignerAddressFromPublicIdentifier,
  );
  // which participant signed the on-chain action determines signature slot
  const turnTakerIdx = appSigners.findIndex((signer) => signer === turnTaker);
  const setStateJson = {
    ...ourLatestSetState,
    versionNumber: onchainChallenge.versionNumber,
    appStateHash: onchainChallenge.appStateHash,
    signatures: turnTakerIdx === 0 ? [signature, undefined] : [undefined, signature],
    stateTimeout: timeout,
  };
  const encoding = ourApp?.abiEncodings?.actionEncoding;
  if (!encoding) {
    throw new Error(`App has no action encoding: ${stringify(ourApp)}`);
  }
  const updatedApp = {
    ...ourApp,
    latestAction: defaultAbiCoder.decode([encoding], encodedAction),
  };
  await this.updateAppInstance(channel.multisigAddress, updatedApp, setStateJson);
}
/** Persists the highest block number processed so far. */
updateLatestProcessedBlock(blockNumber: number): Promise<void> {
  return this.storage.setItem(this.storage.getKey(BLOCK_PROCESSED), blockNumber);
}
/**
 * Overwrites a channel's monotonic proposal counter.
 *
 * @param multisigAddress multisig address identifying the channel
 * @param numProposedApps new value for monotonicNumProposedApps
 * @param stateChannel unused here; kept for interface compatibility
 * @throws if no channel exists at multisigAddress
 */
async updateNumProposedApps(
  multisigAddress: string,
  numProposedApps: number,
  stateChannel: StateChannelJSON,
): Promise<void> {
  return this.execute((store) => {
    const channel = this.getStateChannelFromStore(store, multisigAddress);
    if (!channel) {
      // FIX: corrected typo "incremement" in the original error message
      throw new Error(`Can't increment number of proposed apps without channel`);
    }
    const updatedStore = this.setStateChannel(store, {
      ...channel,
      monotonicNumProposedApps: numProposedApps,
    });
    return this.saveStore(updatedStore);
  });
}
////////////////////////////////////////
//// AppChallenges
/**
 * Loads the stored challenge for an app, or undefined when none exists.
 */
async getAppChallenge(identityHash: string): Promise<StoredAppChallenge | undefined> {
  const challengeKey = this.storage.getKey(CHALLENGE, identityHash);
  const stored = await this.storage.getItem<StoredAppChallenge>(challengeKey);
  return stored === null ? undefined : stored;
}
/**
 * Upserts a challenge record, keyed by its app identity hash; later writes
 * overwrite earlier ones.
 */
async saveAppChallenge(data: ChallengeUpdatedEventPayload | StoredAppChallenge): Promise<void> {
  const challengeKey = this.storage.getKey(CHALLENGE, data.identityHash);
  return this.storage.setItem(challengeKey, data);
}
////////////////////////////////////////
//// AppInstance
/**
 * Installs a proposed app: moves it from proposedAppInstances into
 * appInstances, replaces the channel's free balance app state, and swaps the
 * free balance set-state commitment for the newly signed one.
 *
 * @param multisigAddress channel to install into
 * @param appInstance the app being installed
 * @param freeBalanceAppInstance updated free balance app state
 * @param signedFreeBalanceUpdate new signed free balance commitment
 * @throws if no channel exists at multisigAddress
 */
async createAppInstance(
  multisigAddress: string,
  appInstance: AppInstanceJson,
  freeBalanceAppInstance: AppInstanceJson,
  signedFreeBalanceUpdate: SetStateCommitmentJSON,
): Promise<void> {
  return this.execute((store) => {
    const channel = this.getStateChannelFromStore(store, multisigAddress);
    if (!channel) {
      throw new Error(`Can't create app instance without channel`);
    }
    // Idempotency: warn (don't throw) if the app is already installed.
    if (this.hasAppIdentityHash(appInstance.identityHash, channel.appInstances)) {
      this.log.warn(
        `appInstance.identityHash ${appInstance.identityHash} already exists, will not add appInstance to ${multisigAddress}`,
      );
    } else {
      // add app instance
      this.log.debug(`Adding app instance ${appInstance.identityHash} to channel`);
      channel.appInstances.push([appInstance.identityHash, appInstance]);
      // remove proposal
      // NOTE(review): if the proposal is missing, findIndex returns -1 and
      // splice(-1, 1) removes the LAST proposal — confirm callers always
      // install from an existing proposal.
      const idx = channel.proposedAppInstances.findIndex(
        ([app]) => app === appInstance.identityHash,
      );
      channel.proposedAppInstances.splice(idx, 1);
      this.log.debug(`Removed from proposals`);
    }
    const oldFreeBalanceUpdate = this.getLatestSetStateCommitment(
      store,
      freeBalanceAppInstance.identityHash,
    );
    let updatedStore = store;
    if (oldFreeBalanceUpdate) {
      this.log.debug(
        `Removing stale free balance update at nonce ${toBN(
          oldFreeBalanceUpdate.versionNumber,
        ).toString()}`,
      );
      updatedStore = this.unsetSetStateCommitment(
        updatedStore,
        freeBalanceAppInstance.identityHash,
        toBN(oldFreeBalanceUpdate.versionNumber).toString(),
      );
    }
    this.log.debug(
      `Adding conditional transaction, new free balance state, and revised channel to store`,
    );
    // NOTE(review): this re-derives from `store`, not `updatedStore`; if
    // unsetSetStateCommitment returns a new object rather than mutating
    // `store` in place, the stale-commitment removal above is discarded —
    // verify against the helper implementations (not visible here).
    updatedStore = this.setSetStateCommitment(
      this.setStateChannel(store, { ...channel, freeBalanceAppInstance }),
      freeBalanceAppInstance.identityHash,
      signedFreeBalanceUpdate,
    );
    return this.saveStore(updatedStore);
  });
}
/**
 * Looks up an installed app instance (including the free balance app) by its
 * identity hash. Returns undefined when the channel or the app is missing.
 */
async getAppInstance(appIdentityHash: string): Promise<AppInstanceJson | undefined> {
  const channel = await this.getStateChannelByAppIdentityHash(appIdentityHash);
  if (!channel) {
    return undefined;
  }
  // The free balance app lives outside appInstances, so include it in the search.
  const searchSpace = channel.freeBalanceAppInstance
    ? channel.appInstances.concat([
        [channel.freeBalanceAppInstance.identityHash, channel.freeBalanceAppInstance],
      ])
    : channel.appInstances;
  if (!this.hasAppIdentityHash(appIdentityHash, searchSpace)) {
    return undefined;
  }
  const match = searchSpace.find(([id]) => id === appIdentityHash);
  return match && match[1] ? match[1] : undefined;
}
/**
 * Replaces an installed app's state and stores its new set-state commitment.
 * When the new commitment is doubly signed, the previous commitment for the
 * app is removed first.
 *
 * @param multisigAddress channel containing the app
 * @param appInstance updated app state
 * @param signedSetStateCommitment commitment for the new state
 * @throws if the channel or the app instance cannot be found
 */
async updateAppInstance(
  multisigAddress: string,
  appInstance: AppInstanceJson,
  signedSetStateCommitment: SetStateCommitmentJSON,
): Promise<void> {
  return this.execute((store) => {
    const channel = this.getStateChannelFromStore(store, multisigAddress);
    if (!channel) {
      throw new Error(`Can't update app instance without channel`);
    }
    if (!this.hasAppIdentityHash(appInstance.identityHash, channel.appInstances)) {
      throw new Error(`Could not find app instance with hash ${appInstance.identityHash}`);
    }
    const idx = channel.appInstances.findIndex(([app]) => app === appInstance.identityHash);
    channel.appInstances[idx] = [appInstance.identityHash, appInstance];
    this.log.debug(`Updated existing app instance`);
    const oldCommitment = this.getLatestSetStateCommitment(store, appInstance.identityHash);
    let updatedStore = store;
    // Only drop the old commitment when the replacement carries both
    // signatures (a single-signed update keeps the prior commitment around).
    if (oldCommitment && signedSetStateCommitment.signatures.filter((x) => !!x).length === 2) {
      this.log.debug(
        `Removing stale commitment at ${toBN(oldCommitment.versionNumber).toString()}`,
      );
      updatedStore = this.unsetSetStateCommitment(
        updatedStore,
        appInstance.identityHash,
        toBN(oldCommitment.versionNumber).toString(),
      );
    }
    this.log.debug(
      `Updating channel with new app instance at nonce ${appInstance.latestVersionNumber}`,
    );
    // NOTE(review): derives from `store`, not `updatedStore`; if
    // unsetSetStateCommitment returns a new object rather than mutating in
    // place, the removal above is discarded — confirm helper semantics.
    updatedStore = this.setSetStateCommitment(
      this.setStateChannel(store, channel),
      appInstance.identityHash,
      signedSetStateCommitment,
    );
    return this.saveStore(updatedStore);
  });
}
/**
 * Uninstalls an app: removes it from the channel, drops its commitments, and
 * swaps in the new free balance state and signed free balance commitment.
 * Missing channel / free balance / app are treated as no-ops (debug-logged).
 *
 * @param multisigAddress channel containing the app
 * @param appInstance the app being uninstalled
 * @param freeBalanceAppInstance updated free balance app state
 * @param signedFreeBalanceUpdate new signed free balance commitment
 */
async removeAppInstance(
  multisigAddress: string,
  appInstance: AppInstanceJson,
  freeBalanceAppInstance: AppInstanceJson,
  signedFreeBalanceUpdate: SetStateCommitmentJSON,
): Promise<void> {
  return this.execute((store) => {
    const channel = this.getStateChannelFromStore(store, multisigAddress);
    if (!channel) {
      this.log.debug(
        `No channel found in store with multisig: ${multisigAddress}, doing nothing`,
      );
      return store;
    }
    if (!channel.freeBalanceAppInstance) {
      this.log.debug(
        `No free balance app found in store with multisig: ${multisigAddress}, doing nothing`,
      );
      return store;
    }
    if (!this.hasAppIdentityHash(appInstance.identityHash, channel.appInstances)) {
      // does not exist
      this.log.debug(
        `No app with hash ${appInstance.identityHash} found in channel with multisig ${multisigAddress}`,
      );
      return store;
    }
    const idx = channel.appInstances.findIndex(([app]) => app === appInstance.identityHash);
    const presplice = channel.appInstances.length;
    channel.appInstances.splice(idx, 1);
    this.log.debug(
      `Removed app instance from channel (prev length: ${presplice}, curr: ${channel.appInstances.length})`,
    );
    const oldFreeBalanceUpdate = this.getLatestSetStateCommitment(
      store,
      freeBalanceAppInstance.identityHash,
    );
    let updatedStore = store;
    if (oldFreeBalanceUpdate) {
      this.log.debug(
        `Unsetting free balance set state commitment at nonce ${toBN(
          oldFreeBalanceUpdate.versionNumber,
        ).toString()}`,
      );
      updatedStore = this.unsetSetStateCommitment(
        updatedStore,
        freeBalanceAppInstance.identityHash,
        toBN(oldFreeBalanceUpdate.versionNumber).toString(),
      );
    }
    this.log.debug(`Removing app commitments from store`);
    updatedStore = this.unsetConditionalTransactionCommitment(
      this.unsetSetStateCommitment(
        updatedStore,
        appInstance.identityHash,
        toBN(appInstance.latestVersionNumber).toString(),
      ),
      appInstance.identityHash,
    );
    this.log.debug(`Updating channel with new free balance updates without app instance`);
    // NOTE(review): derives from `store`, not `updatedStore`; if the unset
    // helpers return new objects rather than mutating `store` in place, the
    // commitment removals above are discarded — confirm helper semantics.
    updatedStore = this.setSetStateCommitment(
      this.setStateChannel(store, {
        ...channel,
        freeBalanceAppInstance,
      }),
      channel.freeBalanceAppInstance!.identityHash,
      signedFreeBalanceUpdate,
    );
    this.log.debug(`Saved updated store for channel nonce ${channel.monotonicNumProposedApps}`);
    return this.saveStore(updatedStore);
  });
}
////////////////////////////////////////
//// AppProposal
/**
 * Records a new app proposal on a channel together with its signed set-state
 * and conditional transaction commitments, and bumps the channel's proposal
 * counter. Re-proposing an existing identity hash is a warned no-op for the
 * proposal itself (commitments are still written).
 *
 * @throws if no channel exists at multisigAddress
 */
async createAppProposal(
  multisigAddress: string,
  appInstance: AppInstanceJson,
  monotonicNumProposedApps: number,
  signedSetStateCommitment: SetStateCommitmentJSON,
  signedConditionalTxCommitment: ConditionalTransactionCommitmentJSON,
): Promise<void> {
  return this.execute((store) => {
    const channel = this.getStateChannelFromStore(store, multisigAddress);
    if (!channel) {
      throw new Error(`Can't save app proposal without channel`);
    }
    if (this.hasAppIdentityHash(appInstance.identityHash, channel.proposedAppInstances)) {
      this.log.warn(
        `appInstance.identityHash ${appInstance.identityHash} already exists, will not add appInstance to ${multisigAddress}`,
      );
    } else {
      this.log.debug(`Adding proposal ${appInstance.identityHash} to store`);
      channel.proposedAppInstances.push([appInstance.identityHash, appInstance]);
    }
    this.log.debug(`Adding set state commitment to store, and updating channel`);
    // Same call order as before, flattened into named intermediates.
    const withChannel = this.setStateChannel(store, { ...channel, monotonicNumProposedApps });
    const withSetState = this.setSetStateCommitment(
      withChannel,
      appInstance.identityHash,
      signedSetStateCommitment,
    );
    const updatedStore = this.setConditionalTransactionCommitment(
      withSetState,
      appInstance.identityHash,
      signedConditionalTxCommitment,
    );
    return this.saveStore(updatedStore);
  });
}
/**
 * Looks up a pending app proposal by identity hash, or undefined when the
 * channel or the proposal does not exist.
 */
async getAppProposal(appIdentityHash: string): Promise<AppInstanceJson | undefined> {
  const channel = await this.getStateChannelByAppIdentityHash(appIdentityHash);
  if (!channel || !this.hasAppIdentityHash(appIdentityHash, channel.proposedAppInstances)) {
    return undefined;
  }
  const entry = channel.proposedAppInstances.find(([id]) => id === appIdentityHash);
  return entry && entry[1] ? entry[1] : undefined;
}
/**
 * Deletes a pending proposal from a channel. A missing channel or proposal
 * is a debug-logged no-op.
 */
async removeAppProposal(multisigAddress: string, appIdentityHash: string): Promise<void> {
  return this.execute((store) => {
    const channel = this.getStateChannelFromStore(store, multisigAddress);
    if (!channel) {
      this.log.debug(`Could not find channel at ${multisigAddress}, doing nothing`);
      return store;
    }
    if (!this.hasAppIdentityHash(appIdentityHash, channel.proposedAppInstances)) {
      this.log.debug(`Could not find proposal with ${appIdentityHash} in channel, doing nothing`);
      return store;
    }
    this.log.debug(`Removing proposal for ${appIdentityHash}`);
    const proposalIdx = channel.proposedAppInstances.findIndex(
      ([app]) => app === appIdentityHash,
    );
    channel.proposedAppInstances.splice(proposalIdx, 1);
    // TODO: remove set state commitment
    return this.saveStore(this.setStateChannel(store, channel));
  });
}
////////////////////////////////////////
//// ChallengeUpdatedEvents
async getChallengeUpdatedEvents(
appIdentityHash: string,
): Promise<ChallengeUpdatedEventPayload[]> {
const key = this.storage.getKey(CHALLENGE_UPDATED_EVENT, appIdentityHash);
const stored = await this.storage.getItem<ChallengeUpdatedEventPayload[]>(key);
return stored || [];
}
async createChallengeUpdatedEvent(event: ChallengeUpdatedEventPayload): Promise<void> {
const key = this.storage.getKey(CHALLENGE_UPDATED_EVENT, event.identityHash);
const stored = await this.getChallengeUpdatedEvents(event.identityHash);
const existing = stored.find(
(e) => e.status === event.status && toBN(e.versionNumber).eq(event.versionNumber),
);
if (existing) {
return;
}
stored.push(event);
this.log.debug(`Adding challenge updated event: ${stringify(event)}`);
return this.storage.setItem(key, stored);
}
////////////////////////////////////////
//// Commitments
async getConditionalTransactionCommitment(
appIdentityHash: string,
): Promise<ConditionalTransactionCommitmentJSON | undefined> {
const conditionalCommitmentKey = this.storage.getKey(CONDITIONAL_COMMITMENT, appIdentityHash);
const item = await this.getItem<ConditionalTransactionCommitmentJSON>(conditionalCommitmentKey);
if (!item) {
return undefined;
}
return item;
}
async getSetStateCommitments(appIdentityHash: string): Promise<SetStateCommitmentJSON[]> {
// get all stored challenges
const key = this.storage.getKey(SET_STATE_COMMITMENT, appIdentityHash);
const store = await this.execute((store) => store);
return store[key] || [];
}
async getSetupCommitment(multisigAddress: string): Promise<MinimalTransaction | undefined> {
const setupCommitmentKey = this.storage.getKey(SETUP_COMMITMENT, multisigAddress);
const item = await this.getItem<MinimalTransaction>(setupCommitmentKey);
if (!item) {
return undefined;
}
return item;
}
////////////////////////////////////////
//// SchemaVersion
async getSchemaVersion(): Promise<number> {
const version = await this.getItem<{ version: number }>(STORE_SCHEMA_VERSION_KEY);
return version?.version || 0;
}
updateSchemaVersion(version: number = STORE_SCHEMA_VERSION): Promise<void> {
if (STORE_SCHEMA_VERSION < version) {
throw new Error(`Unrecognized store version: ${version}`);
}
return this.setItem<{ version: number }>(STORE_SCHEMA_VERSION_KEY, { version });
}
////////////////////////////////////////
//// StateProgressedEvents
async getStateProgressedEvents(appIdentityHash: string): Promise<StateProgressedEventPayload[]> {
const key = this.storage.getKey(STATE_PROGRESSED_EVENT, appIdentityHash);
const stored = await this.storage.getItem<StateProgressedEventPayload[]>(key);
return stored || [];
}
async createStateProgressedEvent(event: StateProgressedEventPayload): Promise<void> {
const key = this.storage.getKey(STATE_PROGRESSED_EVENT, event.identityHash);
const stored = await this.getStateProgressedEvents(event.identityHash);
const existing = stored.find((e) => toBN(e.versionNumber).eq(event.versionNumber));
if (existing) {
return;
}
stored.push(event);
return this.storage.setItem(key, stored);
}
////////////////////////////////////////
//// StateChannels
async getStateChannel(multisigAddress: string): Promise<StateChannelJSON | undefined> {
const channelKey = this.storage.getKey(CHANNEL, multisigAddress);
const item = await this.getItem<StateChannelJSON>(channelKey);
return item ? properlyConvertChannelNullVals(item) : undefined;
}
async getStateChannelByAppIdentityHash(
appIdentityHash: string,
): Promise<StateChannelJSON | undefined> {
const channels = await this.getAllChannels();
return channels.find((channel) => {
return (
channel.proposedAppInstances.find(([app]) => app === appIdentityHash) ||
channel.appInstances.find(([app]) => app === appIdentityHash) ||
(
channel.freeBalanceAppInstance &&
channel.freeBalanceAppInstance!.identityHash === appIdentityHash
)
);
});
}
async getStateChannelByOwnersAndChainId(
owners: string[],
chainId: number,
): Promise<StateChannelJSON | undefined> {
const channels = await this.getAllChannels();
return channels.find(
(channel) =>
[...channel.userIdentifiers].sort().toString() === owners.sort().toString() &&
channel.chainId === chainId,
);
}
async getAllChannels(): Promise<StateChannelJSON[]> {
const channelKeys = (await this.getKeys()).filter((key) => key.includes(CHANNEL));
const store = await this.execute((store) => store);
return channelKeys
.map((key) => (store[key] ? properlyConvertChannelNullVals(store[key]) : undefined))
.filter((channel) => !!channel) as StateChannelJSON[];
}
  /**
   * Persists a brand-new channel together with its signed setup commitment
   * and the free balance app's initial set-state commitment, in one save.
   *
   * @param stateChannel - channel JSON; must carry a freeBalanceAppInstance
   * @param signedSetupCommitment - stored under the channel's multisig address
   * @param signedFreeBalanceUpdate - stored under the free balance app's identity hash
   * @throws Error when the channel has no free balance app
   */
  async createStateChannel(
    stateChannel: StateChannelJSON,
    signedSetupCommitment: MinimalTransaction,
    signedFreeBalanceUpdate: SetStateCommitmentJSON,
  ): Promise<void> {
    return this.execute((store) => {
      if (!stateChannel.freeBalanceAppInstance) {
        throw new Error(`Cannot create a channel without an available free balance app`);
      }
      // Inner-to-outer: write the channel, then the setup commitment, then
      // the free balance set-state commitment.
      const updatedStore = this.setSetStateCommitment(
        this.setSetupCommitment(
          this.setStateChannel(store, stateChannel),
          stateChannel.multisigAddress,
          signedSetupCommitment,
        ),
        stateChannel.freeBalanceAppInstance!.identityHash,
        signedFreeBalanceUpdate,
      );
      return this.saveStore(updatedStore);
    });
  }
////////////////////////////////////////
//// UserWithdrawals
async saveUserWithdrawal(withdrawalObject: WithdrawalMonitorObject): Promise<void> {
const withdrawalKey = this.storage.getKey(WITHDRAWAL_COMMITMENT, `monitor`);
const withdrawals = await this.getUserWithdrawals();
const idx = withdrawals.findIndex(
(x) => x.tx.data === withdrawalObject.tx.data && x.tx.to === withdrawalObject.tx.to,
);
if (idx === -1) {
return this.setItem(withdrawalKey, withdrawals.concat([withdrawalObject]));
} else {
withdrawals[idx] = withdrawalObject;
return this.setItem(withdrawalKey, withdrawals);
}
}
async getUserWithdrawals(): Promise<WithdrawalMonitorObject[]> {
const withdrawalKey = this.storage.getKey(WITHDRAWAL_COMMITMENT, `monitor`);
const item = await this.getItem<WithdrawalMonitorObject[]>(withdrawalKey);
if (!item) {
return [];
}
return item;
}
async removeUserWithdrawal(toRemove: WithdrawalMonitorObject): Promise<void> {
const withdrawalKey = this.storage.getKey(WITHDRAWAL_COMMITMENT, `monitor`);
const withdrawals = await this.getUserWithdrawals();
const updated = withdrawals.filter((x) => JSON.stringify(x) !== JSON.stringify(toRemove));
return this.setItem(withdrawalKey, updated);
}
////////////////////////////////////////
//// Private Helper Methods
// TODO: make private?
async getItem<T>(key: string): Promise<T | undefined> {
const store = await this.execute((store) => store);
const item = store[key];
if (!item || Object.values(item).length === 0) {
return undefined;
}
return item;
}
// TODO: make private?
async setItem<T>(key: string, value: T): Promise<void> {
return this.execute((store) => {
store[key] = value;
return this.saveStore(store);
});
}
// TODO: make private?
async getKeys(): Promise<string[]> {
return Object.keys(await this.getStore());
}
  // Get a fresh copy of the store JSON object from the STORE super-key that contains everything.
  // NOTE(review): despite the original comment's claim, this reads this.storage directly and
  // does NOT go through execute(), so it may not reflect instructions still queued in
  // `deferred` — confirm whether callers rely on that.
  private async getStore(): Promise<any> {
    return await this.storage.getItem(this.storage.getKey(STORE)) || {};
  }
private async getEntries(): Promise<[string, any][]> {
const store = await this.execute((store) => store);
return Object.entries(store);
}
private async removeItem(key: string): Promise<void> {
return this.execute((store) => {
delete store[key];
return this.saveStore(store);
});
}
private getKey(...args: string[]): string {
return this.storage.getKey(...args);
}
  /**
   * Persists the full store object under the STORE super-key, best-effort
   * mirroring it to the backup service first (backup failures are logged,
   * never fatal). Returns the store so callers can chain it.
   */
  private async saveStore(store: any): Promise<any> {
    const storeKey = this.storage.getKey(STORE);
    if (this.backupService) {
      try {
        await this.backupService.backup({ path: storeKey, value: store });
      } catch (e) {
        this.log.warn(
          `Could not save ${storeKey} to backup service. Error: ${e.stack || e.message}`,
        );
      }
    }
    await this.storage.setItem(storeKey, store);
    return store;
  }
private setStateChannel(store: any, stateChannel: StateChannelJSON): any {
const channelKey = this.storage.getKey(CHANNEL, stateChannel.multisigAddress);
store[channelKey] = {
...stateChannel,
proposedAppInstances: stateChannel.proposedAppInstances.map(([id, proposal]) => [
id,
proposal,
]),
appInstances: stateChannel.appInstances.map(([id, app]) => [id, app]),
};
return store;
}
private getStateChannelFromStore(
store: any,
multisigAddress: string,
): StateChannelJSON | undefined {
const channelKey = this.storage.getKey(CHANNEL, multisigAddress);
const item = store[channelKey];
return item ? properlyConvertChannelNullVals(item) : undefined;
}
private getLatestSetStateCommitment(
store: any,
appIdentityHash: Bytes32,
): SetStateCommitmentJSON | undefined {
const setStateKey = this.storage.getKey(SET_STATE_COMMITMENT, appIdentityHash);
const commitments = [...(store[setStateKey] || [])];
if (commitments.length === 0) {
return undefined;
}
const [latest] = commitments.sort((a, b) =>
toBN(b.versionNumber).sub(toBN(a.versionNumber)).toNumber(),
);
return latest;
}
private setSetupCommitment(
store: any,
multisigAddress: string,
commitment: MinimalTransaction,
): any {
const setupCommitmentKey = this.storage.getKey(SETUP_COMMITMENT, multisigAddress);
store[setupCommitmentKey] = commitment;
return store;
}
private setConditionalTransactionCommitment(
store: any,
appIdentityHash: string,
commitment: ConditionalTransactionCommitmentJSON,
): any {
const conditionalCommitmentKey = this.storage.getKey(CONDITIONAL_COMMITMENT, appIdentityHash);
store[conditionalCommitmentKey] = commitment;
return store;
}
private unsetConditionalTransactionCommitment(store: any, appIdentityHash: string): any {
const conditionalCommitmentKey = this.storage.getKey(CONDITIONAL_COMMITMENT, appIdentityHash);
if (store[conditionalCommitmentKey]) {
delete store[conditionalCommitmentKey];
}
return store;
}
private setSetStateCommitment(
store: any,
appIdentityHash: string,
commitment: SetStateCommitmentJSON,
): any {
const setStateKey = this.storage.getKey(SET_STATE_COMMITMENT, appIdentityHash);
const existing = [...(store[setStateKey] || [])];
const idx = existing.findIndex((c) => toBN(c.versionNumber).eq(toBN(commitment.versionNumber)));
idx === -1 ? existing.push(commitment) : (existing[idx] = commitment);
store[setStateKey] = existing;
return store;
}
private unsetSetStateCommitment(store: any, appIdentityHash: string, versionNumber: string): any {
const setStateKey = this.storage.getKey(SET_STATE_COMMITMENT, appIdentityHash);
const existing = [...(store[setStateKey] || [])];
// find commitment equal to or below version number
const remaining = existing.filter((commitment) =>
toBN(commitment.versionNumber).gt(versionNumber),
);
if (remaining.length === 0) {
delete store[setStateKey];
} else {
store[setStateKey] = remaining;
}
return store;
}
private hasAppIdentityHash(
hash: string,
toSearch: [string, AppInstanceJson][] | [string, AppInstanceJson][],
) {
const existsIndex = toSearch.findIndex(([idHash, app]) => idHash === hash);
return existsIndex >= 0;
}
  /**
   * Runs `instruction` against a fresh copy of the store, serialized behind
   * any previously queued instructions via pWaterfall, then clears the queue.
   *
   * NOTE: this relies on all `instruction`s being idempotent in case
   * the same instruction is added to the `deferred` array simultaneously.
   *
   * Additionally, if you call a function within `execute` that also calls
   * `execute` you will have an infinite loop.
   */
  private execute = async (instruction: (store: any) => Promise<any>): Promise<any> => {
    // TODO: ideally this would make sure `this.deferred` is properly deduped.
    // right now, it does not protect against that. Instead idempotent calls are serialized,
    // and the same call may be run multiple times; harmless for idempotent
    // instructions, but duplicated non-idempotent work would repeat its effects.
    const store = await this.getStore();
    this.deferred.push((store) => instruction(store));
    const updatedStore = await pWaterfall(this.deferred, store);
    this.deferred = [];
    // const updatedStore = await instruction(store);
    return updatedStore;
  };
}
export default StoreService;
import * as fs from 'fs';
import * as cxschema from '@aws-cdk/cloud-assembly-schema';
import * as cxapi from '@aws-cdk/cx-api';
import { App, Aws, CfnResource, ContextProvider, DefaultStackSynthesizer, FileAssetPackaging, Stack } from '../../lib';
import { evaluateCFN } from '../evaluate-cfn';
// Pseudo-parameter values used when evaluating synthesized CloudFormation
// expressions in these tests (see evalCFN below)
const CFN_CONTEXT = {
  'AWS::Region': 'the_region',
  'AWS::AccountId': 'the_account',
  'AWS::URLSuffix': 'domain.aws',
};
// Recreated for every test by the beforeEach in the describe block below
let app: App;
let stack: Stack;
// Test suite for DefaultStackSynthesizer ("new style" synthesis): asset
// manifests, bootstrap version checks, and customization of publishing roles,
// buckets, repositories, and prefixes.
describe('new style synthesis', () => {
  beforeEach(() => {
    // Opt the app into new-style synthesis via context
    app = new App({
      context: {
        [cxapi.NEW_STYLE_STACK_SYNTHESIS_CONTEXT]: 'true',
      },
    });
    stack = new Stack(app, 'Stack');
  });
  test('stack template is in asset manifest', () => {
    // GIVEN
    new CfnResource(stack, 'Resource', {
      type: 'Some::Resource',
    });
    // WHEN
    const asm = app.synth();
    // THEN -- the S3 url is advertised on the stack artifact
    const stackArtifact = asm.getStackArtifact('Stack');
    const templateObjectKey = last(stackArtifact.stackTemplateAssetObjectUrl?.split('/'));
    expect(stackArtifact.stackTemplateAssetObjectUrl).toEqual(`s3://cdk-hnb659fds-assets-\${AWS::AccountId}-\${AWS::Region}/${templateObjectKey}`);
    // THEN - the template is in the asset manifest
    const manifestArtifact = asm.artifacts.filter(isAssetManifest)[0];
    expect(manifestArtifact).toBeDefined();
    const manifest: cxschema.AssetManifest = JSON.parse(fs.readFileSync(manifestArtifact.file, { encoding: 'utf-8' }));
    const firstFile = (manifest.files ? manifest.files[Object.keys(manifest.files)[0]] : undefined) ?? {};
    expect(firstFile).toEqual({
      source: { path: 'Stack.template.json', packaging: 'file' },
      destinations: {
        'current_account-current_region': {
          bucketName: 'cdk-hnb659fds-assets-${AWS::AccountId}-${AWS::Region}',
          objectKey: templateObjectKey,
          assumeRoleArn: 'arn:${AWS::Partition}:iam::${AWS::AccountId}:role/cdk-hnb659fds-file-publishing-role-${AWS::AccountId}-${AWS::Region}',
        },
      },
    });
  });
  test('version check is added to template', () => {
    // GIVEN
    new CfnResource(stack, 'Resource', {
      type: 'Some::Resource',
    });
    // THEN
    const template = app.synth().getStackByName('Stack').template;
    expect(template?.Parameters?.BootstrapVersion?.Type).toEqual('AWS::SSM::Parameter::Value<String>');
    expect(template?.Parameters?.BootstrapVersion?.Default).toEqual('/cdk-bootstrap/hnb659fds/version');
    expect(template?.Parameters?.BootstrapVersion?.Description).toContain(cxapi.SSMPARAM_NO_INVALIDATE);
    const assertions = template?.Rules?.CheckBootstrapVersion?.Assertions ?? [];
    expect(assertions.length).toEqual(1);
    expect(assertions[0].Assert).toEqual({
      'Fn::Not': [
        { 'Fn::Contains': [['1', '2', '3', '4', '5'], { Ref: 'BootstrapVersion' }] },
      ],
    });
  });
  test('version check is not added to template if disabled', () => {
    // GIVEN
    stack = new Stack(app, 'Stack2', {
      synthesizer: new DefaultStackSynthesizer({
        generateBootstrapVersionRule: false,
      }),
    });
    new CfnResource(stack, 'Resource', {
      type: 'Some::Resource',
    });
    // THEN
    const template = app.synth().getStackByName('Stack2').template;
    expect(template?.Rules?.CheckBootstrapVersion).toEqual(undefined);
  });
  test('customize version parameter', () => {
    // GIVEN
    const myapp = new App();
    // WHEN
    const mystack = new Stack(myapp, 'mystack', {
      synthesizer: new DefaultStackSynthesizer({
        bootstrapStackVersionSsmParameter: 'stack-version-parameter',
      }),
    });
    mystack.synthesizer.addFileAsset({
      fileName: __filename,
      packaging: FileAssetPackaging.FILE,
      sourceHash: 'file-asset-hash',
    });
    // THEN
    const asm = myapp.synth();
    const manifestArtifact = getAssetManifest(asm);
    // THEN - the asset manifest has an SSM parameter entry
    expect(manifestArtifact.bootstrapStackVersionSsmParameter).toEqual('stack-version-parameter');
  });
  test('generates missing context with the lookup role ARN as one of the missing context properties', () => {
    // GIVEN
    stack = new Stack(app, 'Stack2', {
      synthesizer: new DefaultStackSynthesizer({
        generateBootstrapVersionRule: false,
      }),
      env: {
        account: '111111111111', region: 'us-east-1',
      },
    });
    ContextProvider.getValue(stack, {
      provider: cxschema.ContextProvider.VPC_PROVIDER,
      props: {},
      dummyValue: undefined,
    }).value;
    // THEN
    const assembly = app.synth();
    expect(assembly.manifest.missing![0].props.lookupRoleArn).toEqual('arn:${AWS::Partition}:iam::111111111111:role/cdk-hnb659fds-lookup-role-111111111111-us-east-1');
  });
  test('add file asset', () => {
    // WHEN
    const location = stack.synthesizer.addFileAsset({
      fileName: __filename,
      packaging: FileAssetPackaging.FILE,
      sourceHash: 'abcdef',
    });
    // THEN - we have a fixed asset location with region placeholders
    expect(evalCFN(location.bucketName)).toEqual('cdk-hnb659fds-assets-the_account-the_region');
    expect(evalCFN(location.s3Url)).toEqual('https://s3.the_region.domain.aws/cdk-hnb659fds-assets-the_account-the_region/abcdef.js');
    // THEN - object key contains source hash somewhere
    expect(location.objectKey.indexOf('abcdef')).toBeGreaterThan(-1);
  });
  test('add docker image asset', () => {
    // WHEN
    const location = stack.synthesizer.addDockerImageAsset({
      directoryName: '.',
      sourceHash: 'abcdef',
    });
    // THEN - we have a fixed asset location with region placeholders
    expect(evalCFN(location.repositoryName)).toEqual('cdk-hnb659fds-container-assets-the_account-the_region');
    expect(evalCFN(location.imageUri)).toEqual('the_account.dkr.ecr.the_region.domain.aws/cdk-hnb659fds-container-assets-the_account-the_region:abcdef');
  });
  test('synthesis', () => {
    // GIVEN
    stack.synthesizer.addFileAsset({
      fileName: __filename,
      packaging: FileAssetPackaging.FILE,
      sourceHash: 'abcdef',
    });
    stack.synthesizer.addDockerImageAsset({
      directoryName: '.',
      sourceHash: 'abcdef',
    });
    // WHEN
    const asm = app.synth();
    // THEN - we have an asset manifest with both assets and the stack template in there
    const manifestArtifact = getAssetManifest(asm);
    const manifest = readAssetManifest(manifestArtifact);
    expect(Object.keys(manifest.files || {}).length).toEqual(2);
    expect(Object.keys(manifest.dockerImages || {}).length).toEqual(1);
    // THEN - the asset manifest has an SSM parameter entry
    expect(manifestArtifact.bootstrapStackVersionSsmParameter).toEqual('/cdk-bootstrap/hnb659fds/version');
    // THEN - every artifact has an assumeRoleArn
    for (const file of Object.values(manifest.files ?? {})) {
      for (const destination of Object.values(file.destinations)) {
        expect(destination.assumeRoleArn).toEqual('arn:${AWS::Partition}:iam::${AWS::AccountId}:role/cdk-hnb659fds-file-publishing-role-${AWS::AccountId}-${AWS::Region}');
      }
    }
    for (const file of Object.values(manifest.dockerImages ?? {})) {
      for (const destination of Object.values(file.destinations)) {
        expect(destination.assumeRoleArn).toEqual('arn:${AWS::Partition}:iam::${AWS::AccountId}:role/cdk-hnb659fds-image-publishing-role-${AWS::AccountId}-${AWS::Region}');
      }
    }
  });
  test('customize publishing resources', () => {
    // GIVEN
    const myapp = new App();
    // WHEN
    const mystack = new Stack(myapp, 'mystack', {
      synthesizer: new DefaultStackSynthesizer({
        fileAssetsBucketName: 'file-asset-bucket',
        fileAssetPublishingRoleArn: 'file:role:arn',
        fileAssetPublishingExternalId: 'file-external-id',
        imageAssetsRepositoryName: 'image-ecr-repository',
        imageAssetPublishingRoleArn: 'image:role:arn',
        imageAssetPublishingExternalId: 'image-external-id',
      }),
    });
    mystack.synthesizer.addFileAsset({
      fileName: __filename,
      packaging: FileAssetPackaging.FILE,
      sourceHash: 'file-asset-hash',
    });
    mystack.synthesizer.addDockerImageAsset({
      directoryName: '.',
      sourceHash: 'docker-asset-hash',
    });
    // THEN
    const asm = myapp.synth();
    const manifest = readAssetManifest(getAssetManifest(asm));
    expect(manifest.files?.['file-asset-hash']?.destinations?.['current_account-current_region']).toEqual({
      bucketName: 'file-asset-bucket',
      objectKey: 'file-asset-hash.js',
      assumeRoleArn: 'file:role:arn',
      assumeRoleExternalId: 'file-external-id',
    });
    expect(manifest.dockerImages?.['docker-asset-hash']?.destinations?.['current_account-current_region']).toEqual({
      repositoryName: 'image-ecr-repository',
      imageTag: 'docker-asset-hash',
      assumeRoleArn: 'image:role:arn',
      assumeRoleExternalId: 'image-external-id',
    });
  });
  test('customize deploy role externalId', () => {
    // GIVEN
    const myapp = new App();
    // WHEN
    const mystack = new Stack(myapp, 'mystack', {
      synthesizer: new DefaultStackSynthesizer({
        deployRoleExternalId: 'deploy-external-id',
      }),
    });
    // THEN
    const asm = myapp.synth();
    const stackArtifact = asm.getStackByName(mystack.stackName);
    expect(stackArtifact.assumeRoleExternalId).toEqual('deploy-external-id');
  });
  test('synthesis with bucketPrefix', () => {
    // GIVEN
    const myapp = new App();
    // WHEN
    const mystack = new Stack(myapp, 'mystack-bucketPrefix', {
      synthesizer: new DefaultStackSynthesizer({
        fileAssetsBucketName: 'file-asset-bucket',
        fileAssetPublishingRoleArn: 'file:role:arn',
        fileAssetPublishingExternalId: 'file-external-id',
        bucketPrefix: '000000000000/',
      }),
    });
    mystack.synthesizer.addFileAsset({
      fileName: __filename,
      packaging: FileAssetPackaging.FILE,
      sourceHash: 'file-asset-hash-with-prefix',
    });
    // WHEN
    const asm = myapp.synth();
    // THEN -- the S3 url is advertised on the stack artifact
    const stackArtifact = asm.getStackArtifact('mystack-bucketPrefix');
    // THEN - we have an asset manifest with both assets and the stack template in there
    const manifest = readAssetManifest(getAssetManifest(asm));
    // THEN
    expect(manifest.files?.['file-asset-hash-with-prefix']?.destinations?.['current_account-current_region']).toEqual({
      bucketName: 'file-asset-bucket',
      objectKey: '000000000000/file-asset-hash-with-prefix.js',
      assumeRoleArn: 'file:role:arn',
      assumeRoleExternalId: 'file-external-id',
    });
    const templateHash = last(stackArtifact.stackTemplateAssetObjectUrl?.split('/'));
    expect(stackArtifact.stackTemplateAssetObjectUrl).toEqual(`s3://file-asset-bucket/000000000000/${templateHash}`);
  });
  test('synthesis with dockerPrefix', () => {
    // GIVEN
    const myapp = new App();
    // WHEN
    const mystack = new Stack(myapp, 'mystack-dockerPrefix', {
      synthesizer: new DefaultStackSynthesizer({
        dockerTagPrefix: 'test-prefix-',
      }),
    });
    mystack.synthesizer.addDockerImageAsset({
      directoryName: 'some-folder',
      sourceHash: 'docker-asset-hash',
    });
    const asm = myapp.synth();
    // THEN
    const manifest = readAssetManifest(getAssetManifest(asm));
    const imageTag = manifest.dockerImages?.['docker-asset-hash']?.destinations?.['current_account-current_region'].imageTag;
    expect(imageTag).toEqual('test-prefix-docker-asset-hash');
  });
  test('cannot use same synthesizer for multiple stacks', () => {
    // GIVEN
    const synthesizer = new DefaultStackSynthesizer();
    // WHEN
    new Stack(app, 'Stack2', { synthesizer });
    expect(() => {
      new Stack(app, 'Stack3', { synthesizer });
    }).toThrow(/A StackSynthesizer can only be used for one Stack/);
  });
});
// Synthesizer props are resolved at construction time, so unresolved tokens
// (like Aws.REGION) must be rejected immediately.
test('get an exception when using tokens for parameters', () => {
  expect(() => {
    // GIVEN
    new DefaultStackSynthesizer({
      fileAssetsBucketName: `my-bucket-${Aws.REGION}`,
    });
  }).toThrow(/cannot contain tokens/);
});
/**
 * Evaluate a possibly string-containing value the same way CFN would do
 *
 * (Be invariant to the specific Fn::Sub or Fn::Join we would output)
 */
function evalCFN(value: any) {
  const resolved = stack.resolve(value);
  return evaluateCFN(resolved, CFN_CONTEXT);
}
/** Type guard: narrows a CloudArtifact to an AssetManifestArtifact. */
function isAssetManifest(x: cxapi.CloudArtifact): x is cxapi.AssetManifestArtifact {
  return x instanceof cxapi.AssetManifestArtifact;
}
/** Returns the first asset manifest in the assembly, throwing when absent. */
function getAssetManifest(asm: cxapi.CloudAssembly): cxapi.AssetManifestArtifact {
  const manifest = asm.artifacts.find(isAssetManifest);
  if (!manifest) { throw new Error('no asset manifest in assembly'); }
  return manifest;
}
/** Parses an asset manifest artifact's JSON file from disk. */
function readAssetManifest(manifestArtifact: cxapi.AssetManifestArtifact): cxschema.AssetManifest {
  const raw = fs.readFileSync(manifestArtifact.file, { encoding: 'utf-8' });
  return JSON.parse(raw);
}
function last<A>(xs?: A[]): A | undefined {
return xs ? xs[xs.length - 1] : undefined;
} | the_stack |
import { fabric } from 'fabric';
import { Handler } from '.';
import { WorkareaLayout, WorkareaObject, FabricImage } from '../utils';
import { VideoObject } from '../objects/Video';
class WorkareaHandler {
handler: Handler;
constructor(handler: Handler) {
this.handler = handler;
this.initialize();
}
/**
* Initialize workarea
*
* @author salgum1114
*/
public initialize() {
const { workareaOption } = this.handler;
const image = new Image(workareaOption.width, workareaOption.height);
image.width = workareaOption.width;
image.height = workareaOption.height;
this.handler.workarea = new fabric.Image(image, workareaOption) as WorkareaObject;
this.handler.canvas.add(this.handler.workarea);
this.handler.objects = this.handler.getObjects();
this.handler.canvas.centerObject(this.handler.workarea);
this.handler.canvas.renderAll();
}
/**
* Set the layout on workarea
* @param {WorkareaLayout} layout
* @returns
*/
public setLayout = (layout: WorkareaLayout) => {
this.handler.workarea.set('layout', layout);
const { _element, isElement, workareaWidth, workareaHeight } = this.handler.workarea;
const { canvas } = this.handler;
let scaleX = 1;
let scaleY = 1;
const isFixed = layout === 'fixed';
const isResponsive = layout === 'responsive';
const isFullscreen = layout === 'fullscreen';
if (isElement) {
if (isFixed) {
scaleX = workareaWidth / _element.width;
scaleY = workareaHeight / _element.height;
} else if (isResponsive) {
const scales = this.calculateScale();
scaleX = scales.scaleX;
scaleY = scales.scaleY;
} else {
scaleX = canvas.getWidth() / _element.width;
scaleY = canvas.getHeight() / _element.height;
}
}
this.handler.getObjects().forEach(obj => {
const { id, player } = obj as VideoObject;
if (id !== 'workarea') {
const objScaleX = !isFullscreen ? 1 : scaleX;
const objScaleY = !isFullscreen ? 1 : scaleY;
const objWidth = obj.width * objScaleX * canvas.getZoom();
const objHeight = obj.height * objScaleY * canvas.getZoom();
const el = this.handler.elementHandler.findById(obj.id);
this.handler.elementHandler.setSize(el, obj);
if (player) {
player.setPlayerSize(objWidth, objHeight);
}
obj.set({
scaleX: !isFullscreen ? 1 : objScaleX,
scaleY: !isFullscreen ? 1 : objScaleY,
});
}
});
if (isResponsive) {
const center = canvas.getCenter();
if (isElement) {
this.handler.workarea.set({
scaleX: 1,
scaleY: 1,
});
this.handler.zoomHandler.zoomToPoint(new fabric.Point(center.left, center.top), scaleX);
} else {
this.handler.workarea.set({
width: workareaWidth,
height: workareaHeight,
});
scaleX = canvas.getWidth() / workareaWidth;
scaleY = canvas.getHeight() / workareaHeight;
if (workareaHeight >= workareaWidth) {
scaleX = scaleY;
} else {
scaleY = scaleX;
}
this.handler.zoomHandler.zoomToPoint(new fabric.Point(center.left, center.top), scaleX);
}
canvas.centerObject(this.handler.workarea);
canvas.renderAll();
return;
}
if (isElement) {
this.handler.workarea.set({
width: _element.width,
height: _element.height,
scaleX,
scaleY,
});
} else {
const width = isFixed ? workareaWidth : this.handler.canvas.getWidth();
const height = isFixed ? workareaHeight : this.handler.canvas.getHeight();
this.handler.workarea.set({
width,
height,
backgroundColor: 'rgba(255, 255, 255, 1)',
});
this.handler.canvas.renderAll();
if (isFixed) {
canvas.centerObject(this.handler.workarea);
} else {
this.handler.workarea.set({
left: 0,
top: 0,
});
}
}
canvas.centerObject(this.handler.workarea);
const center = canvas.getCenter();
canvas.setViewportTransform([1, 0, 0, 1, 0, 0]);
this.handler.zoomHandler.zoomToPoint(new fabric.Point(center.left, center.top), 1);
canvas.renderAll();
};
/**
* Set the responsive image on Workarea
* @param {string | File} [source]
* @param {boolean} [loaded]
* @returns
*/
public setResponsiveImage = async (source: string | File, loaded?: boolean) => {
const imageFromUrl = async (src: string = '') => {
return new Promise<WorkareaObject>(resolve => {
fabric.Image.fromURL(src, (img: any) => {
const { canvas, workarea, editable } = this.handler;
const { workareaWidth, workareaHeight } = workarea;
const { scaleX, scaleY } = this.calculateScale(img);
if (img._element) {
workarea.set({
...img,
isElement: true,
selectable: false,
});
} else {
const image = new Image(workareaWidth, workareaHeight);
workarea.setElement(image);
workarea.set({
isElement: false,
selectable: false,
width: workareaWidth,
height: workareaHeight,
});
}
if (editable && !loaded) {
canvas.getObjects().forEach(obj => {
const { id, player } = obj as VideoObject;
if (id !== 'workarea') {
const objWidth = obj.width * scaleX;
const objHeight = obj.height * scaleY;
const el = this.handler.elementHandler.findById(id);
this.handler.elementHandler.setScaleOrAngle(el, obj);
this.handler.elementHandler.setSize(el, obj);
if (player) {
player.setPlayerSize(objWidth, objHeight);
}
obj.set({
scaleX: 1,
scaleY: 1,
});
obj.setCoords();
}
});
}
// 파일이 없을 경우 Canvas의 nextWidth, nextHeight 값이 변경되기 전 상태에서 zoomToFit이 동작함
// 정상 동작 resize event logic => zoomToFit
// 현재 동작 zoomToFit -> resize event logic
this.handler.zoomHandler.zoomToFit();
canvas.centerObject(workarea);
resolve(workarea);
});
});
};
const { workarea } = this.handler;
if (!source) {
workarea.set({
src: null,
file: null,
});
return imageFromUrl(source as string);
}
if (source instanceof File) {
return new Promise<WorkareaObject>(resolve => {
const reader = new FileReader();
reader.onload = () => {
workarea.set({
file: source,
});
imageFromUrl(reader.result as string).then(resolve);
};
reader.readAsDataURL(source);
});
} else {
workarea.set({
src: source,
});
return imageFromUrl(source);
}
};
/**
* Set the image on Workarea
* @param {string | File} source
* @param {boolean} [loaded=false]
* @returns
*/
	setImage = async (source: string | File, loaded = false) => {
		const { canvas, workarea, editable } = this.handler;
		// Responsive layout has its own sizing logic; delegate entirely.
		if (workarea.layout === 'responsive') {
			return this.setResponsiveImage(source, loaded);
		}
		// Loads `src` into a fabric.Image and applies it as the workarea
		// background, rescaling the workarea (and, when editing, the existing
		// objects) to fit the canvas. Resolves with the updated workarea.
		const imageFromUrl = async (src: string) => {
			return new Promise<WorkareaObject>(resolve => {
				fabric.Image.fromURL(src, (img: any) => {
					// Target size: full canvas, unless the layout is 'fixed',
					// in which case the workarea keeps its own scaled size.
					let width = canvas.getWidth();
					let height = canvas.getHeight();
					if (workarea.layout === 'fixed') {
						width = workarea.width * workarea.scaleX;
						height = workarea.height * workarea.scaleY;
					}
					let scaleX = 1;
					let scaleY = 1;
					if (img._element) {
						// An actual image was loaded: stretch it to the target size
						// and copy its properties onto the workarea.
						scaleX = width / img.width;
						scaleY = height / img.height;
						img.set({
							originX: 'left',
							originY: 'top',
							scaleX,
							scaleY,
						});
						workarea.set({
							...img,
							isElement: true,
							selectable: false,
						});
					} else {
						// No image element (e.g. empty/invalid src): reset the
						// workarea to a blank element at the target size.
						workarea.setElement(new Image());
						workarea.set({
							width,
							height,
							scaleX,
							scaleY,
							isElement: false,
							selectable: false,
						});
					}
					canvas.centerObject(workarea);
					if (editable && !loaded) {
						// In the editor (and only on a fresh load), resize every
						// non-workarea object. NOTE(review): this reassigns the
						// outer scaleX/scaleY inside the loop for non-fullscreen
						// layouts, so later iterations see the previous object's
						// scale — confirm this carry-over is intended.
						const { layout } = workarea;
						canvas.getObjects().forEach(obj => {
							const { id, player } = obj as VideoObject;
							if (id !== 'workarea') {
								scaleX = layout === 'fullscreen' ? scaleX : obj.scaleX;
								scaleY = layout === 'fullscreen' ? scaleY : obj.scaleY;
								const el = this.handler.elementHandler.findById(id);
								this.handler.elementHandler.setSize(el, obj);
								if (player) {
									// Keep embedded video players in sync with their object's size.
									const objWidth = obj.width * scaleX;
									const objHeight = obj.height * scaleY;
									player.setPlayerSize(objWidth, objHeight);
								}
								obj.set({
									scaleX,
									scaleY,
								});
								obj.setCoords();
							}
						});
					}
					// Reset the viewport and re-apply zoom around the canvas center.
					const center = canvas.getCenter();
					const zoom = loaded || workarea.layout === 'fullscreen' ? 1 : this.handler.canvas.getZoom();
					canvas.setViewportTransform([1, 0, 0, 1, 0, 0]);
					this.handler.zoomHandler.zoomToPoint(new fabric.Point(center.left, center.top), zoom);
					canvas.renderAll();
					resolve(workarea);
				});
			});
		};
		if (!source) {
			// Clearing the image: wipe both src and file before re-rendering.
			workarea.set({
				src: null,
				file: null,
			});
			return imageFromUrl(source as string);
		}
		if (source instanceof File) {
			// File input: read it as a data URL first, remember the File on the workarea.
			return new Promise<WorkareaObject>(resolve => {
				const reader = new FileReader();
				reader.onload = () => {
					workarea.set({
						file: source,
					});
					imageFromUrl(reader.result as string).then(resolve);
				};
				reader.readAsDataURL(source);
			});
		} else {
			// Plain URL string: remember it as src and load directly.
			workarea.set({
				src: source,
			});
			return imageFromUrl(source);
		}
	};
/**
* Calculate scale to the image
*
* @param {FabricImage} [image]
* @returns
*/
	public calculateScale = (image?: FabricImage) => {
		const { canvas, workarea } = this.handler;
		const { workareaWidth, workareaHeight } = workarea;
		// Prefer the natural size of the backing <img> element (of the given
		// image, or of the workarea); fall back to the configured workarea size.
		const { _element } = image || workarea;
		const width = _element?.width || workareaWidth;
		const height = _element?.height || workareaHeight;
		let scaleX = canvas.getWidth() / width;
		let scaleY = canvas.getHeight() / height;
		if (height >= width) {
			// Portrait/square: reuse the vertical scale horizontally, then clamp
			// so the scaled width does not exceed the canvas width.
			scaleX = scaleY;
			if (canvas.getWidth() < width * scaleX) {
				scaleX = scaleX * (canvas.getWidth() / (width * scaleX));
			}
		} else {
			// Landscape: reuse the horizontal scale vertically.
			scaleY = scaleX;
			// NOTE(review): this branch clamps scaleX against the canvas *height*,
			// while the branch above clamps against the width — looks asymmetric
			// (one might expect scaleY to be adjusted here). Confirm intent.
			if (canvas.getHeight() < height * scaleX) {
				scaleX = scaleX * (canvas.getHeight() / (height * scaleX));
			}
		}
		return { scaleX, scaleY };
	};
}
export default WorkareaHandler; | the_stack |
import ProjectConfig from "@dxatscale/sfpowerscripts.core/lib/project/ProjectConfig";
import { jest, expect } from "@jest/globals";
import ProjectValidation from "../src/ProjectValidation"
describe("Given a sfdx-project.json, it should be validated against the scehma", () => {
it("should not throw an error for a valid sfdx-project.json without any sfpowerscripts decorators", () => {
let sfdx_project={
"packageDirectories": [
{
"path": "packages/temp",
"default": true,
"package": "temp",
"versionName": "temp",
"versionNumber": "1.0.0.0"
},
{
"path": "packages/domains/core",
"package": "core",
"default": false,
"versionName": "core",
"versionNumber": "1.0.0.0"
},
{
"path": "packages/frameworks/mass-dataload",
"package": "mass-dataload",
"default": false,
"versionName": "mass-dataload",
"versionNumber": "1.0.0.0"
},
{
"path": "packages/access-mgmt",
"package": "access-mgmt",
"default": false,
"versionName": "access-mgmt",
"versionNumber": "1.0.0.0"
},
{
"path": "packages/bi",
"package": "bi",
"default": false,
"versionName": "bi",
"versionNumber": "1.0.0.0"
}
],
"namespace": "",
"sfdcLoginUrl": "https://login.salesforce.com",
"sourceApiVersion": "50.0",
"packageAliases":
{ "bi":"04t000000000000" }
};
const projectConfigMock = jest.spyOn(ProjectConfig, "getSFDXPackageManifest");
projectConfigMock.mockImplementation(()=>{return sfdx_project})
expect(() => { new ProjectValidation().validateSFDXProjectJSON(); }).not.toThrow();
});
it("should throw an error for a sfdx-project.json where a package directory is missing package name", () => {
let sfdx_project={
"packageDirectories": [
{
"path": "packages/temp",
"default": true,
"package": "temp",
"versionName": "temp",
"versionNumber": "1.0.0.0",
},
{
"path": "packages/domains/core",
"package": "core",
"default": false,
"versionName": "core",
"versionNumber": "1.0.0.0"
},
{
"path": "packages/frameworks/mass-dataload",
"package": "mass-dataload",
"default": false,
"type":"data",
"versionName": "mass-dataload",
"versionNumber": "1.0.0.0"
},
{
"path": "packages/access-mgmt",
"package": "access-mgmt",
"default": false,
"versionName": "access-mgmt",
"versionNumber": "1.0.0.0"
},
{
"path": "packages/bi",
}
],
"namespace": "",
"sfdcLoginUrl": "https://login.salesforce.com",
"sourceApiVersion": "50.0",
"packageAliases":
{ "bi":"04t000000000000" }
};
const projectConfigMock = jest.spyOn(ProjectConfig, "getSFDXPackageManifest");
projectConfigMock.mockImplementation(()=>{return sfdx_project})
expect(() => { new ProjectValidation().validateSFDXProjectJSON(); }).toThrow();
});
it("should not throw an error for a sfdx-project.json where various sfpowerscripts orchestrator properties are used", () => {
let sfdx_project={
"packageDirectories": [
{
"path": "packages/temp",
"default": true,
"package": "temp",
"versionName": "temp",
"versionNumber": "1.0.0.0",
"ignoreOnStage": ["prepare","validate","build"]
},
{
"path": "packages/domains/core",
"package": "core",
"default": false,
"versionName": "core",
"versionNumber": "1.0.0.0",
"skipCoverageValidation":true,
"skipTesting": true,
"isOptimizedDeployment":false,
"destructiveChangePath":"test/1.xml"
},
{
"path": "packages/frameworks/mass-dataload",
"package": "mass-dataload",
"default": false,
"type":"data",
"versionName": "mass-dataload",
"versionNumber": "1.0.0.0" ,
"postDeploymentScript":"test/1.bat",
"preDeploymentScript":"test/2.bat",
"assignPermSetsPreDeployment":["PS1","PS2"],
"assignPermSetsPostDeployment":["PS3","PS4"]
},
{
"path": "packages/access-mgmt",
"package": "access-mgmt",
"default": false,
"versionName": "access-mgmt",
"versionNumber": "1.0.0.0",
"reconcileProfiles": true,
"alwaysDeploy":true
},
{
"path": "packages/bi",
"package": "bi",
"default": false,
"versionName": "bi",
"versionNumber": "1.0.0.0",
"aliasfy":true,
"skipDeployOnOrgs":["uat"]
}
],
"namespace": "",
"sfdcLoginUrl": "https://login.salesforce.com",
"sourceApiVersion": "50.0",
"packageAliases":
{ "bi":"04t000000000000" },
"plugins": {
"ignoreFiles": {
"prepare": "path/to/.forceignore",
"validate": "path/to/.forceignore",
"quickbuild": "path/to/.forceignore",
"build": "path/to/.forceignore"
}
}
};
const projectConfigMock = jest.spyOn(ProjectConfig, "getSFDXPackageManifest");
projectConfigMock.mockImplementation(()=>{return sfdx_project})
expect(() => { new ProjectValidation().validateSFDXProjectJSON(); }).not.toThrow();
});
it("should throw an error for a sfdx-project.json where various sfpowerscripts orchestrator properties are incorrectly used", () => {
let sfdx_project={
"packageDirectories": [
{
"path": "packages/temp",
"default": true,
"package": "temp",
"versionName": "temp",
"versionNumber": "1.0.0.0",
"ignoreOnStage": ["prepare","validate","build","test"]
},
{
"path": "packages/domains/core",
"package": "core",
"default": false,
"versionName": "core",
"versionNumber": "1.0.0.0",
"skipCoverageValidation":true,
"skipTesting": "true",
"isOptimizedDeployment":false,
"destructiveChangePath":true
},
{
"path": "packages/frameworks/mass-dataload",
"package": "mass-dataload",
"default": false,
"type":"data",
"versionName": "mass-dataload",
"versionNumber": "1.0.0.0" ,
"postDeploymentScript":"test/1.bat",
"preDeploymentScript":"test/2.bat",
"assignPermsetsPreDeployment":["PS1","PS2"],
"assignPermsetsPostDeployment":["PS3","PS4"]
},
{
"path": "packages/access-mgmt",
"package": "access-mgmt",
"default": false,
"versionName": "access-mgmt",
"versionNumber": "1.0.0.0",
"reconcileProfiles": "true",
"alwaysDeploy":true
},
{
"path": "packages/bi",
"package": "bi",
"default": false,
"versionName": "bi",
"versionNumber": "1.0.0.0",
"aliasfy":"false",
"skipDeployOnOrgs":["uat"]
}
],
"namespace": "",
"sfdcLoginUrl": "https://login.salesforce.com",
"sourceApiVersion": "50.0",
"packageAliases":
{ "bi":"04t000000000000" }
};
const projectConfigMock = jest.spyOn(ProjectConfig, "getSFDXPackageManifest");
projectConfigMock.mockImplementation(()=>{return sfdx_project})
expect(() => { new ProjectValidation().validateSFDXProjectJSON(); }).toThrow();
});
it("should not throw an package-specific error for sfdx-project.json when version number is used correctly", () => {
// sfdx-project.json includes one source package with specific build number (valid) and one unlocked package using NEXT keyword (also valid)
let sfdx_project={
"packageDirectories": [
{
"path": "packages/temp",
"default": true,
"package": "temp",
"type": "source",
"versionName": "temp",
"versionNumber": "1.0.0.0"
},
{
"path": "packages/domains/core",
"package": "core",
"default": false,
"versionName": "core",
"versionNumber": "1.0.0.NEXT"
}
],
"namespace": "",
"sfdcLoginUrl": "https://login.salesforce.com",
"sourceApiVersion": "50.0",
"packageAliases":
{ "core":"04t000000000000" }
};
const projectConfigMock = jest.spyOn(ProjectConfig, "getSFDXPackageManifest");
projectConfigMock.mockImplementation(()=>{return sfdx_project})
expect(() => { new ProjectValidation().validatePackageBuildNumbers(); }).not.toThrow();
});
it("should throw a package-specific error for sfdx-project.json when version number is used incorrectly", () => {
// sfdx-project.json includes two source packages. One with specific build number (valid), one using NEXT keyword (invalid)
let sfdx_project={
"packageDirectories": [
{
"path": "packages/temp",
"default": true,
"package": "temp",
"type": "source",
"versionName": "temp",
"versionNumber": "1.0.0.0"
},
{
"path": "packages/domains/core",
"package": "invalid_core_pkg",
"default": false,
"type": "source",
"versionName": "core",
"versionNumber": "1.0.0.NEXT"
}
],
"namespace": "",
"sfdcLoginUrl": "https://login.salesforce.com",
"sourceApiVersion": "50.0"
};
const projectConfigMock = jest.spyOn(ProjectConfig, "getSFDXPackageManifest");
projectConfigMock.mockImplementation(()=>{return sfdx_project});
let excep;
try {
new ProjectValidation().validatePackageBuildNumbers();
}
catch(error) {
excep = error;
}
expect(excep);
expect(excep.message).toContain('invalid_core_pkg');
});
}); | the_stack |
import * as stream from "stream"
import * as path from "path"
import { getVariableData } from "../../db/model/Variable"
import {
initGrapherForSvgExport,
buildSvgOutFilename,
} from "../../baker/GrapherImageBaker"
import { createGunzip, createGzip } from "zlib"
import * as fs from "fs-extra"
import getStream from "get-stream"
import { LegacyVariablesAndEntityKey } from "../../grapher/core/LegacyVariableCode"
import { ChartTypeName } from "../../grapher/core/GrapherConstants"
import md5 from "md5"
import * as util from "util"
import { GrapherInterface } from "../../grapher/core/GrapherInterface"
import { TESTING_ONLY_reset_guid } from "../../clientUtils/Util"
import _ from "lodash"
// File names used inside each per-grapher job directory and the output directory.
const CONFIG_FILENAME: string = "config.json"
const RESULTS_FILENAME = "results.csv"
const DATA_FILENAME = "data.json"
// Header row of results.csv; must stay in sync with writeResultsCsvFile and
// the column order parsed in getReferenceCsvContentMap.
export const SVG_CSV_HEADER = `grapherId,slug,chartType,md5,svgFilename`
// Promisified stream.finished: resolves once a stream has fully flushed or errored.
export const finished = util.promisify(stream.finished) // (A)
/** Successful verification: rendered SVG matches the reference. */
export interface VerifyResultOk {
    kind: "ok"
}
/** Verification found a textual difference between rendered and reference SVG. */
export interface VerifyResultDifference {
    kind: "difference"
    difference: SvgDifference
}
/** Rendering or comparison threw; the error is captured instead of propagated. */
export interface VerifyResultError {
    kind: "error"
    graphId: number
    error: Error
}
/** Discriminated union over the three possible verification outcomes. */
export type VerifyResult =
    | VerifyResultOk
    | VerifyResultDifference
    | VerifyResultError
/** Build the "everything matched" result. */
function resultOk(): VerifyResult {
    return { kind: "ok" }
}
/** Build an error result for the given grapher id. */
function resultError(id: number, error: Error): VerifyResult {
    return { kind: "error", graphId: id, error }
}
/** Wrap a located difference into a result value. */
function resultDifference(difference: SvgDifference): VerifyResult {
    return { kind: "difference", difference }
}
/** One row of results.csv: identity and md5 fingerprint of a rendered chart. */
export type SvgRecord = {
    chartId: number
    slug: string
    chartType: ChartTypeName | undefined
    md5: string
    svgFilename: string
}
/** Location and surrounding fragments of the first divergence between two SVGs. */
export interface SvgDifference {
    chartId: number
    startIndex: number
    referenceSvgFragment: string
    newSvgFragment: string
}
/** A chart directory queued for processing. */
export interface JobDirectory {
    chartId: number
    pathToProcess: string
}
/**
 * Log `message` (and optionally `param`) to stdout, but only in verbose mode.
 */
export function logIfVerbose(verbose: boolean, message: string, param?: any) {
    if (!verbose) return
    // Check for undefined explicitly: the previous truthiness test silently
    // dropped legitimately falsy params such as 0, "" and false.
    if (param !== undefined) console.log(message, param)
    else console.log(message)
}
/**
 * Return the index of the first character at which `a` and `b` differ
 * (the length of the shorter string when one is a prefix of the other).
 * Returns -1 and warns when the strings are identical — callers only invoke
 * this after an md5 mismatch suggested there *should* be a difference.
 */
function findFirstDiffIndex(a: string, b: string): number {
    let i = 0 // `let` instead of the legacy `var`
    while (i < a.length && i < b.length && a[i] === b[i]) i++
    if (a.length === b.length && a.length === i) {
        console.warn("No difference found even though hash was different!")
        i = -1
    }
    return i
}
/**
 * Compare a freshly rendered SVG against its reference. Fast-paths on equal
 * md5 hashes; otherwise loads the reference file, strips non-deterministic
 * fragments from both sides and locates the first differing character.
 */
export async function verifySvg(
    newSvg: string,
    newSvgRecord: SvgRecord,
    referenceSvgRecord: SvgRecord,
    referenceSvgsPath: string,
    verbose: boolean
): Promise<VerifyResult> {
    logIfVerbose(verbose, `verifying ${newSvgRecord.chartId}`)
    if (newSvgRecord.md5 === referenceSvgRecord.md5) {
        // if the md5 hash is unchanged then there is no difference
        return resultOk()
    }
    const referenceSvg = await loadReferenceSvg(
        referenceSvgsPath,
        referenceSvgRecord
    )
    const preparedNewSvg = prepareSvgForComparision(newSvg)
    const preparedReferenceSvg = prepareSvgForComparision(referenceSvg)
    const firstDiffIndex = findFirstDiffIndex(
        preparedNewSvg,
        preparedReferenceSvg
    )
    // Sometimes the md5 hash comparision above indicated a difference
    // but the character by character comparision gives -1 (no differences)
    // Weird - maybe an artifact of a change in how the ids are stripped
    // accross version?
    if (firstDiffIndex === -1) {
        return resultOk()
    }
    logIfVerbose(verbose, `${newSvgRecord.chartId} had differences`)
    // Clamp the 40-char context window to the start of the string. The old
    // `substr(firstDiffIndex - 20, 40)` treated a negative start as an offset
    // from the END of the string, so differences within the first 20 chars
    // produced fragments from the wrong location. (substr is also deprecated.)
    const fragmentStart = Math.max(0, firstDiffIndex - 20)
    return resultDifference({
        chartId: newSvgRecord.chartId,
        startIndex: firstDiffIndex,
        referenceSvgFragment: preparedReferenceSvg.slice(
            fragmentStart,
            fragmentStart + 40
        ),
        newSvgFragment: preparedNewSvg.slice(fragmentStart, fragmentStart + 40),
    })
}
/**
 * Determine which job directories under `inDir` to verify.
 *
 * An empty `grapherIds` list selects every subdirectory of `inDir`; otherwise
 * only the requested ids are kept, and ids without a matching directory are
 * dropped with a console warning.
 */
export async function decideDirectoriesToVerify(
    grapherIds: number[],
    inDir: string
): Promise<JobDirectory[]> {
    let directories: JobDirectory[] = []
    if (grapherIds.length === 0) {
        // If no grapher ids were given scan all directories in the inDir folder
        const dir = await fs.opendir(inDir)
        for await (const entry of dir) {
            if (entry.isDirectory()) {
                directories.push({
                    chartId: parseInt(entry.name),
                    pathToProcess: path.join(inDir, entry.name),
                })
            }
        }
    } else {
        // if grapher ids were given check which ones exist in inDir and filter to those
        // -> if by doing so we drop some, warn the user
        directories = grapherIds.map((id) => ({
            chartId: id,
            pathToProcess: path.join(inDir, id.toString()),
        }))
        const allDirsCount = directories.length
        directories = directories.filter((item) =>
            fs.existsSync(item.pathToProcess)
        )
        if (directories.length < allDirsCount) {
            console.warn(
                `${allDirsCount} grapher ids were given but only ${directories.length} existed as directories`
            )
        }
    }
    return directories
}
/** Turn a list of comma separated numbers and ranges into an array of numbers */
export function getGrapherIdListFromString(rawGrapherIds: string): number[] {
    // Local replacement for lodash's _.range(start, end): ascending when
    // start < end, descending when start > end, empty when equal, and
    // non-finite bounds coerced to 0 (matching lodash's toFinite behaviour).
    // Dropping the lodash dependency keeps this helper stdlib-only.
    const rangeExclusive = (start: number, endExclusive: number): number[] => {
        const s = Number.isFinite(start) ? start : 0
        const e = Number.isFinite(endExclusive) ? endExclusive : 0
        const step = s <= e ? 1 : -1
        const result: number[] = []
        for (let v = s; step > 0 ? v < e : v > e; v += step) result.push(v)
        return result
    }
    return rawGrapherIds.split(",").flatMap((item) => {
        if (item.includes("-")) {
            const subparts = item.split("-")
            if (subparts.length !== 2) {
                console.warn(`Invalid graphid range: ${item}`)
                return []
            }
            const first = parseInt(subparts[0])
            const second = parseInt(subparts[1])
            // "a-b" is inclusive of both ends, hence second + 1.
            return rangeExclusive(first, second + 1)
        }
        const parsed = parseInt(item)
        // Silently skip non-numeric entries.
        return isNaN(parsed) ? [] : [parsed]
    })
}
/**
 * Serialize `data` as compact JSON and write it gzip-compressed to `filename`.
 * Resolves once the underlying file stream has finished.
 * NOTE(review): errors emitted by the gzip stream itself are not awaited here,
 * only the file stream's completion — presumably acceptable for this tooling;
 * confirm if reused elsewhere.
 */
export async function writeToGzippedFile(
    data: unknown,
    filename: string
): Promise<void> {
    const json = JSON.stringify(data)
    const writeStream = fs.createWriteStream(filename)
    const gzipStream = createGzip()
    gzipStream.pipe(writeStream)
    gzipStream.write(json)
    gzipStream.end()
    return finished(writeStream)
}
/** Serialize `data` as pretty-printed (2-space indented) JSON and write it to `filename`. */
export async function writeToFile(data: unknown, filename: string) {
    const serialized = JSON.stringify(data, null, 2)
    return fs.writeFile(filename, serialized)
}
/** Input for saveGrapherSchemaAndData: a grapher config and the target directory. */
export interface SaveGrapherSchemaAndDataJob {
    config: GrapherInterface
    outDir: string
}
/**
 * Persist a grapher's config (config.json) and its variable data (data.json)
 * into `<outDir>/<chartId>/`. Config writing and data fetching run in parallel.
 * NOTE(review): Promise.allSettled means write/fetch failures are silently
 * ignored here — confirm this best-effort behaviour is intended.
 */
export async function saveGrapherSchemaAndData(
    jobDescription: SaveGrapherSchemaAndDataJob
): Promise<void> {
    const config = jobDescription.config
    const outDir = jobDescription.outDir
    // One sub-directory per chart id (empty string when the config has no id).
    const dataDir = path.join(outDir, config.id?.toString() ?? "")
    if (!fs.existsSync(dataDir)) fs.mkdirSync(dataDir)
    const configPath = path.join(dataDir, CONFIG_FILENAME)
    const promise1 = writeToFile(config, configPath)
    const dataPath = path.join(dataDir, DATA_FILENAME)
    const grapher = initGrapherForSvgExport(config)
    const variableIds = grapher.dimensions.map((d) => d.variableId)
    const promise2 = getVariableData(variableIds).then((vardata) =>
        writeToFile(vardata, dataPath)
    )
    await Promise.allSettled([promise1, promise2])
}
/**
 * Render the grapher stored in `dir` (config.json + data.json) to a static
 * SVG string, and build an SvgRecord for it (including an md5 hash of the
 * cleaned SVG).
 */
export async function renderSvg(dir: string): Promise<[string, SvgRecord]> {
    const [config, data] = await loadGrapherConfigAndData(dir)
    // Graphers sometimes need to generate ids (incrementing numbers). For this
    // they keep a stateful variable in clientutils. To minimize differences
    // between consecutive runs we reset this id here before every export
    TESTING_ONLY_reset_guid()
    const grapher = initGrapherForSvgExport(config)
    const { width, height } = grapher.idealBounds
    const outFilename = buildSvgOutFilename(
        config.slug!,
        config.version,
        width,
        height
    )
    grapher.receiveLegacyData(data as LegacyVariablesAndEntityKey)
    const svg = grapher.staticSVG
    const svgRecord = {
        chartId: config.id!,
        slug: config.slug!,
        chartType: config.type,
        // Hash is computed on the cleaned SVG so it is stable across runs.
        md5: processSvgAndCalculateHash(svg),
        svgFilename: outFilename,
    }
    return Promise.resolve([svg, svgRecord])
}
// Patterns for SVG fragments that change from run to run (auto-generated ids).
const replaceRegexes = [/id="react-select-[0-9]+-input"/g]
/** Some fragments of the svgs are non-deterministic. This function is used to
    delete all such fragments */
function prepareSvgForComparision(svg: string): string {
    let current = svg
    for (const replaceRegex of replaceRegexes) {
        // Fix: replace on the accumulated result, not the original input.
        // The previous `current = svg.replace(...)` discarded the effect of
        // every regex except the last one in the list.
        current = current.replace(replaceRegex, "")
    }
    return current
}
/** Strip the non-deterministic SVG fragments, then md5-hash what remains. */
export function processSvgAndCalculateHash(svg: string): string {
    return md5(prepareSvgForComparision(svg))
}
/** Input for renderSvgAndSave: the chart directory to render and the output directory. */
export interface RenderSvgAndSaveJobDescription {
    dir: string
    outDir: string
}
/**
 * Render the grapher in `dir`, strip the non-deterministic fragments from the
 * resulting SVG and write the cleaned SVG into `outDir` under its generated
 * filename. Returns the SvgRecord for the rendered chart.
 */
export async function renderSvgAndSave(
    jobDescription: RenderSvgAndSaveJobDescription
): Promise<SvgRecord> {
    const dir = jobDescription.dir
    const outDir = jobDescription.outDir
    const [svg, svgRecord] = await renderSvg(dir)
    const outPath = path.join(outDir, svgRecord.svgFilename)
    const cleanedSvg = prepareSvgForComparision(svg)
    await fs.writeFile(outPath, cleanedSvg)
    return Promise.resolve(svgRecord)
}
/**
 * Read a gzip-compressed JSON file and return the parsed value.
 * The caller is responsible for validating the parsed shape.
 */
export async function readGzippedJsonFile(filename: string): Promise<unknown> {
    const readStream = fs.createReadStream(filename)
    const gzipStream = createGunzip()
    readStream.pipe(gzipStream)
    const content = await getStream(gzipStream)
    return JSON.parse(content)
}
/** Read and parse a JSON file; callers must validate the resulting shape themselves. */
export async function readJsonFile(filename: string): Promise<unknown> {
    return fs.readJson(filename)
}
/**
 * Load the reference SVG belonging to `referenceSvgRecord` from `referenceDir`.
 * @throws Error when an argument is missing or the reference file does not exist.
 */
export async function loadReferenceSvg(
    referenceDir: string,
    referenceSvgRecord: SvgRecord
): Promise<string> {
    // Throw real Error objects instead of the previous bare strings: bare
    // strings carry no stack trace and break the `err as Error` handling in
    // renderAndVerifySvg. (Also fixes the "RefereneDir" typo in the message.)
    if (!referenceDir) throw new Error("referenceDir was empty in loadReferenceSvg")
    if (!referenceSvgRecord) throw new Error("reference svg record was not defined")
    if (!referenceSvgRecord.svgFilename)
        throw new Error("reference svg record.svgfilename was not defined")
    const referenceFilename = path.join(
        referenceDir,
        referenceSvgRecord.svgFilename
    )
    if (!fs.existsSync(referenceFilename))
        throw new Error(`Input directory does not exist ${referenceFilename}`)
    const svg = await fs.readFile(referenceFilename, "utf-8")
    return svg
}
/**
 * Load the grapher config (config.json) and data (data.json) pair from
 * `inputDir`.
 * @throws Error when the directory does not exist.
 */
export async function loadGrapherConfigAndData(
    inputDir: string
): Promise<[GrapherInterface, unknown]> {
    // Throw a real Error instead of a bare string (stack traces + instanceof).
    if (!fs.existsSync(inputDir))
        throw new Error(`Input directory does not exist ${inputDir}`)
    const configPath = path.join(inputDir, CONFIG_FILENAME)
    const config = (await readJsonFile(configPath)) as GrapherInterface
    const dataPath = path.join(inputDir, DATA_FILENAME)
    const data = await readJsonFile(dataPath)
    // No Promise.resolve wrapper needed in an async function.
    return [config, data]
}
/** Print a human-readable warning describing where a rendered SVG diverged from its reference. */
export function logDifferencesToConsole(
    svgRecord: SvgRecord,
    validationResult: VerifyResultDifference
): void {
    const { startIndex, referenceSvgFragment, newSvgFragment } =
        validationResult.difference
    console.warn(
        `Svg was different for ${svgRecord.chartId}. The difference starts at character ${startIndex}.
        Reference: ${referenceSvgFragment}
        Current : ${newSvgFragment}`
    )
}
/**
 * Parse the reference results.csv found in `referenceDir` into a map keyed by
 * chart id. Column order must match SVG_CSV_HEADER.
 */
export async function getReferenceCsvContentMap(
    referenceDir: string
): Promise<Map<number, SvgRecord>> {
    const results = await fs.readFile(
        path.join(referenceDir, RESULTS_FILENAME),
        "utf-8"
    )
    const csvContentArray = results
        .split("\n")
        // Drop the header row. slice instead of the previous splice: splice
        // mutates and returns the *removed* elements — it happened to work,
        // but reads like a bug; slice states the intent directly.
        .slice(1)
        // Skip blank lines (e.g. the file's trailing newline), which
        // previously produced a bogus NaN-keyed entry in the map.
        .filter((line) => line.trim() !== "")
        .map((line): [number, SvgRecord] => {
            const items = line.split(",")
            const chartId = parseInt(items[0])
            return [
                chartId,
                {
                    chartId: chartId,
                    slug: items[1],
                    chartType: items[2] as ChartTypeName,
                    md5: items[3],
                    svgFilename: items[4],
                },
            ]
        })
    const csvContentMap = new Map<number, SvgRecord>(csvContentArray)
    return csvContentMap
}
/**
 * Write results.csv (SVG_CSV_HEADER plus one row per SvgRecord) into `outDir`.
 * NOTE(review): values are written verbatim with no CSV escaping — assumes
 * slugs/filenames never contain commas; verify against chart naming rules.
 */
export async function writeResultsCsvFile(
    outDir: string,
    svgRecords: SvgRecord[]
): Promise<void> {
    const resultsPath = path.join(outDir, RESULTS_FILENAME)
    const csvFileStream = fs.createWriteStream(resultsPath)
    csvFileStream.write(SVG_CSV_HEADER + "\n")
    for (const row of svgRecords) {
        const line = `${row.chartId},${row.slug},${row.chartType},${row.md5},${row.svgFilename}`
        csvFileStream.write(line + "\n")
    }
    csvFileStream.end()
    // Wait for the stream to flush before closing.
    await finished(csvFileStream)
    csvFileStream.close()
}
/** Everything a render-and-verify job needs: input dir, reference info, output dir. */
export interface RenderJobDescription {
    dir: JobDirectory
    referenceEntry: SvgRecord
    referenceDir: string
    outDir: string
    verbose: boolean
}
/**
 * Render the grapher in `dir` to SVG and compare it against the reference.
 * When a difference is found the cleaned SVG is written to `outDir` for
 * inspection. Errors are captured and returned as a VerifyResultError rather
 * than thrown, so batch callers can keep processing.
 */
export async function renderAndVerifySvg({
    dir,
    referenceEntry,
    referenceDir,
    outDir,
    verbose,
}: RenderJobDescription): Promise<VerifyResult> {
    try {
        // Throw real Error objects (previously bare strings, which made the
        // `err as Error` cast below a lie and lost stack traces).
        if (!dir) throw new Error("Dir was not defined")
        if (!referenceEntry) throw new Error("ReferenceEntry was not defined")
        if (!referenceDir) throw new Error("ReferenceDir was not defined")
        if (!outDir) throw new Error("outdir was not defined")
        const [svg, svgRecord] = await renderSvg(dir.pathToProcess)
        const validationResult = await verifySvg(
            svg,
            svgRecord,
            referenceEntry,
            referenceDir,
            verbose
        )
        // verifySvg returns a Result type - if it is success we don't care any further
        // but if there was a difference then we write the svg and a message to stderr.
        // (An `if` replaces the previous single-case switch whose unbraced case
        // body declared lexical variables — an eslint no-case-declarations issue.)
        if (validationResult.kind === "difference") {
            logDifferencesToConsole(svgRecord, validationResult)
            const outputPath = path.join(outDir, svgRecord.svgFilename)
            const cleanedSvg = prepareSvgForComparision(svg)
            await fs.writeFile(outputPath, cleanedSvg)
        }
        return validationResult
    } catch (err) {
        return resultError(referenceEntry.chartId, err as Error)
    }
}
/**
 * Resolve a raw comma/range grapher-id string into the list of job
 * directories to verify under `inDir`.
 */
export async function prepareVerifyRun(
    rawGrapherIds: string,
    inDir: string
): Promise<JobDirectory[]> {
    const grapherIds = getGrapherIdListFromString(rawGrapherIds)
    return decideDirectoriesToVerify(grapherIds, inDir)
}
export function displayVerifyResultsAndGetExitCode(
validationResults: VerifyResult[],
verbose: boolean,
directoriesToProcess: JobDirectory[]
): number {
let returnCode: number
const errorResults = validationResults.filter(
(result) => result.kind === "error"
) as VerifyResultError[]
const differenceResults = validationResults.filter(
(result) => result.kind === "difference"
) as VerifyResultDifference[]
if (errorResults.length === 0 && differenceResults.length === 0) {
logIfVerbose(
verbose,
`There were no differences in all ${directoriesToProcess.length} graphs processed`
)
returnCode = 0
} else {
if (errorResults.length) {
console.warn(
`${errorResults.length} graphs threw errors: ${errorResults
.map((err) => err.graphId)
.join()}`
)
for (const result of errorResults) {
console.log(result.graphId?.toString(), result.error) // write to stdout one grapher id per file for easy piping to other processes
}
}
if (differenceResults.length) {
console.warn(
`${
differenceResults.length
} graphs had differences: ${differenceResults
.map((err) => err.difference.chartId)
.join()}`
)
for (const result of differenceResults) {
console.log("", result.difference.chartId) // write to stdout one grapher id per file for easy piping to other processes
}
}
returnCode = errorResults.length + differenceResults.length
}
return returnCode
} | the_stack |
import { test } from '@japa/runner'
import { ApplicationContract } from '@ioc:Adonis/Core/Application'
import { setup, cleanup, getDb, getBaseSchema, setupApplication, fs } from '../../test-helpers'
// Shared per-test fixtures; re-created by the group's setup hook before every test.
let db: ReturnType<typeof getDb>
let app: ApplicationContract
test.group('Schema', (group) => {
  // Boot a fresh application + database connection for each test so no state
  // leaks between tests.
  group.each.setup(async () => {
    app = await setupApplication()
    db = getDb(app)
    await setup()
  })
  // Close all connections and wipe the scratch database/filesystem afterwards.
  group.each.teardown(async () => {
    await db.manager.closeAll()
    await cleanup()
    await fs.cleanup()
  })
  // Dry run (3rd constructor arg = true): execUp collects SQL without executing it.
  test('get schema queries defined inside the up method in dry run', async ({ assert }) => {
    class UsersSchema extends getBaseSchema() {
      public up() {
        this.schema.createTable('users', (table) => {
          table.increments('id')
          table.string('username')
        })
      }
    }
    const schema = new UsersSchema(db.connection(), 'users.ts', true)
    const queries = await schema.execUp()
    // Expected SQL: the same create-table built directly through knex.
    const knexSchema = db
      .connection()
      .schema.createTable('users', (table) => {
        table.increments('id')
        table.string('username')
      })
      .toQuery()
    assert.deepEqual(queries, [knexSchema])
  })
  test('get schema queries defined inside the down method in dry run', async ({ assert }) => {
    class UsersSchema extends getBaseSchema() {
      public down() {
        this.schema.dropTable('users')
      }
    }
    const schema = new UsersSchema(db.connection(), 'users.ts', true)
    const queries = await schema.execDown()
    const knexSchema = db.connection().schema.dropTable('users').toQuery()
    assert.deepEqual(queries, [knexSchema])
  })
  test('get knex raw query builder using now method', async ({ assert }) => {
    class UsersSchema extends getBaseSchema() {
      public up() {
        this.schema.createTable('users', (table) => {
          table.increments('id')
          table.string('username')
        })
      }
    }
    const schema = new UsersSchema(db.connection(), 'users.ts', true)
    // now() should produce the raw CURRENT_TIMESTAMP expression.
    assert.equal(schema.now().toQuery(), 'CURRENT_TIMESTAMP')
  })
  // defer() callbacks must be skipped entirely in dry-run mode.
  test('do not execute defer calls in dry run', async ({ assert }) => {
    assert.plan(1)
    class UsersSchema extends getBaseSchema() {
      public up() {
        assert.isTrue(true)
        this.defer(() => {
          throw new Error('Not expected to be invoked')
        })
      }
    }
    const schema = new UsersSchema(db.connection(), 'foo.ts', true)
    await schema.execUp()
  })
  test('execute up method queries on a given connection', async ({ assert }) => {
    class UsersSchema extends getBaseSchema() {
      public up() {
        this.schema.createTable('schema_users', (table) => {
          table.increments('id')
          table.string('username')
        })
        this.schema.createTable('schema_accounts', (table) => {
          table.increments('id')
          table.integer('user_id').unsigned().references('schema_users.id')
        })
      }
    }
    // Run the migration inside a transaction: commit on success, roll back on failure.
    const trx = await db.transaction()
    const schema = new UsersSchema(trx, 'users.ts', false)
    try {
      await schema.execUp()
      await trx.commit()
    } catch (error) {
      await trx.rollback()
    }
    const hasUsers = await db.connection().schema.hasTable('schema_users')
    const hasAccounts = await db.connection().schema.hasTable('schema_accounts')
    // Drop tables before asserting so the database stays clean even on failure.
    await db.connection().schema.dropTable('schema_accounts')
    await db.connection().schema.dropTable('schema_users')
    assert.isTrue(hasUsers)
    assert.isTrue(hasAccounts)
  })
  // Deferred actions run after the DDL statements, in declaration order —
  // the insert below requires schema_users to exist already.
  test('execute up method deferred actions in correct sequence', async ({ assert }) => {
    class UsersSchema extends getBaseSchema() {
      public up() {
        this.schema.createTable('schema_users', (table) => {
          table.increments('id')
          table.string('username')
        })
        this.defer(async () => {
          await this.db.table('schema_users').insert({ username: 'virk' })
        })
        this.schema.createTable('schema_accounts', (table) => {
          table.increments('id')
          table.integer('user_id').unsigned().references('schema_users.id')
        })
      }
    }
    const trx = await db.transaction()
    const schema = new UsersSchema(trx, 'users.ts', false)
    try {
      await schema.execUp()
      await trx.commit()
    } catch (error) {
      await trx.rollback()
    }
    const user = await db.connection().query().from('schema_users').first()
    assert.equal(user.username, 'virk')
    await db.connection().schema.dropTable('schema_accounts')
    await db.connection().schema.dropTable('schema_users')
  })
  test('execute down method queries on a given connection', async ({ assert }) => {
    class UsersSchema extends getBaseSchema() {
      public up() {
        this.schema.createTable('schema_users', (table) => {
          table.increments('id')
          table.string('username')
        })
        this.schema.createTable('schema_accounts', (table) => {
          table.increments('id')
          table.integer('user_id').unsigned().references('schema_users.id')
        })
      }
      public down() {
        // sqlite3 cannot drop foreign keys via alter table; skip there.
        if (this.db.dialect.name !== 'sqlite3') {
          this.schema.table('schema_accounts', (table) => {
            table.dropForeign(['user_id'])
          })
        }
        this.schema.dropTable('schema_users')
        this.schema.dropTable('schema_accounts')
      }
    }
    await new UsersSchema(db.connection(), 'users.ts', false).execUp()
    const trx = await db.transaction()
    const schema = new UsersSchema(trx, 'users.ts', false)
    try {
      await schema.execDown()
      await trx.commit()
    } catch (error) {
      await trx.rollback()
      console.log(error)
    }
    const hasUsers = await db.connection().schema.hasTable('schema_users')
    const hasAccounts = await db.connection().schema.hasTable('schema_accounts')
    assert.isFalse(hasUsers)
    assert.isFalse(hasAccounts)
  })
  test('use now helper to define default timestamp', async ({ assert }) => {
    class UsersSchema extends getBaseSchema() {
      public up() {
        this.schema.createTable('users', (table) => {
          table.increments('id')
          table.timestamp('created_at').defaultTo(this.now())
        })
      }
    }
    const schema = new UsersSchema(db.connection(), 'users.ts', true)
    const queries = await schema.execUp()
    // this.now() must generate the same SQL as knex's fn.now().
    const knexSchema = db
      .connection()
      .schema.createTable('users', (table) => {
        table.increments('id')
        table.timestamp('created_at').defaultTo(db.connection().getWriteClient().fn.now())
      })
      .toQuery()
    assert.deepEqual(queries, [knexSchema])
  })
  // 10 assertions = 5 properties checked for each of the 2 create-table queries.
  test('emit db:query event when schema instructions are executed', async ({ assert }) => {
    assert.plan(10)
    class UsersSchema extends getBaseSchema() {
      public up() {
        this.schema.createTable('schema_users', (table) => {
          table.increments('id')
          table.string('username')
        })
        this.schema.createTable('schema_accounts', (table) => {
          table.increments('id')
          table.integer('user_id').unsigned().references('schema_users.id')
        })
      }
    }
    const trx = await db.transaction()
    // Debug mode on the transaction enables db:query emission.
    trx.debug = true
    const schema = new UsersSchema(trx, 'users.ts', false)
    app.container.use('Adonis/Core/Event').on('db:query', (query) => {
      assert.property(query, 'sql')
      assert.isTrue(query.inTransaction)
      assert.equal(query.connection, 'primary')
      assert.property(query, 'duration')
      assert.equal(query.method, 'create')
    })
    try {
      await schema.execUp()
      await trx.commit()
    } catch (error) {
      await trx.rollback()
    }
    await db.connection().schema.dropTable('schema_accounts')
    await db.connection().schema.dropTable('schema_users')
  })
  test('do not emit db:query debugging is turned off', async () => {
    class UsersSchema extends getBaseSchema() {
      public up() {
        this.schema.createTable('schema_users', (table) => {
          table.increments('id')
          table.string('username')
        })
        this.schema.createTable('schema_accounts', (table) => {
          table.increments('id')
          table.integer('user_id').unsigned().references('schema_users.id')
        })
      }
    }
    const trx = await db.transaction()
    trx.debug = false
    const schema = new UsersSchema(trx, 'users.ts', false)
    // With debugging off the listener must never fire.
    app.container.use('Adonis/Core/Event').on('db:query', () => {
      throw new Error('Never expected to reach here')
    })
    try {
      await schema.execUp()
      await trx.commit()
    } catch (error) {
      await trx.rollback()
    }
    await db.connection().schema.dropTable('schema_accounts')
    await db.connection().schema.dropTable('schema_users')
  })
  // Same as above, but debug is enabled via the schema class property instead
  // of the transaction flag.
  test('emit db:query when enabled on the schema', async ({ assert }) => {
    assert.plan(10)
    class UsersSchema extends getBaseSchema() {
      public debug = true
      public up() {
        this.schema.createTable('schema_users', (table) => {
          table.increments('id')
          table.string('username')
        })
        this.schema.createTable('schema_accounts', (table) => {
          table.increments('id')
          table.integer('user_id').unsigned().references('schema_users.id')
        })
      }
    }
    const trx = await db.transaction()
    const schema = new UsersSchema(trx, 'users.ts', false)
    app.container.use('Adonis/Core/Event').on('db:query', (query) => {
      assert.property(query, 'sql')
      assert.isTrue(query.inTransaction)
      assert.equal(query.connection, 'primary')
      assert.property(query, 'duration')
      assert.equal(query.method, 'create')
    })
    try {
      await schema.execUp()
      await trx.commit()
    } catch (error) {
      await trx.rollback()
    }
    await db.connection().schema.dropTable('schema_accounts')
    await db.connection().schema.dropTable('schema_users')
  })
  // this.knex() should yield the same partial-index predicate builder as
  // the connection's knexQuery().
  test('define index predicate as knex query', async ({ assert }) => {
    class UsersSchema extends getBaseSchema() {
      public up() {
        this.schema.createTable('users', (table) => {
          table.increments('id')
          table.index(['name', 'last_name'], 'idx_name_last_name', {
            indexType: 'FULLTEXT',
            storageEngineIndexType: 'hash',
            predicate: this.knex().whereNotNull('email'),
          })
        })
      }
    }
    const schema = new UsersSchema(db.connection(), 'users.ts', true)
    const queries = await schema.execUp()
    const knexSchema = db
      .connection()
      .schema.createTable('users', (table) => {
        table.increments('id')
        table.index(['name', 'last_name'], 'idx_name_last_name', {
          indexType: 'FULLTEXT',
          storageEngineIndexType: 'hash',
          predicate: db.connection().knexQuery().whereNotNull('email'),
        })
      })
      .toQuery()
    assert.deepEqual(queries, [knexSchema])
  })
})
import * as util from 'util';
import * as moment from 'moment';
import * as _ from 'lodash';
// Local
import Messages = require('../messages');
const messages = Messages();
import logger = require('../core/logApi');
import pkgUtils = require('./packageUtils');
// Stripping CodeCoverage, HasPassedCodeCoverageCheck as they are causing a perf issue in 47.0+ W-6997762
// Default (non-verbose) Tooling API query over Package2Version.
const DEFAULT_SELECT =
  'SELECT Id, Package2Id, SubscriberPackageVersionId, Name, Package2.Name, Package2.NamespacePrefix, ' +
  'Description, Tag, Branch, MajorVersion, MinorVersion, PatchVersion, BuildNumber, IsReleased, ' +
  'CreatedDate, LastModifiedDate, IsPasswordProtected, AncestorId, ValidationSkipped ' +
  'FROM Package2Version';
// Verbose query: adds the coverage/conversion/build fields omitted above.
const VERBOSE_SELECT =
  'SELECT Id, Package2Id, SubscriberPackageVersionId, Name, Package2.Name, Package2.NamespacePrefix, ' +
  'Description, Tag, Branch, MajorVersion, MinorVersion, PatchVersion, BuildNumber, IsReleased, ' +
  'CreatedDate, LastModifiedDate, IsPasswordProtected, CodeCoverage, HasPassedCodeCoverageCheck, AncestorId, ValidationSkipped, ' +
  'ConvertedFromVersionId, Package2.IsOrgDependent, ReleaseVersion, BuildDurationInSeconds, HasMetadataRemoved ' +
  'FROM Package2Version';
// ORDER BY applied when the user does not pass --orderby.
const DEFAULT_ORDER_BY_FIELDS = 'Package2Id, Branch, MajorVersion, MinorVersion, PatchVersion, BuildNumber';
class PackageVersionListCommand {
static DEFAULT_SELECT = DEFAULT_SELECT;
static VERBOSE_SELECT = VERBOSE_SELECT;
static DEFAULT_ORDER_BY_FIELDS = DEFAULT_ORDER_BY_FIELDS;
// TODO: proper property typing
// eslint-disable-next-line no-undef
[property: string]: any;
constructor() {
this.logger = logger.child('package:version:list');
this.results = [];
this.verbose = false;
this.concise = false;
}
execute(context) {
return this._execute(context).catch((err) => {
// TODO
// until package2 is GA, wrap perm-based errors w/ 'contact sfdc' action (REMOVE once package2 is GA'd)
throw pkgUtils.applyErrorAction(err);
});
}
_execute(context) {
this.org = context.org;
this.force = context.org.force;
this.verbose = context.flags.verbose;
this.concise = context.flags.concise;
return this.force.toolingQuery(this.org, this._constructQuery(context.flags)).then(async (queryResult) => {
const records = queryResult.records;
if (records && records.length > 0) {
let ancestorVersionsMap;
let containerOptionsMap;
// lookup ancestorVersions if ancestorIds are present
const ancestorIds = records.filter((record) => record.AncestorId).map((record) => record.AncestorId);
if (ancestorIds && ancestorIds.length > 0) {
ancestorVersionsMap = await pkgUtils.getPackageVersionStrings(ancestorIds, this.force, this.org);
}
// Get the container options for each package version. We need this for determining if the version is OrgDependent
const recordIds = [...new Set(records.map((record) => record.Package2Id))];
containerOptionsMap = await pkgUtils.getContainerOptions(recordIds, this.force, this.org);
records.forEach((record) => {
const ids = [record.Id, record.SubscriberPackageVersionId];
const aliases = [];
ids.forEach((id) => {
const matches = pkgUtils.getPackageAliasesFromId(id, this.force);
if (matches.length > 0) {
aliases.push(matches);
}
});
const AliasStr = aliases.length > 0 ? aliases.join() : '';
// set Ancestor display values
let ancestorVersion = null;
if (record.AncestorId) {
ancestorVersion = ancestorVersionsMap.get(record.AncestorId);
} else if (containerOptionsMap.get(record.Package2Id) !== 'Managed') {
// display N/A if package is unlocked
ancestorVersion = 'N/A';
record.AncestorId = 'N/A';
}
const codeCoverage =
record.CodeCoverage != null
? `${record.CodeCoverage['apexCodeCoveragePercentage']}%`
: record.Package2.IsOrgDependent === true || record.ValidationSkipped === true
? 'N/A'
: '';
const hasPassedCodeCoverageCheck =
record.Package2.IsOrgDependent === true || record.ValidationSkipped === true
? 'N/A'
: record.HasPassedCodeCoverageCheck;
const isOrgDependent =
containerOptionsMap.get(record.Package2Id) === 'Managed'
? 'N/A'
: record.Package2.IsOrgDependent === true
? 'Yes'
: 'No';
const hasMetadataRemoved =
containerOptionsMap.get(record.Package2Id) !== 'Managed'
? 'N/A'
: record.HasMetadataRemoved === true
? 'Yes'
: 'No';
this.results.push({
Package2Id: record.Package2Id,
Branch: record.Branch,
Tag: record.Tag,
MajorVersion: record.MajorVersion,
MinorVersion: record.MinorVersion,
PatchVersion: record.PatchVersion,
BuildNumber: record.BuildNumber,
Id: record.Id,
SubscriberPackageVersionId: record.SubscriberPackageVersionId,
ConvertedFromVersionId: record.ConvertedFromVersionId,
Name: record.Name,
NamespacePrefix: record.Package2.NamespacePrefix,
Package2Name: record.Package2.Name,
Description: record.Description,
Version: [record.MajorVersion, record.MinorVersion, record.PatchVersion, record.BuildNumber].join('.'),
// Table output needs string false to display 'false'
IsPasswordProtected: context.flags.json
? record.IsPasswordProtected
: record.IsPasswordProtected.toString(),
IsReleased: context.flags.json ? record.IsReleased : record.IsReleased.toString(),
CreatedDate: moment(record.CreatedDate).format('YYYY-MM-DD HH:mm'),
LastModifiedDate: moment(record.LastModifiedDate).format('YYYY-MM-DD HH:mm'),
InstallUrl: pkgUtils.INSTALL_URL_BASE + record.SubscriberPackageVersionId,
CodeCoverage: codeCoverage,
HasPassedCodeCoverageCheck: hasPassedCodeCoverageCheck,
ValidationSkipped: record.ValidationSkipped,
AncestorId: record.AncestorId,
AncestorVersion: ancestorVersion,
Alias: AliasStr,
IsOrgDependent: isOrgDependent,
ReleaseVersion: record.ReleaseVersion == null ? '' : Number.parseFloat(record.ReleaseVersion).toFixed(1),
BuildDurationInSeconds: record.BuildDurationInSeconds == null ? '' : record.BuildDurationInSeconds,
HasMetadataRemoved: hasMetadataRemoved,
});
});
}
return this.results;
});
}
_getLastDays(paramName, lastDays) {
if (isNaN(lastDays)) {
throw new Error(messages.getMessage('invalidDaysNumber', paramName, 'packaging'));
}
if (parseInt(lastDays, 10) < 0) {
throw new Error(messages.getMessage('invalidDaysNumber', paramName, 'packaging'));
}
return lastDays;
}
// construct custom WHERE clause parts
_constructWhere(idsOrAliases, createdLastDays, lastModLastDays) {
const where = [];
// filter on given package ids
if (idsOrAliases) {
// split and remove dups
if (util.isString(idsOrAliases)) {
idsOrAliases = idsOrAliases.split(',');
}
idsOrAliases = _.uniq(idsOrAliases);
// resolve any aliases
const packageIds = idsOrAliases.map((idOrAlias) => pkgUtils.getPackageIdFromAlias(idOrAlias, this.force));
// validate ids
packageIds.forEach((packageid) => {
pkgUtils.validateId(pkgUtils.BY_LABEL.PACKAGE_ID, packageid);
});
// stash where part
if (packageIds.length > 1) {
where.push(`Package2Id IN ('${packageIds.join("','")}')`);
} else {
where.push(`Package2Id = '${packageIds[0]}'`);
}
}
// filter on created date, days ago: 0 for today, etc
if (!util.isNullOrUndefined(createdLastDays)) {
createdLastDays = this._getLastDays('createdlastdays', createdLastDays);
where.push(`CreatedDate = LAST_N_DAYS:${createdLastDays}`);
}
// filter on last mod date, days ago: 0 for today, etc
if (!util.isNullOrUndefined(lastModLastDays)) {
lastModLastDays = this._getLastDays('modifiedlastdays', lastModLastDays);
where.push(`LastModifiedDate = LAST_N_DAYS:${lastModLastDays}`);
}
// exclude deleted
where.push('IsDeprecated = false');
return where;
}
// assemble query
_assembleQueryParts(select, where = [], orderBy = '') {
let wherePart = '';
if (where.length > 0) {
wherePart = ` WHERE ${where.join(' AND ')}`;
}
const query = `${select}${wherePart}${orderBy}`;
logger.debug(query);
return query;
}
// construct query based on given params
_constructQuery(flags: any = {}) {
// construct custom WHERE clause, if applicable
const where = this._constructWhere(flags.packages, flags.createdlastdays, flags.modifiedlastdays);
if (flags.released) {
where.push('IsReleased = true');
}
// construct ORDER BY clause
// TODO: validate given fields
const orderBy = ` ORDER BY ${flags.orderby ? flags.orderby : DEFAULT_ORDER_BY_FIELDS}`;
return this._assembleQueryParts(flags.verbose === true ? VERBOSE_SELECT : DEFAULT_SELECT, where, orderBy);
}
/**
* indicates that the human readable message should be tabular
*
* @returns {[{}...]}
*/
getColumnData() {
this.logger.styledHeader(this.logger.color.blue(`Package Versions [${this.results.length}]`));
if (this.concise) {
return [
{
key: 'Package2Id',
label: messages.getMessage('packageId', [], 'package_version_list'),
},
{
key: 'Version',
label: messages.getMessage('version', [], 'package_version_list'),
},
{
key: 'SubscriberPackageVersionId',
label: messages.getMessage('subscriberPackageVersionId', [], 'package_version_list'),
},
{ key: 'IsReleased', label: 'Released' },
];
}
const columns = [
{ key: 'Package2Name', label: 'Package Name' },
{ key: 'NamespacePrefix', label: 'Namespace' },
{ key: 'Name', label: 'Version Name' },
{
key: 'Version',
label: messages.getMessage('version', [], 'package_version_list'),
},
{
key: 'SubscriberPackageVersionId',
label: messages.getMessage('subscriberPackageVersionId', [], 'package_version_list'),
},
{
key: 'Alias',
label: messages.getMessage('alias', [], 'package_version_list'),
},
{
key: 'IsPasswordProtected',
label: messages.getMessage('installKey', [], 'package_version_list'),
},
{ key: 'IsReleased', label: 'Released' },
{
key: 'ValidationSkipped',
label: messages.getMessage('validationSkipped', [], 'package_version_list'),
},
{ key: 'AncestorId', label: 'Ancestor' },
{ key: 'AncestorVersion', label: 'Ancestor Version' },
{
key: 'Branch',
label: messages.getMessage('packageBranch', [], 'package_version_list'),
},
];
if (this.verbose) {
columns.push({
key: 'Package2Id',
label: messages.getMessage('packageId', [], 'package_version_list'),
});
columns.push({
key: 'InstallUrl',
label: messages.getMessage('installUrl', [], 'package_version_list'),
});
columns.push({
key: 'Id',
label: messages.getMessage('id', [], 'package_version_list'),
});
columns.push({ key: 'CreatedDate', label: 'Created Date' });
columns.push({ key: 'LastModifiedDate', label: 'Last Modified Date' });
columns.push({
key: 'Tag',
label: messages.getMessage('packageTag', [], 'package_version_list'),
});
columns.push({
key: 'Description',
label: messages.getMessage('description', [], 'package_version_list'),
});
columns.push({
key: 'CodeCoverage',
label: messages.getMessage('codeCoverage', [], 'package_version_list'),
});
columns.push({
key: 'HasPassedCodeCoverageCheck',
label: messages.getMessage('hasPassedCodeCoverageCheck', [], 'package_version_list'),
});
columns.push({
key: 'ConvertedFromVersionId',
label: messages.getMessage('convertedFromVersionId', [], 'package_version_list'),
});
columns.push({
key: 'IsOrgDependent',
label: messages.getMessage('isOrgDependent', [], 'package_list'),
});
columns.push({
key: 'ReleaseVersion',
label: messages.getMessage('releaseVersion', [], 'package_version_list'),
});
columns.push({
key: 'BuildDurationInSeconds',
label: messages.getMessage('buildDurationInSeconds', [], 'package_version_list'),
});
columns.push({
key: 'HasMetadataRemoved',
label: messages.getMessage('hasMetadataRemoved', [], 'package_version_list'),
});
}
return columns;
}
}
export = PackageVersionListCommand; | the_stack |
import * as React from 'react';
import { Label } from 'office-ui-fabric-react/lib/Label';
import { IChoiceGroupOption } from 'office-ui-fabric-react/lib/ChoiceGroup';
import { Spinner, SpinnerSize, SpinnerType } from 'office-ui-fabric-react/lib/Spinner';
import { Async } from 'office-ui-fabric-react/lib/Utilities';
import { Checkbox } from 'office-ui-fabric-react/lib/Checkbox';
import { IPropertyFieldListMultiPickerHostProps, IPropertyFieldListMultiPickerHostState } from './IPropertyFieldListMultiPickerHost';
import { ISPLists, ISPList } from './IPropertyFieldListPickerHost';
import SPListPickerService from '../../services/SPListPickerService';
import FieldErrorMessage from '../errorMessage/FieldErrorMessage';
import * as telemetry from '../../common/telemetry';
import { IPropertyFieldList } from './IPropertyFieldListPicker';
import { setPropertyValue } from '../../helpers/GeneralHelper';
/**
* Renders the controls for PropertyFieldSPListMultiplePicker component
*/
/**
 * Renders the checkbox list for the PropertyFieldSPListMultiplePicker
 * property-pane control: loads the lists of the current (or targeted) web,
 * tracks the selected list ids, debounces validation and notifies the
 * web part when the value changes.
 */
export default class PropertyFieldListMultiPickerHost extends React.Component<IPropertyFieldListMultiPickerHostProps, IPropertyFieldListMultiPickerHostState> {
  // Mirrors state.loaded; render() reads this flag to decide spinner vs list.
  private loaded: boolean = false;
  private async: Async;
  private delayedValidate: (value: string[]) => void;
  /**
   * Constructor
   */
  constructor(props: IPropertyFieldListMultiPickerHostProps) {
    super(props);
    telemetry.track('PropertyFieldListMultiPicker', {
      disabled: props.disabled
    });
    this.onChanged = this.onChanged.bind(this);
    this.onSelectAllChanged = this.onSelectAllChanged.bind(this);
    this.state = {
      loadedLists: {
        value: []
      },
      results: [],
      selectedKeys: [],
      loaded: this.loaded,
      errorMessage: ''
    };
    this.async = new Async(this);
    this.validate = this.validate.bind(this);
    this.notifyAfterValidate = this.notifyAfterValidate.bind(this);
    // Debounce validation so rapid check/uncheck does not spam onGetErrorMessage
    this.delayedValidate = this.async.debounce(this.validate, this.props.deferredValidationTime);
  }
  public componentDidMount() {
    this.loadLists();
  }
  public componentDidUpdate(prevProps: IPropertyFieldListMultiPickerHostProps, prevState: IPropertyFieldListMultiPickerHostState): void {
    // Reload when the template filter or the targeted web changes
    if (this.props.baseTemplate !== prevProps.baseTemplate ||
      this.props.webAbsoluteUrl !== prevProps.webAbsoluteUrl) {
      this.loadLists();
    }
  }
  /**
   * Loads the list from SharePoint current web site, or target site if specified by webRelativeUrl
   */
  private async loadLists(): Promise<void> {
    const {
      context,
      selectedLists
    } = this.props;
    // Builds the SharePoint List service
    const listService: SPListPickerService = new SPListPickerService(this.props, context);
    const listsToExclude: string[] = this.props.listsToExclude || [];
    let selectedListsKeys: string[] = [];
    // selectedLists may be a string[] of ids or IPropertyFieldList[] objects
    if (selectedLists && selectedLists.length) {
      const firstItem = selectedLists[0];
      if (typeof firstItem === 'string') {
        selectedListsKeys = selectedLists as string[];
      }
      else {
        selectedListsKeys = (selectedLists as IPropertyFieldList[]).map(sl => sl.id);
      }
    }
    const options: IChoiceGroupOption[] = [];
    const selectedKeys: string[] = [];
    // Gets the libs
    const response = await listService.getLibs();
    response.value.forEach((list: ISPList) => {
      let isSelected: boolean = false;
      let indexInExisting: number = -1;
      // Defines if the current list must be selected by default
      if (selectedListsKeys) {
        indexInExisting = selectedListsKeys.indexOf(list.Id);
      }
      if (indexInExisting > -1) {
        isSelected = true;
        selectedKeys.push(list.Id);
      }
      // Add the option to the list if not inside the 'listsToExclude' array
      // (exclusion entries may be either titles or ids)
      if (listsToExclude.indexOf(list.Title) === -1 && listsToExclude.indexOf(list.Id) === -1) {
        options.push({
          key: list.Id,
          text: list.Title,
          checked: isSelected
        });
      }
    });
    this.loaded = true;
    this.setState({
      loadedLists: response,
      results: options,
      selectedKeys: selectedKeys,
      loaded: true
    });
  }
  /**
   * Raises when a list has been selected
   */
  private onChanged(element: React.FormEvent<HTMLElement>, isChecked: boolean): void {
    if (element) {
      // The list id travels through the checkbox inputProps value (see render)
      const value: string = (element.currentTarget as any).value;
      let selectedKeys = this.state.selectedKeys;
      // Check if the element is selected
      if (isChecked === false) {
        // Remove the unselected item
        selectedKeys = selectedKeys.filter(s => s !== value);
      } else {
        // Add the selected item and filter out the doubles
        selectedKeys.push(value);
        selectedKeys = selectedKeys.filter((item, pos, self) => {
          return self.indexOf(item) === pos;
        });
      }
      // Update the state and validate
      this.setState({
        selectedKeys: selectedKeys
      });
      this.delayedValidate(selectedKeys);
    }
  }
  /**
   * Raises when the select all checkbox is changed
   */
  private onSelectAllChanged(element: React.FormEvent<HTMLElement>, isChecked: boolean): void {
    if (element) {
      let selectedKeys = new Array<string>();
      const {
        results
      } = this.state;
      // Checked -> select every option; unchecked -> empty selection
      if (isChecked === true) {
        results.forEach((value: IChoiceGroupOption) => {
          selectedKeys.push(value.key);
        });
      }
      this.setState({
        selectedKeys: selectedKeys
      });
      this.delayedValidate(selectedKeys);
    }
  }
  /**
   * Validates the new custom field value
   */
  private validate(value: string[]): void {
    // No custom validator configured: accept the value immediately
    if (this.props.onGetErrorMessage === null || typeof this.props.onGetErrorMessage === 'undefined') {
      this.notifyAfterValidate(value);
      return;
    }
    // The validator may answer synchronously (string) or with a promise;
    // an empty message means the value is valid and the parent is notified.
    const errResult: string | PromiseLike<string> = this.props.onGetErrorMessage(value || []);
    if (typeof errResult !== 'undefined') {
      if (typeof errResult === 'string') {
        if (errResult === '') {
          this.notifyAfterValidate(value);
        }
        this.setState({
          errorMessage: errResult
        });
      } else {
        errResult.then((errorMessage: string) => {
          if (typeof errorMessage === 'undefined' || errorMessage === '') {
            this.notifyAfterValidate(value);
          }
          this.setState({
            errorMessage: errorMessage
          });
        });
      }
    } else {
      this.notifyAfterValidate(value);
    }
  }
  /**
   * Notifies the parent Web Part of a property value change
   */
  private notifyAfterValidate(newValue: string[]) {
    const {
      onPropertyChange,
      onChange,
      selectedLists,
      targetProperty,
      properties,
      includeListTitleAndUrl
    } = this.props;
    const {
      loadedLists
    } = this.state;
    let propValue: string[] | IPropertyFieldList[] | undefined;
    if (!newValue || !newValue.length) {
      propValue = [];
    }
    else {
      // Either persist plain ids, or resolve full id/title/url objects
      if (includeListTitleAndUrl) {
        propValue = loadedLists.value.filter(l => newValue.indexOf(l.Id) !== -1).map(l => {
          return {
            id: l.Id,
            title: l.Title,
            url: l.RootFolder.ServerRelativeUrl
          };
        });
      }
      else {
        propValue = [...newValue];
      }
    }
    if (onPropertyChange && newValue !== null) {
      setPropertyValue(properties, targetProperty, propValue);
      onPropertyChange(targetProperty, selectedLists, propValue);
      // Trigger the apply button
      if (typeof onChange !== 'undefined' && onChange !== null) {
        onChange(targetProperty, propValue);
      }
    }
  }
  /**
   * Called when the component will unmount
   */
  public componentWillUnmount() {
    // Cancels any pending debounced validate call
    this.async.dispose();
  }
  /**
   * Renders the SPListMultiplePicker controls with Office UI Fabric
   */
  public render(): JSX.Element {
    const {
      selectedKeys,
      results,
      errorMessage
    } = this.state;
    const {
      label,
      disabled,
      showSelectAll,
      selectAllInList,
      selectAllInListLabel,
      targetProperty
    } = this.props;
    // Show a spinner until loadLists has resolved
    if (this.loaded === false) {
      return (
        <div>
          <Label>{label}</Label>
          <Spinner size={SpinnerSize.medium} />
        </div>
      );
    } else {
      // (removed unused local `styleOfLabel` — it was computed but never referenced)
      // Renders content
      return (
        <div>
          {
            (showSelectAll === false || selectAllInList === true) &&
            <Label>{label}</Label>
          }
          {
            showSelectAll === true &&
            <div style={{ marginBottom: '5px' }} className='ms-ChoiceField'>
              <Checkbox
                checked={selectedKeys.length === results.length}
                label={selectAllInList === true ? selectAllInListLabel : label}
                onChange={this.onSelectAllChanged}
                styles={{
                  checkbox: {
                    backgroundColor: (selectedKeys.length > 0 ? '#f4f4f4' : 'inherit'),
                    visibility: (selectAllInList === false ? 'hidden' : 'visible')
                  }
                }}
              />
            </div>
          }
          {
            results.map((item: IChoiceGroupOption, index: number) => {
              const uniqueKey = targetProperty + '-' + item.key;
              return (
                <div style={{ marginBottom: '5px' }} className='ms-ChoiceField' key={uniqueKey}>
                  <Checkbox
                    checked={selectedKeys.indexOf(item.key.toString()) >= 0}
                    disabled={disabled}
                    label={item.text}
                    onChange={this.onChanged}
                    inputProps={{ value: item.key }}
                  />
                </div>
              );
            })
          }
          <FieldErrorMessage errorMessage={errorMessage} />
        </div>
      );
    }
  }
}
// Augments the global Window with the `safari` object Safari injects into
// extension contexts (global page, bars, popovers).
interface Window {
  safari: typeof safari;
}
declare namespace safari {
  // The shared application object: access to browser windows and tabs.
  export var application: SafariApplication;
  // The current extension: settings, content scripts, UI elements.
  export var extension: SafariExtension;
  // The page-level object for the running context: the global page or an
  // extension bar, depending on where the script executes.
  export var self: SafariExtensionGlobalPage | SafariExtensionBar;
}
// Base event type for the Safari extension event model; mirrors the DOM
// event phases (capturing, targeting, bubbling).
interface SafariEvent {
  /**
   * The type of the event.
   * The string used to identify a particular type of event is documented in the reference for that class.
   */
  type: string;
  /**
   * The target of the event.
   * This attribute stays the same as the event moves through the event-dispatch hierarchy. Its value is the same as the object that the event is sent to during the targeting phase.
   */
  target: SafariEventTarget;
  /**
   * The object that the event is currently being sent to.
   * This attribute varies as the event progresses through the phases, changing as the event moves through the event-dispatch hierarchy.
   */
  currentTarget: SafariEventTarget;
  /**
   * The time and date that the event was created.
   */
  timestamp: number;
  /**
   * The event-handling phase that the event is in.
   * The values for this property are the same as the values used by Webkit to identify the event-handling phases.
   */
  eventPhase: number;
  /**
   * A Boolean value that indicates whether the event goes through the bubbling phase.
   */
  bubbles: boolean;
  /**
   * A Boolean value that indicates whether the event can be canceled.
   */
  cancelable: boolean;
  /**
   * A Boolean value that indicates whether the event’s default action has been prevented.
   */
  defaultPrevented: boolean;
  /**
   * Prevents the event from any further propagation.
   * Propagation can be stopped only for cancelable events. After propagation is stopped, the event is not sent to any other targets.
   */
  stopPropagation() : void;
  /**
   * Prevents the browser from performing the default action for an event.
   * Use this method to indicate that your extension has already fully handled the event; you don’t want the browser to do anything. Note that preventing the default action does not stop an event from propagating.
   */
  preventDefault(): void;
}
// Callback signature accepted by add/removeEventListener below.
interface SafariEventListener extends Function {
  (event: SafariEvent): any;
}
// Anything that can dispatch SafariEvents (windows, tabs, menus, settings…).
interface SafariEventTarget {
  addEventListener(type: string, listener: SafariEventListener, useCapture?: boolean): void;
  removeEventListener(type: string, listener: SafariEventListener, useCapture?: boolean): void;
}
// A single Safari browser window and the tabs it contains.
interface SafariBrowserWindow extends SafariEventTarget {
  // Tabs currently open in this window.
  tabs: Array<SafariBrowserTab>;
  // Whether the window is visible on screen.
  visible: boolean;
  // The tab currently in the foreground of this window.
  activeTab: SafariBrowserTab;
  // Brings the window to the front.
  activate(): void;
  close(): void;
  /**
   * Opens a new tab in the window.
   * Available in Safari 5.0 and later.
   * @param visibility Either foreground if the tab should be opened in the foreground, or background if it should be opened in the background.
   * @param index The desired location of the new tab.
   * @returns A new tab.
   */
  openTab (visibility?: string, index?: number): SafariBrowserTab;
  // Inserts an existing tab at the given position in this window.
  insertTab(tab: SafariBrowserTab, index: number): SafariBrowserTab;
}
// A single tab inside a browser window.
interface SafariBrowserTab extends SafariEventTarget {
  // The window that owns this tab.
  browserWindow: SafariBrowserWindow;
  // The Reader-mode object associated with this tab.
  reader: SafariReader;
  /**
   * The tab’s current title.
   * The tab’s title is the same as the title of the webpage in most cases. For example, the title of the webpage may be truncated for display, but the value of this property is not truncated.
   * Available in Safari 5.0 and later.
   */
  title: string;
  // Proxy used to message the web page loaded in this tab.
  page: SafariWebPageProxy;
  /**
   * The URL loaded in this tab.
   * Setting this attribute to a new value loads the page at the new URL in the tab.
   * Available in Safari 5.0 and later.
   */
  url: string;
  // Returns a data URL capture of the tab's visible contents.
  visibleContentsAsDataURL(): string;
  // Makes this tab the active tab of its window.
  activate(): void;
  close(): void;
}
// Safari Reader state for a tab.
interface SafariReader extends SafariEventTarget {
  // Whether Reader is available for the current page.
  available: boolean;
  // The tab this Reader object belongs to.
  tab: SafariBrowserTab;
  visible: boolean;
  // Enter / leave Reader mode.
  enter(): void;
  exit(): void;
  dispatchMessage (name: string, message?: any): void;
}
// Proxy object for sending named messages to the page in a tab.
interface SafariWebPageProxy {
  dispatchMessage (name: string, message?: any): void;
}
// The extension's invisible global page; its window hosts long-lived logic.
interface SafariExtensionGlobalPage {
  contentWindow: Window;
}
// A popover attached to a toolbar item.
interface SafariExtensionPopover extends SafariEventTarget {
  // Unique identifier of the popover within the extension.
  identifier: string;
  visible: boolean;
  // The window of the HTML document displayed in the popover.
  contentWindow: Window;
  height: number;
  width: number;
  hide(): void;
}
// A menu owned by the extension (e.g. shown from a toolbar item).
interface SafariExtensionMenu {
  identifier: string;
  menuItems: Array<SafariExtensionMenuItem>;
  visible: boolean;
  // Item management: identifiers must be unique within the menu; `command`
  // defaults to the identifier when omitted.
  appendMenuItem (identifier: string, title: string, command?: string): SafariExtensionMenuItem;
  appendSeparator (identifier: string): SafariExtensionMenuItem;
  insertMenuItem (index: number, identifier: string, title: string, command?: string): SafariExtensionMenuItem;
  insertSeparator (index: number, identifier: string): SafariExtensionMenuItem;
  removeMenuItem (index: number): void;
}
// One entry of a SafariExtensionMenu.
interface SafariExtensionMenuItem extends SafariEventTarget {
  // Command identifier sent when the item is activated.
  command: string;
  identifier: string;
  // True when the item is a separator.
  separator: boolean;
  title: string;
  image: string;
  submenu: SafariExtensionMenu;
  visible: boolean;
  disabled: boolean;
  checkedState: number;
}
// Key-value storage for the extension's settings; supports both method and
// indexed access.
interface SafariExtensionSettings extends SafariEventTarget {
  [index: string]: any;
  [index: number]: any;
  getItem(key: string): any;
  setItem(key: string, value: any): void;
  removeItem(key: string): void;
  clear(): void;
}
// Same contract as SafariExtensionSettings but backed by secure storage.
interface SafariExtensionSecureSettings extends SafariEventTarget {
  [index: string]: any;
  getItem(key: string): any;
  setItem(key: string, value: any): void;
  removeItem(key: string): void;
  clear(): void;
}
// An extension bar (a horizontal bar displayed below the Safari toolbar).
interface SafariExtensionBar extends SafariEventTarget {
  identifier: string;
  label: string;
  visible: boolean;
  // The window the bar is attached to and the window of its HTML content.
  browserWindow: SafariBrowserWindow;
  contentWindow: Window;
  // doNotRemember: when true the visibility change is not persisted.
  hide(doNotRemember?: boolean): void;
  show(doNotRemember?: boolean): void;
}
// A button the extension contributes to the Safari toolbar.
interface SafariExtensionToolbarItem extends SafariEventTarget {
  /**
   * The current badge number.
   */
  badge: number;
  /**
   * The URL of the current image.
   */
  image: string;
  /**
   * The label of the toolbar item, as shown in the toolbar’s overflow menu.
   */
  label: string;
  /**
   * The label of the toolbar item, as shown in the Customize palette.
   * This attribute is optional; its value defaults to the value of label.
   */
  paletteLabel: string;
  /**
   * The tooltip (help tag) of the toolbar item.
   * This attribute is optional; its value defaults to the value of label.
   */
  toolTip: string;
  // Optional menu / popover shown when the item is clicked.
  menu: SafariExtensionMenu;
  popover: SafariExtensionPopover;
  // The browser window this toolbar item instance belongs to.
  browserWindow: SafariBrowserWindow;
  // Command identifier dispatched when the item is activated.
  command: string;
  disabled: boolean;
  identifier: string;
  showMenu(): void;
  showPopover(): void;
  // Requests a validate event so the item's state can be refreshed.
  validate(): void;
}
// Private-browsing state of the application.
interface SafariPrivateBrowsing {
  enabled: boolean;
}
// The extension itself: metadata, UI elements, injected content and settings.
interface SafariExtension {
  bars: Array<SafariExtensionBar>;
  // Base URI for resolving resources bundled with the extension.
  baseURI: string;
  globalPage: SafariExtensionGlobalPage;
  toolbarItems: Array<SafariExtensionToolbarItem>;
  // Human-readable and bundle version strings from the extension bundle.
  displayVersion: string;
  bundleVersion: string;
  menus: Array<SafariExtensionMenu>;
  createMenu (identifier: string): SafariExtensionMenu;
  removeMenu (identifier: string): void;
  popovers: Array<SafariExtensionPopover>;
  createPopover(identifier: string, url: string, width?: number, height?: number): SafariExtensionPopover;
  removePopover(identifier: string): void;
  // Content script / stylesheet injection; whitelist/blacklist are URL
  // patterns, and the returned string identifies the injected resource.
  addContentScript (source: string, whitelist: Array<string>, blacklist: Array<string>, runAtEnd: boolean): string;
  addContentScriptFromURL (url: string, whitelist: Array<string>, blacklist: Array<string>, runAtEnd: boolean): string;
  addContentStyleSheet (source: string, whitelist: Array<string>, blacklist: Array<string>): string;
  addContentStyleSheetFromURL (url: string, whitelist: Array<string>, blacklist: Array<string>): string;
  removeContentScript(url: string): void;
  removeContentScripts(): void;
  removeContentStyleSheet(url: string): void;
  removeContentStyleSheets(): void;
  settings: SafariExtensionSettings;
  secureSettings: SafariExtensionSecureSettings;
}
// The Safari application: entry point to windows and private-browsing state.
interface SafariApplication extends SafariEventTarget {
  activeBrowserWindow: SafariBrowserWindow;
  browserWindows: Array<SafariBrowserWindow>;
  privateBrowsing: SafariPrivateBrowsing;
  openBrowserWindow(): SafariBrowserWindow;
}
interface SafariExtensionContextMenuEvent extends SafariEvent {
/**
* The target of the event.
* This attribute stays the same as the event moves through the event-dispatch hierarchy. Its value is the same as the object that the event is sent to during the targeting phase.
*/
target: SafariExtensionContextMenuItem;
/**
* The object that the event is currently being sent to.
* This attribute varies as the event progresses through the phases, changing as the event moves through the event-dispatch hierarchy.
*/
currentTarget: SafariExtensionContextMenuItem;
/**
* Information about the current context menu event.
*/
userInfo: any;
/**
* The context menu being built up.
*/
contextMenu: SafariExtensionContextMenu;
}
interface SafariExtensionContextMenu {
/**
* Returns a list of the context menu items from this extension.
* Only menu items from your extension are returned.
*/
contextMenuItems: any[];
/**
* Appends a menu item to the contextual menu.
* If another menu item with the same identifier already exists, it is removed before appending the menu item. If command is not supplied, identifier is used as the command identifier.
* @param identifier The unique identifier of the menu item.
* @param title The title of the menu item.
* @param command The command identifier that the context menu item sends when activated.
* @returns The context menu item that was appended.
*/
appendContextMenuItem (identifier: string, title: string, command?: string) : SafariExtensionContextMenuItem;
/**
* Inserts a menu item at a specific index in the contextual menu.
* If another menu item with the same identifier already exists, it is removed before appending the menu item. If command is not supplied, identifier is used as the command identifier.
* @param index The index where the menu item is being inserted.
* @param identifier The unique identifier of the menu item.
* @param title The title of the menu item.
* @param command The command identifier that the context menu item sends when activated.
* @returns The context menu item that was inserted.
*/
insertContextMenuItem (index: number, identifier: string, title: string, command?: string): SafariExtensionContextMenuItem;
}
interface SafariExtensionContextMenuItem extends SafariEventTarget {
/**
* The command identifier that the context menu item sends when activated.
* Setting an empty string, null, or undefined has no effect.
*/
command: string;
/**
* A Boolean value that indicates whether a context menu item is disabled.
* Disabled menu items are not displayed in the context menu.
*/
disabled: boolean;
/**
* The unique identifier of the context menu item.
*/
identifier: string;
/**
* The title displayed in the context menu.
*/
title: string;
}
interface SafariValidateEvent extends SafariEvent {
/**
* The command identifier of the target being validated.
*/
command: string;
}
interface SafariExtensionContextMenuItemValidateEvent {
/**
* The target of the event.
* This attribute stays the same as the event moves through the event-dispatch hierarchy. Its value is the same as the object that the event is sent to during the targeting phase.
*/
target: SafariExtensionContextMenuItem;
/**
* The object that the event is currently being sent to.
* This attribute varies as the event progresses through the phases, changing as the event moves through the event-dispatch hierarchy.
*/
currentTarget: SafariExtensionContextMenuItem;
/**
* Information about the current context menu event.
*/
userInfo: any;
}
interface SafariCommandEvent extends SafariEvent {
/**
* The command identifier of the target being dispatched.
*/
command: string;
}
interface SafariExtensionContextMenuItemCommandEvent extends SafariCommandEvent {
/**
* The target of the event.
* This attribute stays the same as the event moves through the event-dispatch hierarchy. Its value is the same as the object that the event is sent to during the targeting phase.
*/
target: SafariExtensionContextMenuItem;
/**
* The object that the event is currently being sent to.
* This attribute varies as the event progresses through the phases, changing as the event moves through the event-dispatch hierarchy.
*/
currentTarget: SafariExtensionContextMenuItem;
/**
* The user info object for this context menu event.
*/
userInfo: any;
}
/**
 * Event dispatched when a value in the extension's settings (regular or
 * secure) changes; exposes the changed key and the old/new values.
 */
interface SafariExtensionSettingsChangeEvent extends SafariEvent {
    /**
     * The target of the event.
     * This attribute stays the same as the event moves through the event-dispatch hierarchy. Its value is the same as the object that the event is sent to during the targeting phase.
     */
    target: SafariExtensionSettings|SafariExtensionSecureSettings;
    /**
     * The object that the event is currently being sent to.
     * This attribute varies as the event progresses through the phases, changing as the event moves through the event-dispatch hierarchy.
     */
    currentTarget: SafariExtensionSettings|SafariExtensionSecureSettings;
    /**
     * The key identifier of the setting that was changed.
     */
    key: string;
    /**
     * The value before the settings change.
     */
    oldValue: any;
    /**
     * The value after the settings change.
     */
    newValue: any;
}
/**
 * Event carrying a named message and its payload between extension parts.
 */
interface SafariExtensionMessageEvent extends SafariEvent {
    /**
     * The name of the message.
     */
    name: string;
    /**
     * The message data.
     */
    message: any;
}
import Component from '@ember/component';
import { computed } from '@ember/object';
import { assert } from '@ember/debug';
import { readOnly } from '@ember/object/computed';
import { getOwner } from '@ember/application';
import { guidFor } from '@ember/object/internals';
import numbro from 'numbro';
import { merge } from 'lodash-es';
import moment from 'moment';
import { run } from '@ember/runloop';
import ChartBuildersBase from './chart-builders-base';
import { VisualizationModel } from 'navi-core/components/navi-visualizations/table';
import { BaseChartBuilder } from '@yavin/c3/chart-builders/base';
import RequestFragment from 'navi-core/models/request';
import { LineChartConfig } from '@yavin/c3/models/line-chart';
import { Grain } from '@yavin/client/utils/date';
import { ChartSeries } from '@yavin/c3/models/chart-visualization';
import NaviFactResponse from '@yavin/client/models/navi-fact-response';
// Baseline c3 chart configuration. User-supplied options are deep-merged
// over these defaults (see `config`, `seriesConfig`, `seriesDataGroups`,
// and `c3ChartType` below).
const DEFAULT_OPTIONS = <const>{
  style: {
    curve: 'line', // 'line' | 'spline' | 'step' — drives `c3ChartType`
    area: false, // filled-area variant of the chosen curve
    stacked: false, // when true, `seriesDataGroups` stacks all series
  },
  axis: {
    x: {
      type: 'category',
      categories: [],
      tick: {
        culling: true,
        multiline: false,
      },
    },
    y: {
      series: {
        type: 'metric',
        config: {},
      },
      tick: {
        // Two decimal places; abbreviate (e.g. 1.50k) only above 1000.
        format: (val: number) => numbro(val).format({ mantissa: 2, average: val > 1000 }),
        count: 4,
      },
      label: {
        position: 'outer-middle',
      },
    },
  },
  grid: {
    y: { show: true },
  },
  point: {
    r: 0, // hide points by default; see `pointConfig` for the override
    focus: {
      expand: { r: 4 },
    },
  },
};
// Arguments accepted by the LineChart component.
export type Args = {
  model: VisualizationModel;
  options: LineChartConfig['metadata'];
};
export default class LineChart extends ChartBuildersBase<Args> {
  /**
   * the type of c3 chart
   */
  chartType = 'line';
  /**
   * since line-chart is a tagless wrapper component,
   * classes specified here are applied to the underlying c3-chart component
   */
  classNames = ['line-chart-widget'];
  /**
   * builder based on series type
   * (`chartBuilders` comes from ChartBuildersBase — assumed to map
   * series type -> builder; asserts when no builder is registered)
   */
  @computed('seriesConfig.type')
  get builder(): BaseChartBuilder {
    const {
      seriesConfig: { type },
      chartBuilders,
    } = this;
    const chartBuilder = chartBuilders[type];
    assert(`There should be a chart-builder for ${type}`, chartBuilder);
    return chartBuilder;
  }
  /**
   * config options for the chart.
   * Later arguments of `merge` win, so the axis-type override and tick
   * values take precedence over persisted user options.
   */
  @computed('args.options', 'chartTooltip', 'dataConfig', 'xAxisTickValues', 'yAxisDataFormat', 'yAxisLabelConfig')
  get config() {
    const { pointConfig: point } = this;
    //deep merge DEFAULT_OPTIONS, custom options, and data
    return merge(
      {},
      DEFAULT_OPTIONS,
      this.args.options,
      this.dataConfig,
      { tooltip: this.chartTooltip },
      { point },
      { axis: { x: { type: 'category' } } }, // Override old 'timeseries' config saved in db
      this.yAxisLabelConfig,
      this.yAxisDataFormat,
      this.xAxisTickValues
    );
  }
  /**
   * y axis label config options for the chart.
   * Only metric series carry a `metricCid`; the label is the matching
   * request column's display name. Empty object otherwise.
   */
  @computed('seriesConfig.config.metricCid', 'request.columns.@each.displayName')
  get yAxisLabelConfig() {
    const { seriesConfig } = this;
    if ('metricCid' in seriesConfig.config) {
      const { metricCid } = seriesConfig.config;
      const metric = this.request.columns.find(({ cid }) => cid === metricCid);
      return {
        axis: {
          y: {
            label: {
              text: metric?.displayName,
            },
          },
        },
      };
    }
    return {};
  }
  /**
   * options for determining chart series
   * (defaults to the 'metric' series type from DEFAULT_OPTIONS)
   */
  @computed('args.options')
  get seriesConfig(): ChartSeries {
    const optionsWithDefault = merge({}, DEFAULT_OPTIONS, this.args.options);
    return optionsWithDefault.axis.y.series;
  }
  // Request/response of the first visualization model drive the chart.
  @readOnly('args.model.0.request') request!: RequestFragment;
  @readOnly('args.model.0.response') response!: NaviFactResponse;
  /**
   * point radius config options for chart:
   * a lone data point gets a slightly larger radius so it stays visible
   */
  @computed('response.rows.length')
  get pointConfig() {
    const pointCount = this.response.rows.length;
    return pointCount === 1 ? { r: 3 } : { r: 2 };
  }
  /**
   * chart series data, produced by the type-specific chart builder
   */
  @computed(
    'request.columns.@each.displayName',
    'response',
    'builder',
    'seriesConfig.config.{dimensions.[],metricCid.timeGrain}'
  )
  get seriesData() {
    const { request, response, builder, seriesConfig } = this;
    return builder.buildData(response, seriesConfig.config, request);
  }
  /**
   * chart series groups for stacking.
   * c3 series are keyed `series.0`, `series.1`, … — one per dimension or
   * metric column depending on the series type.
   */
  @computed(
    'args.options',
    'seriesConfig.config.{type,dimensions.@each.name}',
    'request.metricColumns.@each.displayName'
  )
  get seriesDataGroups() {
    const { request, seriesConfig } = this;
    const newOptions = merge({}, DEFAULT_OPTIONS, this.args.options);
    const { stacked } = newOptions.style;
    if (!stacked) {
      return [];
    }
    // if stacked, return [[ "Dimension 1", "Dimension 2", ... ]] or [[ "Metric 1", "Metric 2", ... ]]
    if (seriesConfig.type === 'dimension') {
      return [seriesConfig.config.dimensions.map((_, i) => `series.${i}`)];
    } else if (seriesConfig.type === 'metric') {
      return [request.metricColumns.map((_, i) => `series.${i}`)];
    }
    return [];
  }
  /**
   * configuration for chart x and y values
   */
  @computed('c3ChartType', 'seriesData', 'seriesDataGroups')
  get dataConfig() {
    const { c3ChartType, seriesData, seriesDataGroups } = this;
    /**
     * controls the order of stacking which should be the same as order of groups
     * `null` will be order the data loaded (object properties) which might not be predictable in some browsers
     */
    const order = seriesDataGroups[0] || null;
    return {
      data: {
        type: c3ChartType,
        json: seriesData.series,
        groups: seriesDataGroups,
        names: seriesData.names,
        order,
        selection: {
          enabled: true,
        },
      },
    };
  }
  /**
   * c3 chart type to determine line behavior:
   * 'line'/'area', 'spline'/'area-spline', 'step'/'area-step'
   */
  @computed('args.options', 'chartType')
  get c3ChartType() {
    const options = merge({}, DEFAULT_OPTIONS, this.args.options),
      { curve, area } = options.style;
    if (curve === 'line') {
      return area ? 'area' : 'line';
    } else if (curve === 'spline' || curve === 'step') {
      return area ? `area-${curve}` : curve;
    }
    return this.chartType;
  }
  /**
   * name of the tooltip component;
   * guid-suffixed so each chart instance registers a unique component
   */
  get tooltipComponentName() {
    const guid = guidFor(this);
    const seriesType = this.seriesConfig.type;
    const chartType = this.chartType;
    return `${chartType}-chart-${seriesType}-tooltip-${guid}`;
  }
  /**
   * component used for rendering HTMLBars templates;
   * registered into the owner's registry on first access and
   * unregistered in `willDestroy`
   */
  @computed('builder', 'dataConfig', 'firstModel', 'tooltipComponentName')
  get tooltipComponent() {
    const { request, seriesConfig } = this;
    const tooltipComponentName = this.tooltipComponentName;
    const registryEntry = `component:${tooltipComponentName}`;
    const builder = this.builder;
    const owner = getOwner(this);
    const tooltipComponent = Component.extend(
      owner.ownerInjection(),
      builder.buildTooltip(seriesConfig.config, request),
      {
        renderer: owner.lookup('renderer:-dom'),
      }
    );
    if (!owner.lookup(registryEntry)) {
      owner.register(registryEntry, tooltipComponent);
    }
    /*
     * Ember 3.x requires components to be registered with the container before they are instantiated.
     * Use the factory that has been registered instead of an anonymous component.
     */
    return owner.factoryFor(registryEntry);
  }
  /**
   * x axis tick positions for day/week/month grain on year chart grain.
   * NOTE(review): `dayValues` uses `moment()` (the current year), so the
   * day-of-year tick positions shift by one after February in leap years —
   * confirm this is acceptable.
   */
  get xAxisTickValuesByGrain(): Partial<Record<Grain, number[] | undefined>> {
    const dayValues = [];
    for (let i = 0; i < 12; i++) {
      dayValues.push(moment().startOf('year').month(i).dayOfYear());
    }
    return {
      day: dayValues,
      // week.by.year in date-time is hardcoded to YEAR_WITH_53_ISOWEEKS (2015)
      week: [1, 5, 9, 13, 18, 22, 26, 31, 35, 39, 44, 48],
      month: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11],
    };
  }
  /**
   * explicity specifies x axis tick positions for year chart grain;
   * only applies to dateTime series with a 'year' time grain
   */
  @computed('request.timeGrain', 'seriesConfig.{type,config.timeGrain}', 'xAxisTickValuesByGrain')
  get xAxisTickValues() {
    const { seriesConfig } = this;
    if (seriesConfig.type !== 'dateTime' || seriesConfig.config.timeGrain !== 'year') {
      return {};
    }
    const requestGrain = this.request?.timeGrain;
    const values = requestGrain ? this.xAxisTickValuesByGrain[requestGrain] : undefined;
    return {
      axis: {
        x: {
          tick: {
            values,
            // without explicit values, let c3 fit/cull ticks automatically
            fit: !values,
            culling: !values,
          },
        },
      },
    };
  }
  /**
   * configuration for tooltip: renders the registered tooltip component
   * off-DOM and returns its innerHTML to c3
   */
  @computed('dataConfig.data.json', 'firstModel', 'request', 'seriesConfig.config', 'tooltipComponent')
  get chartTooltip() {
    const rawData = this.dataConfig.data?.json;
    const tooltipComponent = this.tooltipComponent;
    const request = this.request;
    const seriesConfig = this.seriesConfig.config;
    return {
      contents(tooltipData: { x: number }[]) {
        /*
         * Since tooltipData.x only contains the index value, map it
         * to the raw x value for better formatting
         */
        const x = rawData[tooltipData[0].x].x.rawValue;
        const tooltip = tooltipComponent.create({
          tooltipData,
          x,
          request,
          seriesConfig,
        });
        // render synchronously inside a runloop so innerHTML is populated
        run(() => {
          tooltip.appendTo(document.createElement('div'));
        });
        const innerHTML = tooltip.element.innerHTML;
        tooltip.destroy();
        return innerHTML;
      },
    };
  }
  /**
   * @param val - number to format
   * @returns formatted number (two decimals; abbreviated above 1000,
   * matching the DEFAULT_OPTIONS y-axis tick format)
   */
  formattingFunction = (val: number) => numbro(val).format({ mantissa: 2, average: val > 1000 });
  /**
   * adds the formattingFunction to the chart config
   */
  @computed('formattingFunction')
  get yAxisDataFormat() {
    const format = this.formattingFunction;
    return { axis: { y: { tick: { format } } } };
  }
  /**
   * Fires before the element is destroyed
   * @override
   */
  willDestroy() {
    super.willDestroy();
    this._removeTooltipFromRegistry();
  }
  /**
   * Removes tooltip component from registry
   * (undoes the registration performed in `tooltipComponent`)
   */
  private _removeTooltipFromRegistry() {
    const tooltipComponentName = this.tooltipComponentName;
    getOwner(this).unregister(`component:${tooltipComponentName}`);
  }
}
declare module FabricPlugin {
interface FabricPluginStatic {
/**
* API for interacting with the Crashlytics kit.
*
* https://docs.fabric.io/ios/crashlytics/index.html
*/
Crashlytics: Crashlytics;
/**
* API for interacting with the Answers kit.
*
* https://docs.fabric.io/ios/answers/index.html
*/
Answers: Answers;
}
/**
* API for interacting with the Crashlytics kit.
*
* https://docs.fabric.io/ios/crashlytics/index.html
*/
interface Crashlytics {
/**
* Add logging that will be sent with your crash data. This logging will not show up
* in the system.log and will only be visible in your Crashlytics dashboard.
*/
addLog(message: string): void;
/**
* Used to simulate a native platform crash (useful for testing Crashlytics logging).
*/
sendCrash(): void;
/**
* Used to log a non-fatal error message (Android only).
*/
sendNonFatalCrash(message: string, stacktrace?: any): void;
/**
* Used to record a non-fatal error message (iOS only).
*/
recordError(message: string, code: number): void;
/**
* Sets the user's identifier for logging to Crashlytics backend.
*/
setUserIdentifier(userId: string): void;
/**
* Sets the user's name for logging to Crashlytics backend.
*/
setUserName(userName: string): void;
/**
* Sets the user's email address for logging to Crashlytics backend.
*/
setUserEmail(email: string): void;
/**
* Sets a custom key/value pair for logging to Crashlytics backend.
*/
setStringValueForKey(value: string, key: string): void;
/**
* Sets a custom key/value pair for logging to Crashlytics backend.
*/
setIntValueForKey(value: number, key: string): void;
/**
* Sets a custom key/value pair for logging to Crashlytics backend.
*/
setBoolValueForKey(value: boolean, key: string): void;
/**
* Sets a custom key/value pair for logging to Crashlytics backend.
*/
setFloatValueForKey(value: number, key: string): void;
}
/**
* API for interacting with the Answers kit.
*
* https://docs.fabric.io/ios/answers/index.html
*/
interface Answers {
/**
* Sends the Purchase tracking event.
*
* All parameters are optional.
*
* https://docs.fabric.io/android/answers/answers-events.html#purchase
*
* @param itemPrice The item's amount in the currency specified.
* @param currency The ISO4217 currency code.
* @param success Was the purchase completed succesfully?
* @param itemName The human-readable name for the item.
* @param itemType The category the item falls under.
* @param itemId A unique identifier used to track the item.
* @param attributes Any additional user-defined attributes to be logged.
*/
sendPurchase(itemPrice?: number, currency?: string, success?: boolean, itemName?: string, itemType?: string, itemId?: string, attributes?: Attributes): void;
/**
* Sends the Add To Cart tracking event.
*
* All parameters are optional.
*
* https://docs.fabric.io/android/answers/answers-events.html#add-to-cart
*
* @param itemPrice The item's amount in the currency specified.
* @param currency The ISO4217 currency code.
* @param itemName The human-readable name for the item.
* @param itemType The category the item falls under.
* @param itemId A unique identifier used to track the item.
* @param attributes Any additional user-defined attributes to be logged.
*/
sendAddToCart(itemPrice?: number, currency?: string, itemName?: string, itemType?: string, itemId?: string, attributes?: Attributes): void;
/**
* Sends the Start Checkout tracking event.
*
* All parameters are optional.
*
* https://docs.fabric.io/android/answers/answers-events.html#start-checkout
*
* @param totalPrice The total price of all items in cart in the currency specified.
* @param currency The ISO4217 currency code.
* @param itemCount The count of items in cart.
* @param attributes Any additional user-defined attributes to be logged.
*/
sendStartCheckout(totalPrice?: number, currency?: string, itemCount?: number, attributes?: Attributes): void;
/**
* Sends the Search tracking event.
*
* https://docs.fabric.io/android/answers/answers-events.html#search
*
* @param query What the user is searching for.
* @param attributes Any additional user-defined attributes to be logged.
*/
sendSearch(query: string, attributes?: Attributes): void;
/**
* Sends the Share tracking event.
*
* All parameters are optional.
*
* https://docs.fabric.io/android/answers/answers-events.html#share
*
* @param method The method used to share content.
* @param contentName The description of the content.
* @param contentType The type or genre of content.
* @param contentId A unique key identifying the content.
* @param attributes Any additional user-defined attributes to be logged.
*/
sendShare(method?: string, contentName?: string, contentType?: string, contentId?: string, attributes?: Attributes): void;
/**
* Sends the Rated Content tracking event.
*
* All parameters are optional.
*
* https://docs.fabric.io/android/answers/answers-events.html#rated-content
*
* @param rating An integer rating of the content.
* @param contentName The human-readable name of content.
* @param contentType The category your item falls under.
* @param contentId A unique identifier used to track the item.
* @param attributes Any additional user-defined attributes to be logged.
*/
sendRatedContent(rating?: number, contentName?: string, contentType?: string, contentId?: string, attributes?: Attributes): void;
/**
* Sends the Sign Up tracking event.
*
* All parameters are optional.
*
* https://docs.fabric.io/android/answers/answers-events.html#sign-up
*
* @param method An optional description of the sign up method (Twitter, Facebook, etc.); defaults to "Direct".
* @param success An optional flag that indicates sign up success; defaults to true.
* @param attributes Any additional user-defined attributes to be logged.
*/
sendSignUp(method?: string, success?: boolean, attributes?: Attributes): void;
/**
* Sends the Log In tracking event.
*
* All parameters are optional.
*
* https://docs.fabric.io/android/answers/answers-events.html#log-in
*
* @param method An optional description of the sign in method (Twitter, Facebook, etc.); defaults to "Direct".
* @param success An optional flag that indicates sign in success; defaults to true.
* @param attributes Any additional user-defined attributes to be logged.
*/
sendLogIn(method?: string, success?: boolean, attributes?: Attributes): void;
/**
* Sends the Invite tracking event.
*
* All parameters are optional.
*
* https://docs.fabric.io/android/answers/answers-events.html#invite
*
* @param method An optional description of the sign in method (Twitter, Facebook, etc.); defaults to "Direct".
* @param attributes Any additional user-defined attributes to be logged.
*/
sendInvite(method?: string, attributes?: Attributes): void;
/**
* Sends the Level Start tracking event.
*
* All parameters are optional.
*
* https://docs.fabric.io/android/answers/answers-events.html#level-start
*
* @param levelName String key describing the level.
* @param attributes Any additional user-defined attributes to be logged.
*/
sendLevelStart(levelName?: string, attributes?: Attributes): void;
/**
* Sends the Level End tracking event.
*
* All parameters are optional.
*
* https://docs.fabric.io/android/answers/answers-events.html#level-end
*
* @param levelName String key describing the level.
* @param score The score for this level.
* @param success Completed the level or failed trying.
* @param attributes Any additional user-defined attributes to be logged.
*/
sendLevelEnd(levelName?: string, score?: number, success?: boolean, attributes?: Attributes): void;
/**
* Send the Content View tracking event.
*
* https://docs.fabric.io/android/answers/answers-events.html#content-view
*/
sendContentView(name: string, type?: string, id?: string, attributes?: Attributes): void;
/**
* Shortcut for sendContentView(...) using type of "Screen".
*/
sendScreenView(name: string, id: string, attributes?: Attributes): void;
/**
* Send a custom tracking event with the given name.
*
* https://docs.fabric.io/android/answers/answers-events.html#custom-event
*/
sendCustomEvent(name: string, attributes?: Attributes): void;
}
/**
* A key/value pair of strings.
*/
interface Attributes {
[index: string]: String;
}
}
declare var fabric: FabricPlugin.FabricPluginStatic; | the_stack |
import { TaskFieldsModel } from './../models/task-fields-model.d';
import { PdfFontFamily } from '@syncfusion/ej2-pdf-export';
import { PdfStringFormat, PdfPageCountField, PdfPageNumberField } from '@syncfusion/ej2-pdf-export';
import { PdfPageTemplateElement, RectangleF, PdfCompositeField, PointF } from '@syncfusion/ej2-pdf-export';
import { PdfVerticalAlignment, PdfTextAlignment, PdfFont, PdfStandardFont } from '@syncfusion/ej2-pdf-export';
import { PdfFontStyle, PdfColor, PdfPen, PdfBrush, PdfSolidBrush, PdfDocument } from '@syncfusion/ej2-pdf-export';
import { PdfTreeGridColumn, PdfTreeGridRow, PdfTreeGridCell, PdfBorders, PdfPaddings } from './pdf-base/index';
import { ColumnModel } from './../models/column';
import { PdfGantt } from './pdf-gantt';
import {
IGanttData, PdfExportProperties, PdfQueryCellInfoEventArgs,
ITaskData, IGanttStyle, IConnectorLineObject, PdfGanttCellStyle, ITaskbarStyle, PdfColumnHeaderQueryCellInfoEventArgs,
PdfQueryTaskbarInfoEventArgs
} from './../base/interface';
import { Gantt } from './../base/gantt';
import { isNullOrUndefined, DateFormatOptions, Internationalization, getValue, extend } from '@syncfusion/ej2-base';
import { getForeignData, ValueFormatter } from '@syncfusion/ej2-grids';
import { pixelToPoint, isScheduledTask } from '../base/utils';
import { Timeline } from '../renderer/timeline';
import { PdfGanttTaskbarCollection } from './pdf-taskbar';
import { PdfGanttPredecessor } from './pdf-connector-line';
/**
* @hidden
* `ExportHelper` for `PdfExport` & `ExcelExport`
*/
export class ExportHelper {
    // Owning Gantt instance whose state (columns, timeline, styles) drives the export.
    private parent: Gantt;
    // Flat record collection supplied to processGridExport (one entry per exported row).
    private flatData: IGanttData[];
    // User-supplied export options (includeHiddenColumn, showPredecessorLines, ...).
    private exportProps: PdfExportProperties;
    // Target PDF gantt model being populated.
    private gantt: PdfGantt;
    // Cursor of the row currently being written.
    private rowIndex: number;
    // Cursor of the (visible) column currently being written.
    private colIndex: number;
    // PDF row currently being filled.
    private row: PdfTreeGridRow;
    // Tree-grid columns snapshot taken at export start.
    private columns: ColumnModel[];
    // Style bundle copied from the PDF gantt model.
    private ganttStyle: IGanttStyle;
    private pdfDoc: PdfDocument;
    // Formats cell values using the parent Gantt's locale.
    private exportValueFormatter: ExportValueFormatter;
    // Running sum of exported column widths, in points (accumulated in processColumnHeader).
    private totalColumnWidth: number;
    /**
     * @param {Gantt} parent - Gantt instance to export from.
     */
    public constructor(parent: Gantt) {
        this.parent = parent;
    }
/**
* @param {IGanttData[]} data .
* @param {PdfGantt} gantt .
* @param {PdfExportProperties} props .
* @returns {void} .
* @private
*/
public processGridExport(data: IGanttData[], gantt: PdfGantt, props: PdfExportProperties): void {
this.flatData = data;
this.gantt = gantt;
this.exportValueFormatter = new ExportValueFormatter(this.parent.locale);
this.exportProps = props;
this.rowIndex = 0;
this.colIndex = 0;
this.columns = this.parent.treeGrid.columns as ColumnModel[];
this.gantt.treeColumnIndex = this.parent.treeColumnIndex;
this.gantt.rowHeight = pixelToPoint(this.parent.rowHeight);
this.gantt.style.cellPadding.left = 0;
this.gantt.style.cellPadding.right = 0;
this.ganttStyle = this.gantt.ganttStyle;
this.gantt.borderColor = this.ganttStyle.chartGridLineColor;
this.processHeaderContent();
this.processGanttContent();
this.processTimeline();
this.processTaskbar();
this.processPredecessor();
}
private processHeaderContent(): void {
this.rowIndex++;
this.row = this.gantt.rows.addRow();
let index: number = 0;
this.columns.forEach((column: ColumnModel): void => {
if (this.isColumnVisible(column)) {
this.processColumnHeader(column, index);
index++;
}
});
}
private processColumnHeader(column: ColumnModel, index: number): void {
this.gantt.columns.add(1);
const pdfColumn: PdfTreeGridColumn = this.gantt.columns.getColumn(index);
if (this.parent.treeColumnIndex === index) {
pdfColumn.isTreeColumn = true;
}
const width: string | number = parseInt(column.width as string, 10);
pdfColumn.width = pixelToPoint(width);
this.totalColumnWidth += pdfColumn.width;
pdfColumn.headerText = column.headerText;
pdfColumn.field = column.field;
const cell: PdfTreeGridCell = this.row.cells.getCell(index);
cell.value = column.headerText;
cell.isHeaderCell = true;
const treeGridHeaderHeight: number = this.parent.timelineModule.isSingleTier ? 45 : 60;
this.copyStyles(this.ganttStyle.columnHeader, cell, false);
this.row.height = pixelToPoint(treeGridHeaderHeight);
if (column.headerTextAlign) {
cell.style.format.alignment = PdfTextAlignment[column.headerTextAlign];
}
const args: PdfColumnHeaderQueryCellInfoEventArgs = {
cell: cell,
style: cell.style,
value: cell.value,
column: column
};
if (this.parent.pdfColumnHeaderQueryCellInfo) {
this.parent.trigger('pdfColumnHeaderQueryCellInfo', args);
}
cell.value = args.value;
}
private isColumnVisible(column: ColumnModel): boolean {
const visibleColumn: boolean = column.visible || this.exportProps.includeHiddenColumn;
const templateColumn: boolean = !isNullOrUndefined(column.template) ? false : true;
return (visibleColumn && templateColumn);
}
private processGanttContent(): void {
if (this.flatData.length === 0) {
this.renderEmptyGantt();
} else {
this.flatData.forEach((data: IGanttData) => {
this.row = this.gantt.rows.addRow();
if (data.hasChildRecords) {
this.gantt.rows.getRow(this.rowIndex).isParentRow = true;
this.processRecordRow(data);
} else {
this.processRecordRow(data);
}
this.rowIndex++;
});
}
}
    /**
     * Method for processing the timeline details: copies the live Gantt
     * timeline state (tier collections, cell widths, start date) onto the
     * PDF chart header.
     *
     * @returns {void} .
     */
    private processTimeline(): void {
        const timelineSettings: Timeline = this.parent.timelineModule;
        // Tier height evaluates to 45 for a single tier, otherwise 60 / 2 = 30
        // per tier. NOTE(review): if the intent was (isSingleTier ? 45 : 60) / 2
        // the operator precedence here differs — confirm against the rendered
        // header height (processColumnHeader uses isSingleTier ? 45 : 60).
        this.gantt.chartHeader.topTierHeight = this.gantt.chartHeader.bottomTierHeight
            = (this.parent.timelineModule.isSingleTier ? 45 : 60 / 2);
        this.gantt.chartHeader.topTierCellWidth = timelineSettings.topTierCellWidth;
        this.gantt.chartHeader.bottomTierCellWidth = timelineSettings.bottomTierCellWidth;
        // Deep-copy the tier collections so the export cannot mutate live state.
        this.gantt.chartHeader.topTier = extend([], [], timelineSettings.topTierCollection, true) as [];
        this.gantt.chartHeader.bottomTier = extend([], [], timelineSettings.bottomTierCollection, true) as [];
        this.gantt.chartHeader.width = timelineSettings.totalTimelineWidth;
        this.gantt.chartHeader.height = this.gantt.rows.getRow(0).height;
        this.gantt.timelineStartDate = new Date(timelineSettings.timelineStartDate.getTime());
    }
/**
* Method for create the predecessor collection for rendering
*
* @returns {void} .
*/
private processPredecessor(): void {
if (isNullOrUndefined(this.exportProps.showPredecessorLines) || this.exportProps.showPredecessorLines) {
this.parent.pdfExportModule.isPdfExport = true;
this.parent.predecessorModule.createConnectorLinesCollection();
this.parent.updatedConnectorLineCollection.forEach((data: IConnectorLineObject) => {
const predecessor: PdfGanttPredecessor = this.gantt.predecessor.add();
predecessor.parentLeft = data.parentLeft;
predecessor.childLeft = data.childLeft;
predecessor.parentWidth = data.parentWidth;
predecessor.childWidth = data.childWidth;
predecessor.parentIndex = data.parentIndex;
predecessor.childIndex = data.childIndex;
predecessor.rowHeight = data.rowHeight;
predecessor.type = data.type;
predecessor.milestoneParent = data.milestoneParent;
predecessor.milestoneChild = data.milestoneChild;
predecessor.lineWidth = this.parent.connectorLineWidth > 5 ? pixelToPoint(5) : pixelToPoint(this.parent.connectorLineWidth);
predecessor.connectorLineColor = this.ganttStyle.connectorLineColor;
this.gantt.predecessorCollection.push(predecessor);
});
this.parent.pdfExportModule.isPdfExport = false;
}
}
private processRecordRow(data: IGanttData): void {
this.colIndex = 0;
this.row.level = data.level;
this.columns.forEach((column: ColumnModel): void => {
if (this.isColumnVisible(column)) {
this.processRecordCell(data, column, this.row);
this.colIndex++;
}
});
}
private processRecordCell(data: IGanttData, column: ColumnModel, row: PdfTreeGridRow): void {
const cell: PdfTreeGridCell = row.cells.getCell(this.colIndex);
const taskFields: TaskFieldsModel = this.parent.taskFields;
const ganttProps: ITaskData = data.ganttProperties;
if (column.editType === 'datepickeredit' || column.editType === 'datetimepickeredit') {
cell.value = this.parent.getFormatedDate(data[column.field], this.parent.getDateFormat());
} else if (column.field === taskFields.duration) {
cell.value = this.parent.getDurationString(ganttProps.duration, ganttProps.durationUnit);
} else if (column.field === taskFields.resourceInfo) {
cell.value = ganttProps.resourceNames;
} else if (column.field === taskFields.work) {
cell.value = this.parent.getWorkString(ganttProps.work, ganttProps.workUnit);
} else {
cell.value = !isNullOrUndefined(data[column.field]) ? data[column.field].toString() : '';
}
cell.isHeaderCell = false;
cell.style.padding = new PdfPaddings();
this.copyStyles(this.ganttStyle.cell, cell, row.isParentRow);
if (this.colIndex !== this.parent.treeColumnIndex) {
cell.style.format.alignment = PdfTextAlignment[column.textAlign];
} else {
cell.style.format.paragraphIndent = cell.row.level * 10;
}
if (this.parent.pdfQueryCellInfo != null) {
const args: PdfQueryCellInfoEventArgs = {
data: data,
value: cell.value,
column: column,
style: cell.style,
cell: cell
};
args.value = this.exportValueFormatter.formatCellValue(args);
if (this.parent.pdfQueryCellInfo) {
this.parent.trigger('pdfQueryCellInfo', args);
}
cell.value = args.value;
}
}
/**
* Method for create the taskbar collection for rendering
*
* @returns {void} .
*/
private processTaskbar(): void {
this.flatData.forEach((data: IGanttData) => {
const taskbar: PdfGanttTaskbarCollection = this.gantt.taskbar.add();
const ganttProp: ITaskData = data.ganttProperties;
taskbar.left = ganttProp.left;
taskbar.width = ganttProp.width;
if (taskbar.left < 0) {
taskbar.width = taskbar.width + taskbar.left;
taskbar.left = 0;
}
taskbar.progress = ganttProp.progress;
taskbar.isScheduledTask = isScheduledTask(ganttProp);
if (isScheduledTask) {
if (isNullOrUndefined(ganttProp.endDate) && isNullOrUndefined(ganttProp.duration)) {
taskbar.unscheduledTaskBy = 'startDate';
} else if (isNullOrUndefined(ganttProp.startDate) && isNullOrUndefined(ganttProp.duration)) {
taskbar.unscheduledTaskBy = 'endDate';
} else {
taskbar.unscheduledTaskBy = 'duration';
taskbar.unscheduleStarteDate = this.parent.dateValidationModule.getValidStartDate(data.ganttProperties);
taskbar.unscheduleEndDate = this.parent.dateValidationModule.getValidEndDate(data.ganttProperties);
}
} else {
taskbar.unscheduleStarteDate = null;
taskbar.unscheduleEndDate = null;
}
taskbar.startDate = ganttProp.startDate;
taskbar.endDate = ganttProp.endDate;
taskbar.height = this.parent.chartRowsModule.taskBarHeight;
taskbar.isMilestone = ganttProp.isMilestone;
taskbar.milestoneColor = new PdfColor(this.ganttStyle.taskbar.milestoneColor);
taskbar.isParentTask = data.hasChildRecords;
if (ganttProp.isMilestone) {
taskbar.height = ganttProp.width;
}
if (data[this.parent.labelSettings.leftLabel]) {
taskbar.leftTaskLabel.value = data[this.parent.labelSettings.leftLabel].toString();
}
if (data[this.parent.labelSettings.rightLabel]) {
taskbar.rightTaskLabel.value = data[this.parent.labelSettings.rightLabel].toString();
}
const reduceLeft: number = ganttProp.isMilestone ? Math.floor(this.parent.chartRowsModule.taskBarHeight / 2) + 33 : 33; // 33 indicates default timeline cell width
taskbar.rightTaskLabel.left = ganttProp.left + ganttProp.width + reduceLeft; // right label left value
taskbar.fontFamily = this.ganttStyle.fontFamily;
taskbar.progressWidth = ganttProp.progressWidth;
taskbar.labelColor = new PdfColor(this.ganttStyle.label.fontColor);
taskbar.progressFontColor = new PdfColor(this.ganttStyle.taskbar.progressFontColor);
if (taskbar.isParentTask) {
taskbar.taskColor = new PdfColor(this.ganttStyle.taskbar.parentTaskColor);
taskbar.taskBorderColor = new PdfColor(this.ganttStyle.taskbar.parentTaskBorderColor);
taskbar.progressColor = new PdfColor(this.ganttStyle.taskbar.parentProgressColor);
} else {
taskbar.taskColor = new PdfColor(this.ganttStyle.taskbar.taskColor);
taskbar.taskBorderColor = new PdfColor(this.ganttStyle.taskbar.taskBorderColor);
taskbar.progressColor = new PdfColor(this.ganttStyle.taskbar.progressColor);
}
taskbar.gridLineColor = new PdfColor(this.ganttStyle.chartGridLineColor);
this.gantt.taskbarCollection.push(taskbar);
const taskStyle: ITaskbarStyle = {};
taskStyle.progressFontColor = taskbar.progressFontColor;
taskStyle.taskColor = taskbar.taskColor;
taskStyle.taskBorderColor = taskbar.taskBorderColor;
taskStyle.progressColor = taskbar.progressColor;
taskStyle.milestoneColor = taskbar.milestoneColor;
const args: PdfQueryTaskbarInfoEventArgs = {
taskbar: taskStyle,
data: data
};
if (this.parent.pdfQueryTaskbarInfo) {
this.parent.trigger('pdfQueryTaskbarInfo', args);
taskbar.progressFontColor = args.taskbar.progressFontColor;
taskbar.taskColor = args.taskbar.taskColor;
taskbar.taskBorderColor = args.taskbar.taskBorderColor;
taskbar.progressColor = args.taskbar.progressColor;
taskbar.milestoneColor = args.taskbar.milestoneColor;
}
});
}
/**
* set text alignment of each columns in exporting grid
*
* @param {string} textAlign .
* @param {PdfStringFormat} format .
* @returns {PdfStringFormat} .
* @private
*/
private getHorizontalAlignment(textAlign: string, format?: PdfStringFormat): PdfStringFormat {
if (format === undefined) {
format = new PdfStringFormat();
}
switch (textAlign) {
case 'Right':
format.alignment = PdfTextAlignment.Right;
break;
case 'Center':
format.alignment = PdfTextAlignment.Center;
break;
case 'Justify':
format.alignment = PdfTextAlignment.Justify;
break;
case 'Left':
format.alignment = PdfTextAlignment.Left;
break;
}
return format;
}
/**
* set vertical alignment of each columns in exporting grid
*
* @param {string} verticalAlign .
* @param {PdfStringFormat} format .
* @param {string} textAlign .
* @returns {PdfStringFormat} .
* @private
*/
private getVerticalAlignment(verticalAlign: string, format?: PdfStringFormat, textAlign?: string): PdfStringFormat {
if (format === undefined) {
format = new PdfStringFormat();
format = this.getHorizontalAlignment(textAlign, format);
}
switch (verticalAlign) {
case 'Bottom':
format.lineAlignment = PdfVerticalAlignment.Bottom;
break;
case 'Middle':
format.lineAlignment = PdfVerticalAlignment.Middle;
break;
case 'Top':
format.lineAlignment = PdfVerticalAlignment.Top;
break;
}
return format;
}
private getFontFamily(fontFamily: string): number {
switch (fontFamily) {
case 'TimesRoman':
return 2;
case 'Courier':
return 1;
case 'Symbol':
return 3;
case 'ZapfDingbats':
return 4;
default:
return 0;
}
}
/* eslint-disable-next-line @typescript-eslint/no-explicit-any */
private getFont(content: any): PdfFont {
if (content.font) {
return content.font;
}
const fontSize: number = (!isNullOrUndefined(content.style.fontSize)) ? (content.style.fontSize * 0.75) : 9.75;
const fontFamily: number = (!isNullOrUndefined(content.style.fontFamily)) ?
(this.getFontFamily(content.style.fontFamily)) : PdfFontFamily.TimesRoman;
let fontStyle: PdfFontStyle = PdfFontStyle.Regular;
if (!isNullOrUndefined(content.style.bold) && content.style.bold) {
fontStyle |= PdfFontStyle.Bold;
}
if (!isNullOrUndefined(content.style.italic) && content.style.italic) {
fontStyle |= PdfFontStyle.Italic;
}
if (!isNullOrUndefined(content.style.underline) && content.style.underline) {
fontStyle |= PdfFontStyle.Underline;
}
if (!isNullOrUndefined(content.style.strikeout) && content.style.strikeout) {
fontStyle |= PdfFontStyle.Strikeout;
}
return new PdfStandardFont(fontFamily, fontSize, fontStyle);
}
private renderEmptyGantt(): void {
const row: PdfTreeGridRow = this.gantt.rows.addRow();
row.cells.getCell(0).isHeaderCell = false;
row.height = pixelToPoint(this.parent.rowHeight);
this.copyStyles(this.ganttStyle.columnHeader, row.cells.getCell(0), row.isParentRow);
const count: number = this.columns.length;
this.mergeCells(0, 0, count);
}
private mergeCells(rowIndex: number, colIndex: number, lastColIndex: number): void {
this.gantt.rows.getRow(rowIndex).cells.getCell(colIndex).columnSpan = lastColIndex;
}
/* eslint-disable-next-line */
private copyStyles(style: PdfGanttCellStyle, cell: PdfTreeGridCell, isParentRow: boolean): void {
cell.style.fontColor = new PdfColor(style.fontColor);
cell.style.backgroundColor = new PdfColor(style.backgroundColor);
cell.style.borderColor = new PdfColor(style.borderColor);
cell.style.fontSize = style.fontSize;
cell.style.fontStyle = style.fontStyle;
/* eslint-disable-next-line */
cell.style.format = (<any>Object).assign(new PdfStringFormat(), style.format);
cell.style.borders = new PdfBorders();
cell.style.borders.all = new PdfPen(cell.style.borderColor);
cell.style.padding = new PdfPaddings();
let padding: number = 0;
if (cell.isHeaderCell) {
padding = this.parent.timelineModule.isSingleTier ? 45 / 2 : 60 / 2;
} else {
padding = this.parent.rowHeight / 2;
}
cell.style.padding.top = padding - style.fontSize;
cell.style.padding.bottom = padding - style.fontSize;
cell.style.padding.left = 10;
cell.style.padding.right = 10;
}
    /**
     * Prepares document-level settings before layouting: decides horizontal
     * overflow for wide grids and draws the repeating page footer.
     *
     * @param {PdfDocument} pdfDoc .
     * @returns {void} .
     * @private
     */
    public initializePdf(pdfDoc: PdfDocument): void {
        this.pdfDoc = pdfDoc;
        // NOTE(review): 'widths' is never populated, so widths[treeColumnIndex]
        // is always undefined and the 'else if' branch below can never execute.
        // Confirm whether measured column widths were meant to be collected here.
        const widths: number[] = [];
        const treeColumnIndex: number = 0;
        // 82 looks like a reserved page-margin allowance in points — TODO confirm.
        const tWidth: number = (this.pdfDoc.pageSettings.width - 82);
        if (this.totalColumnWidth > (this.pdfDoc.pageSettings.width - 82)) {
            this.gantt.style.allowHorizontalOverflow = true;
        } else if ((tWidth / this.columns.length) < widths[treeColumnIndex]) {
            this.gantt.columns.getColumn(treeColumnIndex).width = widths[treeColumnIndex];
        }
        // Footer is drawn by default: enabled when true OR when left unspecified.
        if (this.exportProps.enableFooter || isNullOrUndefined(this.exportProps.enableFooter)) {
            //code for draw the footer content
            const bounds: RectangleF = new RectangleF(0, 0, pdfDoc.pageSettings.width, 35);
            const pen: PdfPen = new PdfPen(this.ganttStyle.chartGridLineColor);
            const footer: PdfPageTemplateElement = new PdfPageTemplateElement(bounds);
            const footerBrush: PdfBrush = new PdfSolidBrush(this.ganttStyle.footer.backgroundColor);
            footer.graphics.drawRectangle(pen, footerBrush, 0, 0, pdfDoc.pageSettings.width, 35);
            /* eslint-disable-next-line */
            const font: PdfFont = new PdfStandardFont(this.ganttStyle.fontFamily, this.ganttStyle.footer.fontSize, this.ganttStyle.footer.fontStyle);
            const brush: PdfBrush = new PdfSolidBrush(this.ganttStyle.footer.fontColor);
            // Composite field renders "Page {current}" with the page-count field available.
            const pageNumber: PdfPageNumberField = new PdfPageNumberField(font);
            const count: PdfPageCountField = new PdfPageCountField(font, brush);
            const compositeField: PdfCompositeField = new PdfCompositeField(font, brush, 'Page {0}', pageNumber, count);
            compositeField.stringFormat = this.ganttStyle.footer.format;
            compositeField.bounds = bounds;
            compositeField.draw(footer.graphics, new PointF(0, 0));
            pdfDoc.template.bottom = footer;
        }
    }
}
/**
* @hidden
* `ExportValueFormatter` for `PdfExport` & `ExcelExport`
*/
export class ExportValueFormatter {
private internationalization: Internationalization;
private valueFormatter: ValueFormatter;
public constructor(culture: string) {
this.valueFormatter = new ValueFormatter(culture);
this.internationalization = new Internationalization(culture);
}
/* eslint-disable-next-line @typescript-eslint/no-explicit-any */
private returnFormattedValue(args: any, customFormat: DateFormatOptions): string {
if (!isNullOrUndefined(args.value) && args.value) {
return this.valueFormatter.getFormatFunction(customFormat)(args.value);
} else {
return '';
}
}
/**
* @private
*/
/* eslint-disable-next-line */
public formatCellValue(args: any): string {
if (args.isForeignKey) {
args.value = getValue(args.column.foreignKeyValue, getForeignData(args.column, {}, args.value)[0]);
}
if (args.column.type === 'number' && args.column.format !== undefined && args.column.format !== '') {
return args.value ? this.internationalization.getNumberFormat({ format: args.column.format })(args.value) : '';
} else if (args.column.type === 'boolean') {
return args.value ? 'true' : 'false';
} else if ((args.column.type === 'date' || args.column.type === 'datetime' || args.column.type === 'time') && args.column.format !== undefined) {
if (typeof args.value === 'string') {
args.value = new Date(args.value);
}
if (typeof args.column.format === 'string') {
let format: DateFormatOptions;
if (args.column.type === 'date') {
format = { type: 'date', skeleton: args.column.format };
} else if (args.column.type === 'time') {
format = { type: 'time', skeleton: args.column.format };
} else {
format = { type: 'dateTime', skeleton: args.column.format };
}
return this.returnFormattedValue(args, format);
} else {
if (args.column.format instanceof Object && args.column.format.type === undefined) {
return (args.value.toString());
} else {
let customFormat: DateFormatOptions;
if (args.column.type === 'date') {
/* eslint-disable-next-line max-len */
customFormat = { type: args.column.format.type, format: args.column.format.format, skeleton: args.column.format.skeleton };
} else if (args.column.type === 'time') {
customFormat = { type: 'time', format: args.column.format.format, skeleton: args.column.format.skeleton };
} else {
customFormat = { type: 'dateTime', format: args.column.format.format, skeleton: args.column.format.skeleton };
}
return this.returnFormattedValue(args, customFormat);
}
}
} else {
if ((!isNullOrUndefined(args.column.type) && !isNullOrUndefined(args.value)) || !isNullOrUndefined(args.value)) {
return (args.value).toString();
} else {
return '';
}
}
}
} | the_stack |
import * as protos from '../protos/protos';
import * as assert from 'assert';
import * as sinon from 'sinon';
import {SinonStub} from 'sinon';
import {describe, it} from 'mocha';
import * as cloudtasksModule from '../src';
import {PassThrough} from 'stream';
import {protobuf} from 'google-gax';
// Round-trips a protobuf message through toObject/fromObject so that every
// field is present with its default value, yielding a fully-populated sample.
function generateSampleMessage<T extends object>(instance: T) {
  const messageType = instance.constructor as typeof protobuf.Message;
  const plainObject = messageType.toObject(instance as protobuf.Message<T>, {
    defaults: true,
  });
  return messageType.fromObject(plainObject) as T;
}
// Stub for a promise-style unary call: rejects with `error` when given,
// otherwise resolves with the gax-style [response] tuple.
function stubSimpleCall<ResponseType>(response?: ResponseType, error?: Error) {
  if (error) {
    return sinon.stub().rejects(error);
  }
  return sinon.stub().resolves([response]);
}
// Stub for a callback-style unary call: invokes the third argument (the
// callback) with (error) or (null, response).
function stubSimpleCallWithCallback<ResponseType>(
  response?: ResponseType,
  error?: Error
) {
  if (error) {
    return sinon.stub().callsArgWith(2, error);
  }
  return sinon.stub().callsArgWith(2, null, response);
}
// Stub for a page-streaming call: returns a stub yielding an object-mode
// PassThrough whose transform emits one canned response per chunk (or the
// given error), and schedules the chunk writes plus stream end.
function stubPageStreamingCall<ResponseType>(
  responses?: ResponseType[],
  error?: Error
) {
  const pagingStub = sinon.stub();
  if (responses) {
    responses.forEach((response, index) => {
      pagingStub.onCall(index).callsArgWith(2, null, response);
    });
  }
  const transformStub = error
    ? sinon.stub().callsArgWith(2, error)
    : pagingStub;
  const mockStream = new PassThrough({
    objectMode: true,
    transform: transformStub,
  });
  // Push one chunk per response (at least one, so the error path also fires),
  // then close the stream.
  const writeCount = responses ? responses.length : 1;
  for (let i = 0; i < writeCount; ++i) {
    setImmediate(() => {
      mockStream.write({});
    });
  }
  setImmediate(() => {
    mockStream.end();
  });
  return sinon.stub().returns(mockStream);
}
// Stub for an async-iteration call: returns a stub yielding a hand-rolled
// async iterable that replays `responses` one at a time, or rejects every
// next() with `error`.
function stubAsyncIterationCall<ResponseType>(
  responses?: ResponseType[],
  error?: Error
) {
  let position = 0;
  const asyncIterable = {
    [Symbol.asyncIterator]() {
      return {
        async next() {
          if (error) {
            return Promise.reject(error);
          }
          if (position < responses!.length) {
            return Promise.resolve({done: false, value: responses![position++]});
          }
          return Promise.resolve({done: true, value: undefined});
        },
      };
    },
  };
  return sinon.stub().returns(asyncIterable);
}
describe('v2beta3.CloudTasksClient', () => {
  // Static client surface: servicePath/apiEndpoint/port live on the class itself.
  it('has servicePath', () => {
    const servicePath = cloudtasksModule.v2beta3.CloudTasksClient.servicePath;
    assert(servicePath);
  });
  it('has apiEndpoint', () => {
    const apiEndpoint = cloudtasksModule.v2beta3.CloudTasksClient.apiEndpoint;
    assert(apiEndpoint);
  });
  it('has port', () => {
    const port = cloudtasksModule.v2beta3.CloudTasksClient.port;
    assert(port);
    assert(typeof port === 'number');
  });
  it('should create a client with no option', () => {
    const client = new cloudtasksModule.v2beta3.CloudTasksClient();
    assert(client);
  });
  it('should create a client with gRPC fallback', () => {
    const client = new cloudtasksModule.v2beta3.CloudTasksClient({
      fallback: true,
    });
    assert(client);
  });
  it('has initialize method and supports deferred initialization', async () => {
    const client = new cloudtasksModule.v2beta3.CloudTasksClient({
      credentials: {client_email: 'bogus', private_key: 'bogus'},
      projectId: 'bogus',
    });
    // The gRPC stub is created lazily: absent until initialize() is awaited.
    assert.strictEqual(client.cloudTasksStub, undefined);
    await client.initialize();
    assert(client.cloudTasksStub);
  });
  it('has close method', () => {
    const client = new cloudtasksModule.v2beta3.CloudTasksClient({
      credentials: {client_email: 'bogus', private_key: 'bogus'},
      projectId: 'bogus',
    });
    client.close();
  });
  it('has getProjectId method', async () => {
    const fakeProjectId = 'fake-project-id';
    const client = new cloudtasksModule.v2beta3.CloudTasksClient({
      credentials: {client_email: 'bogus', private_key: 'bogus'},
      projectId: 'bogus',
    });
    client.auth.getProjectId = sinon.stub().resolves(fakeProjectId);
    const result = await client.getProjectId();
    assert.strictEqual(result, fakeProjectId);
    assert((client.auth.getProjectId as SinonStub).calledWithExactly());
  });
  it('has getProjectId method with callback', async () => {
    const fakeProjectId = 'fake-project-id';
    const client = new cloudtasksModule.v2beta3.CloudTasksClient({
      credentials: {client_email: 'bogus', private_key: 'bogus'},
      projectId: 'bogus',
    });
    client.auth.getProjectId = sinon
      .stub()
      .callsArgWith(0, null, fakeProjectId);
    // Wrap the callback-style API in a promise so the test can await it.
    const promise = new Promise((resolve, reject) => {
      client.getProjectId((err?: Error | null, projectId?: string | null) => {
        if (err) {
          reject(err);
        } else {
          resolve(projectId);
        }
      });
    });
    const result = await promise;
    assert.strictEqual(result, fakeProjectId);
  });
  // getQueue unary RPC: promise path, callback path, and rejection path.
  describe('getQueue', () => {
    it('invokes getQueue without error', async () => {
      const client = new cloudtasksModule.v2beta3.CloudTasksClient({
        credentials: {client_email: 'bogus', private_key: 'bogus'},
        projectId: 'bogus',
      });
      client.initialize();
      const request = generateSampleMessage(
        new protos.google.cloud.tasks.v2beta3.GetQueueRequest()
      );
      request.name = '';
      // Routing header derived from the request's 'name' field.
      const expectedHeaderRequestParams = 'name=';
      const expectedOptions = {
        otherArgs: {
          headers: {
            'x-goog-request-params': expectedHeaderRequestParams,
          },
        },
      };
      const expectedResponse = generateSampleMessage(
        new protos.google.cloud.tasks.v2beta3.Queue()
      );
      client.innerApiCalls.getQueue = stubSimpleCall(expectedResponse);
      const [response] = await client.getQueue(request);
      assert.deepStrictEqual(response, expectedResponse);
      assert(
        (client.innerApiCalls.getQueue as SinonStub)
          .getCall(0)
          .calledWith(request, expectedOptions, undefined)
      );
    });
    it('invokes getQueue without error using callback', async () => {
      const client = new cloudtasksModule.v2beta3.CloudTasksClient({
        credentials: {client_email: 'bogus', private_key: 'bogus'},
        projectId: 'bogus',
      });
      client.initialize();
      const request = generateSampleMessage(
        new protos.google.cloud.tasks.v2beta3.GetQueueRequest()
      );
      request.name = '';
      const expectedHeaderRequestParams = 'name=';
      const expectedOptions = {
        otherArgs: {
          headers: {
            'x-goog-request-params': expectedHeaderRequestParams,
          },
        },
      };
      const expectedResponse = generateSampleMessage(
        new protos.google.cloud.tasks.v2beta3.Queue()
      );
      client.innerApiCalls.getQueue =
        stubSimpleCallWithCallback(expectedResponse);
      const promise = new Promise((resolve, reject) => {
        client.getQueue(
          request,
          (
            err?: Error | null,
            result?: protos.google.cloud.tasks.v2beta3.IQueue | null
          ) => {
            if (err) {
              reject(err);
            } else {
              resolve(result);
            }
          }
        );
      });
      const response = await promise;
      assert.deepStrictEqual(response, expectedResponse);
      assert(
        (client.innerApiCalls.getQueue as SinonStub)
          .getCall(0)
          .calledWith(request, expectedOptions /*, callback defined above */)
      );
    });
    it('invokes getQueue with error', async () => {
      const client = new cloudtasksModule.v2beta3.CloudTasksClient({
        credentials: {client_email: 'bogus', private_key: 'bogus'},
        projectId: 'bogus',
      });
      client.initialize();
      const request = generateSampleMessage(
        new protos.google.cloud.tasks.v2beta3.GetQueueRequest()
      );
      request.name = '';
      const expectedHeaderRequestParams = 'name=';
      const expectedOptions = {
        otherArgs: {
          headers: {
            'x-goog-request-params': expectedHeaderRequestParams,
          },
        },
      };
      const expectedError = new Error('expected');
      client.innerApiCalls.getQueue = stubSimpleCall(undefined, expectedError);
      await assert.rejects(client.getQueue(request), expectedError);
      assert(
        (client.innerApiCalls.getQueue as SinonStub)
          .getCall(0)
          .calledWith(request, expectedOptions, undefined)
      );
    });
  });
  // createQueue unary RPC: routed by the request's 'parent' field.
  describe('createQueue', () => {
    it('invokes createQueue without error', async () => {
      const client = new cloudtasksModule.v2beta3.CloudTasksClient({
        credentials: {client_email: 'bogus', private_key: 'bogus'},
        projectId: 'bogus',
      });
      client.initialize();
      const request = generateSampleMessage(
        new protos.google.cloud.tasks.v2beta3.CreateQueueRequest()
      );
      request.parent = '';
      const expectedHeaderRequestParams = 'parent=';
      const expectedOptions = {
        otherArgs: {
          headers: {
            'x-goog-request-params': expectedHeaderRequestParams,
          },
        },
      };
      const expectedResponse = generateSampleMessage(
        new protos.google.cloud.tasks.v2beta3.Queue()
      );
      client.innerApiCalls.createQueue = stubSimpleCall(expectedResponse);
      const [response] = await client.createQueue(request);
      assert.deepStrictEqual(response, expectedResponse);
      assert(
        (client.innerApiCalls.createQueue as SinonStub)
          .getCall(0)
          .calledWith(request, expectedOptions, undefined)
      );
    });
    it('invokes createQueue without error using callback', async () => {
      const client = new cloudtasksModule.v2beta3.CloudTasksClient({
        credentials: {client_email: 'bogus', private_key: 'bogus'},
        projectId: 'bogus',
      });
      client.initialize();
      const request = generateSampleMessage(
        new protos.google.cloud.tasks.v2beta3.CreateQueueRequest()
      );
      request.parent = '';
      const expectedHeaderRequestParams = 'parent=';
      const expectedOptions = {
        otherArgs: {
          headers: {
            'x-goog-request-params': expectedHeaderRequestParams,
          },
        },
      };
      const expectedResponse = generateSampleMessage(
        new protos.google.cloud.tasks.v2beta3.Queue()
      );
      client.innerApiCalls.createQueue =
        stubSimpleCallWithCallback(expectedResponse);
      const promise = new Promise((resolve, reject) => {
        client.createQueue(
          request,
          (
            err?: Error | null,
            result?: protos.google.cloud.tasks.v2beta3.IQueue | null
          ) => {
            if (err) {
              reject(err);
            } else {
              resolve(result);
            }
          }
        );
      });
      const response = await promise;
      assert.deepStrictEqual(response, expectedResponse);
      assert(
        (client.innerApiCalls.createQueue as SinonStub)
          .getCall(0)
          .calledWith(request, expectedOptions /*, callback defined above */)
      );
    });
    it('invokes createQueue with error', async () => {
      const client = new cloudtasksModule.v2beta3.CloudTasksClient({
        credentials: {client_email: 'bogus', private_key: 'bogus'},
        projectId: 'bogus',
      });
      client.initialize();
      const request = generateSampleMessage(
        new protos.google.cloud.tasks.v2beta3.CreateQueueRequest()
      );
      request.parent = '';
      const expectedHeaderRequestParams = 'parent=';
      const expectedOptions = {
        otherArgs: {
          headers: {
            'x-goog-request-params': expectedHeaderRequestParams,
          },
        },
      };
      const expectedError = new Error('expected');
      client.innerApiCalls.createQueue = stubSimpleCall(
        undefined,
        expectedError
      );
      await assert.rejects(client.createQueue(request), expectedError);
      assert(
        (client.innerApiCalls.createQueue as SinonStub)
          .getCall(0)
          .calledWith(request, expectedOptions, undefined)
      );
    });
  });
  // updateQueue unary RPC: routed by the nested 'queue.name' field.
  describe('updateQueue', () => {
    it('invokes updateQueue without error', async () => {
      const client = new cloudtasksModule.v2beta3.CloudTasksClient({
        credentials: {client_email: 'bogus', private_key: 'bogus'},
        projectId: 'bogus',
      });
      client.initialize();
      const request = generateSampleMessage(
        new protos.google.cloud.tasks.v2beta3.UpdateQueueRequest()
      );
      request.queue = {};
      request.queue.name = '';
      const expectedHeaderRequestParams = 'queue.name=';
      const expectedOptions = {
        otherArgs: {
          headers: {
            'x-goog-request-params': expectedHeaderRequestParams,
          },
        },
      };
      const expectedResponse = generateSampleMessage(
        new protos.google.cloud.tasks.v2beta3.Queue()
      );
      client.innerApiCalls.updateQueue = stubSimpleCall(expectedResponse);
      const [response] = await client.updateQueue(request);
      assert.deepStrictEqual(response, expectedResponse);
      assert(
        (client.innerApiCalls.updateQueue as SinonStub)
          .getCall(0)
          .calledWith(request, expectedOptions, undefined)
      );
    });
    it('invokes updateQueue without error using callback', async () => {
      const client = new cloudtasksModule.v2beta3.CloudTasksClient({
        credentials: {client_email: 'bogus', private_key: 'bogus'},
        projectId: 'bogus',
      });
      client.initialize();
      const request = generateSampleMessage(
        new protos.google.cloud.tasks.v2beta3.UpdateQueueRequest()
      );
      request.queue = {};
      request.queue.name = '';
      const expectedHeaderRequestParams = 'queue.name=';
      const expectedOptions = {
        otherArgs: {
          headers: {
            'x-goog-request-params': expectedHeaderRequestParams,
          },
        },
      };
      const expectedResponse = generateSampleMessage(
        new protos.google.cloud.tasks.v2beta3.Queue()
      );
      client.innerApiCalls.updateQueue =
        stubSimpleCallWithCallback(expectedResponse);
      const promise = new Promise((resolve, reject) => {
        client.updateQueue(
          request,
          (
            err?: Error | null,
            result?: protos.google.cloud.tasks.v2beta3.IQueue | null
          ) => {
            if (err) {
              reject(err);
            } else {
              resolve(result);
            }
          }
        );
      });
      const response = await promise;
      assert.deepStrictEqual(response, expectedResponse);
      assert(
        (client.innerApiCalls.updateQueue as SinonStub)
          .getCall(0)
          .calledWith(request, expectedOptions /*, callback defined above */)
      );
    });
    it('invokes updateQueue with error', async () => {
      const client = new cloudtasksModule.v2beta3.CloudTasksClient({
        credentials: {client_email: 'bogus', private_key: 'bogus'},
        projectId: 'bogus',
      });
      client.initialize();
      const request = generateSampleMessage(
        new protos.google.cloud.tasks.v2beta3.UpdateQueueRequest()
      );
      request.queue = {};
      request.queue.name = '';
      const expectedHeaderRequestParams = 'queue.name=';
      const expectedOptions = {
        otherArgs: {
          headers: {
            'x-goog-request-params': expectedHeaderRequestParams,
          },
        },
      };
      const expectedError = new Error('expected');
      client.innerApiCalls.updateQueue = stubSimpleCall(
        undefined,
        expectedError
      );
      await assert.rejects(client.updateQueue(request), expectedError);
      assert(
        (client.innerApiCalls.updateQueue as SinonStub)
          .getCall(0)
          .calledWith(request, expectedOptions, undefined)
      );
    });
  });
  // deleteQueue unary RPC: resolves to google.protobuf.Empty.
  describe('deleteQueue', () => {
    it('invokes deleteQueue without error', async () => {
      const client = new cloudtasksModule.v2beta3.CloudTasksClient({
        credentials: {client_email: 'bogus', private_key: 'bogus'},
        projectId: 'bogus',
      });
      client.initialize();
      const request = generateSampleMessage(
        new protos.google.cloud.tasks.v2beta3.DeleteQueueRequest()
      );
      request.name = '';
      const expectedHeaderRequestParams = 'name=';
      const expectedOptions = {
        otherArgs: {
          headers: {
            'x-goog-request-params': expectedHeaderRequestParams,
          },
        },
      };
      const expectedResponse = generateSampleMessage(
        new protos.google.protobuf.Empty()
      );
      client.innerApiCalls.deleteQueue = stubSimpleCall(expectedResponse);
      const [response] = await client.deleteQueue(request);
      assert.deepStrictEqual(response, expectedResponse);
      assert(
        (client.innerApiCalls.deleteQueue as SinonStub)
          .getCall(0)
          .calledWith(request, expectedOptions, undefined)
      );
    });
    it('invokes deleteQueue without error using callback', async () => {
      const client = new cloudtasksModule.v2beta3.CloudTasksClient({
        credentials: {client_email: 'bogus', private_key: 'bogus'},
        projectId: 'bogus',
      });
      client.initialize();
      const request = generateSampleMessage(
        new protos.google.cloud.tasks.v2beta3.DeleteQueueRequest()
      );
      request.name = '';
      const expectedHeaderRequestParams = 'name=';
      const expectedOptions = {
        otherArgs: {
          headers: {
            'x-goog-request-params': expectedHeaderRequestParams,
          },
        },
      };
      const expectedResponse = generateSampleMessage(
        new protos.google.protobuf.Empty()
      );
      client.innerApiCalls.deleteQueue =
        stubSimpleCallWithCallback(expectedResponse);
      const promise = new Promise((resolve, reject) => {
        client.deleteQueue(
          request,
          (
            err?: Error | null,
            result?: protos.google.protobuf.IEmpty | null
          ) => {
            if (err) {
              reject(err);
            } else {
              resolve(result);
            }
          }
        );
      });
      const response = await promise;
      assert.deepStrictEqual(response, expectedResponse);
      assert(
        (client.innerApiCalls.deleteQueue as SinonStub)
          .getCall(0)
          .calledWith(request, expectedOptions /*, callback defined above */)
      );
    });
    it('invokes deleteQueue with error', async () => {
      const client = new cloudtasksModule.v2beta3.CloudTasksClient({
        credentials: {client_email: 'bogus', private_key: 'bogus'},
        projectId: 'bogus',
      });
      client.initialize();
      const request = generateSampleMessage(
        new protos.google.cloud.tasks.v2beta3.DeleteQueueRequest()
      );
      request.name = '';
      const expectedHeaderRequestParams = 'name=';
      const expectedOptions = {
        otherArgs: {
          headers: {
            'x-goog-request-params': expectedHeaderRequestParams,
          },
        },
      };
      const expectedError = new Error('expected');
      client.innerApiCalls.deleteQueue = stubSimpleCall(
        undefined,
        expectedError
      );
      await assert.rejects(client.deleteQueue(request), expectedError);
      assert(
        (client.innerApiCalls.deleteQueue as SinonStub)
          .getCall(0)
          .calledWith(request, expectedOptions, undefined)
      );
    });
  });
  // purgeQueue unary RPC: promise path, callback path, and rejection path.
  describe('purgeQueue', () => {
    it('invokes purgeQueue without error', async () => {
      const client = new cloudtasksModule.v2beta3.CloudTasksClient({
        credentials: {client_email: 'bogus', private_key: 'bogus'},
        projectId: 'bogus',
      });
      client.initialize();
      const request = generateSampleMessage(
        new protos.google.cloud.tasks.v2beta3.PurgeQueueRequest()
      );
      request.name = '';
      const expectedHeaderRequestParams = 'name=';
      const expectedOptions = {
        otherArgs: {
          headers: {
            'x-goog-request-params': expectedHeaderRequestParams,
          },
        },
      };
      const expectedResponse = generateSampleMessage(
        new protos.google.cloud.tasks.v2beta3.Queue()
      );
      client.innerApiCalls.purgeQueue = stubSimpleCall(expectedResponse);
      const [response] = await client.purgeQueue(request);
      assert.deepStrictEqual(response, expectedResponse);
      assert(
        (client.innerApiCalls.purgeQueue as SinonStub)
          .getCall(0)
          .calledWith(request, expectedOptions, undefined)
      );
    });
    it('invokes purgeQueue without error using callback', async () => {
      const client = new cloudtasksModule.v2beta3.CloudTasksClient({
        credentials: {client_email: 'bogus', private_key: 'bogus'},
        projectId: 'bogus',
      });
      client.initialize();
      const request = generateSampleMessage(
        new protos.google.cloud.tasks.v2beta3.PurgeQueueRequest()
      );
      request.name = '';
      const expectedHeaderRequestParams = 'name=';
      const expectedOptions = {
        otherArgs: {
          headers: {
            'x-goog-request-params': expectedHeaderRequestParams,
          },
        },
      };
      const expectedResponse = generateSampleMessage(
        new protos.google.cloud.tasks.v2beta3.Queue()
      );
      client.innerApiCalls.purgeQueue =
        stubSimpleCallWithCallback(expectedResponse);
      const promise = new Promise((resolve, reject) => {
        client.purgeQueue(
          request,
          (
            err?: Error | null,
            result?: protos.google.cloud.tasks.v2beta3.IQueue | null
          ) => {
            if (err) {
              reject(err);
            } else {
              resolve(result);
            }
          }
        );
      });
      const response = await promise;
      assert.deepStrictEqual(response, expectedResponse);
      assert(
        (client.innerApiCalls.purgeQueue as SinonStub)
          .getCall(0)
          .calledWith(request, expectedOptions /*, callback defined above */)
      );
    });
    it('invokes purgeQueue with error', async () => {
      const client = new cloudtasksModule.v2beta3.CloudTasksClient({
        credentials: {client_email: 'bogus', private_key: 'bogus'},
        projectId: 'bogus',
      });
      client.initialize();
      const request = generateSampleMessage(
        new protos.google.cloud.tasks.v2beta3.PurgeQueueRequest()
      );
      request.name = '';
      const expectedHeaderRequestParams = 'name=';
      const expectedOptions = {
        otherArgs: {
          headers: {
            'x-goog-request-params': expectedHeaderRequestParams,
          },
        },
      };
      const expectedError = new Error('expected');
      client.innerApiCalls.purgeQueue = stubSimpleCall(
        undefined,
        expectedError
      );
      await assert.rejects(client.purgeQueue(request), expectedError);
      assert(
        (client.innerApiCalls.purgeQueue as SinonStub)
          .getCall(0)
          .calledWith(request, expectedOptions, undefined)
      );
    });
  });
  // pauseQueue unary RPC: promise path, callback path, and rejection path.
  describe('pauseQueue', () => {
    it('invokes pauseQueue without error', async () => {
      const client = new cloudtasksModule.v2beta3.CloudTasksClient({
        credentials: {client_email: 'bogus', private_key: 'bogus'},
        projectId: 'bogus',
      });
      client.initialize();
      const request = generateSampleMessage(
        new protos.google.cloud.tasks.v2beta3.PauseQueueRequest()
      );
      request.name = '';
      const expectedHeaderRequestParams = 'name=';
      const expectedOptions = {
        otherArgs: {
          headers: {
            'x-goog-request-params': expectedHeaderRequestParams,
          },
        },
      };
      const expectedResponse = generateSampleMessage(
        new protos.google.cloud.tasks.v2beta3.Queue()
      );
      client.innerApiCalls.pauseQueue = stubSimpleCall(expectedResponse);
      const [response] = await client.pauseQueue(request);
      assert.deepStrictEqual(response, expectedResponse);
      assert(
        (client.innerApiCalls.pauseQueue as SinonStub)
          .getCall(0)
          .calledWith(request, expectedOptions, undefined)
      );
    });
    it('invokes pauseQueue without error using callback', async () => {
      const client = new cloudtasksModule.v2beta3.CloudTasksClient({
        credentials: {client_email: 'bogus', private_key: 'bogus'},
        projectId: 'bogus',
      });
      client.initialize();
      const request = generateSampleMessage(
        new protos.google.cloud.tasks.v2beta3.PauseQueueRequest()
      );
      request.name = '';
      const expectedHeaderRequestParams = 'name=';
      const expectedOptions = {
        otherArgs: {
          headers: {
            'x-goog-request-params': expectedHeaderRequestParams,
          },
        },
      };
      const expectedResponse = generateSampleMessage(
        new protos.google.cloud.tasks.v2beta3.Queue()
      );
      client.innerApiCalls.pauseQueue =
        stubSimpleCallWithCallback(expectedResponse);
      const promise = new Promise((resolve, reject) => {
        client.pauseQueue(
          request,
          (
            err?: Error | null,
            result?: protos.google.cloud.tasks.v2beta3.IQueue | null
          ) => {
            if (err) {
              reject(err);
            } else {
              resolve(result);
            }
          }
        );
      });
      const response = await promise;
      assert.deepStrictEqual(response, expectedResponse);
      assert(
        (client.innerApiCalls.pauseQueue as SinonStub)
          .getCall(0)
          .calledWith(request, expectedOptions /*, callback defined above */)
      );
    });
    it('invokes pauseQueue with error', async () => {
      const client = new cloudtasksModule.v2beta3.CloudTasksClient({
        credentials: {client_email: 'bogus', private_key: 'bogus'},
        projectId: 'bogus',
      });
      client.initialize();
      const request = generateSampleMessage(
        new protos.google.cloud.tasks.v2beta3.PauseQueueRequest()
      );
      request.name = '';
      const expectedHeaderRequestParams = 'name=';
      const expectedOptions = {
        otherArgs: {
          headers: {
            'x-goog-request-params': expectedHeaderRequestParams,
          },
        },
      };
      const expectedError = new Error('expected');
      client.innerApiCalls.pauseQueue = stubSimpleCall(
        undefined,
        expectedError
      );
      await assert.rejects(client.pauseQueue(request), expectedError);
      assert(
        (client.innerApiCalls.pauseQueue as SinonStub)
          .getCall(0)
          .calledWith(request, expectedOptions, undefined)
      );
    });
  });
  // resumeQueue unary RPC: promise path, callback path, and rejection path.
  describe('resumeQueue', () => {
    it('invokes resumeQueue without error', async () => {
      const client = new cloudtasksModule.v2beta3.CloudTasksClient({
        credentials: {client_email: 'bogus', private_key: 'bogus'},
        projectId: 'bogus',
      });
      client.initialize();
      const request = generateSampleMessage(
        new protos.google.cloud.tasks.v2beta3.ResumeQueueRequest()
      );
      request.name = '';
      const expectedHeaderRequestParams = 'name=';
      const expectedOptions = {
        otherArgs: {
          headers: {
            'x-goog-request-params': expectedHeaderRequestParams,
          },
        },
      };
      const expectedResponse = generateSampleMessage(
        new protos.google.cloud.tasks.v2beta3.Queue()
      );
      client.innerApiCalls.resumeQueue = stubSimpleCall(expectedResponse);
      const [response] = await client.resumeQueue(request);
      assert.deepStrictEqual(response, expectedResponse);
      assert(
        (client.innerApiCalls.resumeQueue as SinonStub)
          .getCall(0)
          .calledWith(request, expectedOptions, undefined)
      );
    });
    it('invokes resumeQueue without error using callback', async () => {
      const client = new cloudtasksModule.v2beta3.CloudTasksClient({
        credentials: {client_email: 'bogus', private_key: 'bogus'},
        projectId: 'bogus',
      });
      client.initialize();
      const request = generateSampleMessage(
        new protos.google.cloud.tasks.v2beta3.ResumeQueueRequest()
      );
      request.name = '';
      const expectedHeaderRequestParams = 'name=';
      const expectedOptions = {
        otherArgs: {
          headers: {
            'x-goog-request-params': expectedHeaderRequestParams,
          },
        },
      };
      const expectedResponse = generateSampleMessage(
        new protos.google.cloud.tasks.v2beta3.Queue()
      );
      client.innerApiCalls.resumeQueue =
        stubSimpleCallWithCallback(expectedResponse);
      const promise = new Promise((resolve, reject) => {
        client.resumeQueue(
          request,
          (
            err?: Error | null,
            result?: protos.google.cloud.tasks.v2beta3.IQueue | null
          ) => {
            if (err) {
              reject(err);
            } else {
              resolve(result);
            }
          }
        );
      });
      const response = await promise;
      assert.deepStrictEqual(response, expectedResponse);
      assert(
        (client.innerApiCalls.resumeQueue as SinonStub)
          .getCall(0)
          .calledWith(request, expectedOptions /*, callback defined above */)
      );
    });
    it('invokes resumeQueue with error', async () => {
      const client = new cloudtasksModule.v2beta3.CloudTasksClient({
        credentials: {client_email: 'bogus', private_key: 'bogus'},
        projectId: 'bogus',
      });
      client.initialize();
      const request = generateSampleMessage(
        new protos.google.cloud.tasks.v2beta3.ResumeQueueRequest()
      );
      request.name = '';
      const expectedHeaderRequestParams = 'name=';
      const expectedOptions = {
        otherArgs: {
          headers: {
            'x-goog-request-params': expectedHeaderRequestParams,
          },
        },
      };
      const expectedError = new Error('expected');
      client.innerApiCalls.resumeQueue = stubSimpleCall(
        undefined,
        expectedError
      );
      await assert.rejects(client.resumeQueue(request), expectedError);
      assert(
        (client.innerApiCalls.resumeQueue as SinonStub)
          .getCall(0)
          .calledWith(request, expectedOptions, undefined)
      );
    });
  });
describe('getIamPolicy', () => {
it('invokes getIamPolicy without error', async () => {
const client = new cloudtasksModule.v2beta3.CloudTasksClient({
credentials: {client_email: 'bogus', private_key: 'bogus'},
projectId: 'bogus',
});
client.initialize();
const request = generateSampleMessage(
new protos.google.iam.v1.GetIamPolicyRequest()
);
request.resource = '';
const expectedHeaderRequestParams = 'resource=';
const expectedOptions = {
otherArgs: {
headers: {
'x-goog-request-params': expectedHeaderRequestParams,
},
},
};
const expectedResponse = generateSampleMessage(
new protos.google.iam.v1.Policy()
);
client.innerApiCalls.getIamPolicy = stubSimpleCall(expectedResponse);
const [response] = await client.getIamPolicy(request);
assert.deepStrictEqual(response, expectedResponse);
assert(
(client.innerApiCalls.getIamPolicy as SinonStub)
.getCall(0)
.calledWith(request, expectedOptions, undefined)
);
});
it('invokes getIamPolicy without error using callback', async () => {
const client = new cloudtasksModule.v2beta3.CloudTasksClient({
credentials: {client_email: 'bogus', private_key: 'bogus'},
projectId: 'bogus',
});
client.initialize();
const request = generateSampleMessage(
new protos.google.iam.v1.GetIamPolicyRequest()
);
request.resource = '';
const expectedHeaderRequestParams = 'resource=';
const expectedOptions = {
otherArgs: {
headers: {
'x-goog-request-params': expectedHeaderRequestParams,
},
},
};
const expectedResponse = generateSampleMessage(
new protos.google.iam.v1.Policy()
);
client.innerApiCalls.getIamPolicy =
stubSimpleCallWithCallback(expectedResponse);
const promise = new Promise((resolve, reject) => {
client.getIamPolicy(
request,
(
err?: Error | null,
result?: protos.google.iam.v1.IPolicy | null
) => {
if (err) {
reject(err);
} else {
resolve(result);
}
}
);
});
const response = await promise;
assert.deepStrictEqual(response, expectedResponse);
assert(
(client.innerApiCalls.getIamPolicy as SinonStub)
.getCall(0)
.calledWith(request, expectedOptions /*, callback defined above */)
);
});
it('invokes getIamPolicy with error', async () => {
const client = new cloudtasksModule.v2beta3.CloudTasksClient({
credentials: {client_email: 'bogus', private_key: 'bogus'},
projectId: 'bogus',
});
client.initialize();
const request = generateSampleMessage(
new protos.google.iam.v1.GetIamPolicyRequest()
);
request.resource = '';
const expectedHeaderRequestParams = 'resource=';
const expectedOptions = {
otherArgs: {
headers: {
'x-goog-request-params': expectedHeaderRequestParams,
},
},
};
const expectedError = new Error('expected');
client.innerApiCalls.getIamPolicy = stubSimpleCall(
undefined,
expectedError
);
await assert.rejects(client.getIamPolicy(request), expectedError);
assert(
(client.innerApiCalls.getIamPolicy as SinonStub)
.getCall(0)
.calledWith(request, expectedOptions, undefined)
);
});
});
describe('setIamPolicy', () => {
it('invokes setIamPolicy without error', async () => {
const client = new cloudtasksModule.v2beta3.CloudTasksClient({
credentials: {client_email: 'bogus', private_key: 'bogus'},
projectId: 'bogus',
});
client.initialize();
const request = generateSampleMessage(
new protos.google.iam.v1.SetIamPolicyRequest()
);
request.resource = '';
const expectedHeaderRequestParams = 'resource=';
const expectedOptions = {
otherArgs: {
headers: {
'x-goog-request-params': expectedHeaderRequestParams,
},
},
};
const expectedResponse = generateSampleMessage(
new protos.google.iam.v1.Policy()
);
client.innerApiCalls.setIamPolicy = stubSimpleCall(expectedResponse);
const [response] = await client.setIamPolicy(request);
assert.deepStrictEqual(response, expectedResponse);
assert(
(client.innerApiCalls.setIamPolicy as SinonStub)
.getCall(0)
.calledWith(request, expectedOptions, undefined)
);
});
it('invokes setIamPolicy without error using callback', async () => {
const client = new cloudtasksModule.v2beta3.CloudTasksClient({
credentials: {client_email: 'bogus', private_key: 'bogus'},
projectId: 'bogus',
});
client.initialize();
const request = generateSampleMessage(
new protos.google.iam.v1.SetIamPolicyRequest()
);
request.resource = '';
const expectedHeaderRequestParams = 'resource=';
const expectedOptions = {
otherArgs: {
headers: {
'x-goog-request-params': expectedHeaderRequestParams,
},
},
};
const expectedResponse = generateSampleMessage(
new protos.google.iam.v1.Policy()
);
client.innerApiCalls.setIamPolicy =
stubSimpleCallWithCallback(expectedResponse);
const promise = new Promise((resolve, reject) => {
client.setIamPolicy(
request,
(
err?: Error | null,
result?: protos.google.iam.v1.IPolicy | null
) => {
if (err) {
reject(err);
} else {
resolve(result);
}
}
);
});
const response = await promise;
assert.deepStrictEqual(response, expectedResponse);
assert(
(client.innerApiCalls.setIamPolicy as SinonStub)
.getCall(0)
.calledWith(request, expectedOptions /*, callback defined above */)
);
});
it('invokes setIamPolicy with error', async () => {
const client = new cloudtasksModule.v2beta3.CloudTasksClient({
credentials: {client_email: 'bogus', private_key: 'bogus'},
projectId: 'bogus',
});
client.initialize();
const request = generateSampleMessage(
new protos.google.iam.v1.SetIamPolicyRequest()
);
request.resource = '';
const expectedHeaderRequestParams = 'resource=';
const expectedOptions = {
otherArgs: {
headers: {
'x-goog-request-params': expectedHeaderRequestParams,
},
},
};
const expectedError = new Error('expected');
client.innerApiCalls.setIamPolicy = stubSimpleCall(
undefined,
expectedError
);
await assert.rejects(client.setIamPolicy(request), expectedError);
assert(
(client.innerApiCalls.setIamPolicy as SinonStub)
.getCall(0)
.calledWith(request, expectedOptions, undefined)
);
});
});
describe('testIamPermissions', () => {
it('invokes testIamPermissions without error', async () => {
const client = new cloudtasksModule.v2beta3.CloudTasksClient({
credentials: {client_email: 'bogus', private_key: 'bogus'},
projectId: 'bogus',
});
client.initialize();
const request = generateSampleMessage(
new protos.google.iam.v1.TestIamPermissionsRequest()
);
request.resource = '';
const expectedHeaderRequestParams = 'resource=';
const expectedOptions = {
otherArgs: {
headers: {
'x-goog-request-params': expectedHeaderRequestParams,
},
},
};
const expectedResponse = generateSampleMessage(
new protos.google.iam.v1.TestIamPermissionsResponse()
);
client.innerApiCalls.testIamPermissions =
stubSimpleCall(expectedResponse);
const [response] = await client.testIamPermissions(request);
assert.deepStrictEqual(response, expectedResponse);
assert(
(client.innerApiCalls.testIamPermissions as SinonStub)
.getCall(0)
.calledWith(request, expectedOptions, undefined)
);
});
it('invokes testIamPermissions without error using callback', async () => {
const client = new cloudtasksModule.v2beta3.CloudTasksClient({
credentials: {client_email: 'bogus', private_key: 'bogus'},
projectId: 'bogus',
});
client.initialize();
const request = generateSampleMessage(
new protos.google.iam.v1.TestIamPermissionsRequest()
);
request.resource = '';
const expectedHeaderRequestParams = 'resource=';
const expectedOptions = {
otherArgs: {
headers: {
'x-goog-request-params': expectedHeaderRequestParams,
},
},
};
const expectedResponse = generateSampleMessage(
new protos.google.iam.v1.TestIamPermissionsResponse()
);
client.innerApiCalls.testIamPermissions =
stubSimpleCallWithCallback(expectedResponse);
const promise = new Promise((resolve, reject) => {
client.testIamPermissions(
request,
(
err?: Error | null,
result?: protos.google.iam.v1.ITestIamPermissionsResponse | null
) => {
if (err) {
reject(err);
} else {
resolve(result);
}
}
);
});
const response = await promise;
assert.deepStrictEqual(response, expectedResponse);
assert(
(client.innerApiCalls.testIamPermissions as SinonStub)
.getCall(0)
.calledWith(request, expectedOptions /*, callback defined above */)
);
});
it('invokes testIamPermissions with error', async () => {
const client = new cloudtasksModule.v2beta3.CloudTasksClient({
credentials: {client_email: 'bogus', private_key: 'bogus'},
projectId: 'bogus',
});
client.initialize();
const request = generateSampleMessage(
new protos.google.iam.v1.TestIamPermissionsRequest()
);
request.resource = '';
const expectedHeaderRequestParams = 'resource=';
const expectedOptions = {
otherArgs: {
headers: {
'x-goog-request-params': expectedHeaderRequestParams,
},
},
};
const expectedError = new Error('expected');
client.innerApiCalls.testIamPermissions = stubSimpleCall(
undefined,
expectedError
);
await assert.rejects(client.testIamPermissions(request), expectedError);
assert(
(client.innerApiCalls.testIamPermissions as SinonStub)
.getCall(0)
.calledWith(request, expectedOptions, undefined)
);
});
});
describe('getTask', () => {
it('invokes getTask without error', async () => {
const client = new cloudtasksModule.v2beta3.CloudTasksClient({
credentials: {client_email: 'bogus', private_key: 'bogus'},
projectId: 'bogus',
});
client.initialize();
const request = generateSampleMessage(
new protos.google.cloud.tasks.v2beta3.GetTaskRequest()
);
request.name = '';
const expectedHeaderRequestParams = 'name=';
const expectedOptions = {
otherArgs: {
headers: {
'x-goog-request-params': expectedHeaderRequestParams,
},
},
};
const expectedResponse = generateSampleMessage(
new protos.google.cloud.tasks.v2beta3.Task()
);
client.innerApiCalls.getTask = stubSimpleCall(expectedResponse);
const [response] = await client.getTask(request);
assert.deepStrictEqual(response, expectedResponse);
assert(
(client.innerApiCalls.getTask as SinonStub)
.getCall(0)
.calledWith(request, expectedOptions, undefined)
);
});
it('invokes getTask without error using callback', async () => {
const client = new cloudtasksModule.v2beta3.CloudTasksClient({
credentials: {client_email: 'bogus', private_key: 'bogus'},
projectId: 'bogus',
});
client.initialize();
const request = generateSampleMessage(
new protos.google.cloud.tasks.v2beta3.GetTaskRequest()
);
request.name = '';
const expectedHeaderRequestParams = 'name=';
const expectedOptions = {
otherArgs: {
headers: {
'x-goog-request-params': expectedHeaderRequestParams,
},
},
};
const expectedResponse = generateSampleMessage(
new protos.google.cloud.tasks.v2beta3.Task()
);
client.innerApiCalls.getTask =
stubSimpleCallWithCallback(expectedResponse);
const promise = new Promise((resolve, reject) => {
client.getTask(
request,
(
err?: Error | null,
result?: protos.google.cloud.tasks.v2beta3.ITask | null
) => {
if (err) {
reject(err);
} else {
resolve(result);
}
}
);
});
const response = await promise;
assert.deepStrictEqual(response, expectedResponse);
assert(
(client.innerApiCalls.getTask as SinonStub)
.getCall(0)
.calledWith(request, expectedOptions /*, callback defined above */)
);
});
it('invokes getTask with error', async () => {
const client = new cloudtasksModule.v2beta3.CloudTasksClient({
credentials: {client_email: 'bogus', private_key: 'bogus'},
projectId: 'bogus',
});
client.initialize();
const request = generateSampleMessage(
new protos.google.cloud.tasks.v2beta3.GetTaskRequest()
);
request.name = '';
const expectedHeaderRequestParams = 'name=';
const expectedOptions = {
otherArgs: {
headers: {
'x-goog-request-params': expectedHeaderRequestParams,
},
},
};
const expectedError = new Error('expected');
client.innerApiCalls.getTask = stubSimpleCall(undefined, expectedError);
await assert.rejects(client.getTask(request), expectedError);
assert(
(client.innerApiCalls.getTask as SinonStub)
.getCall(0)
.calledWith(request, expectedOptions, undefined)
);
});
});
describe('createTask', () => {
it('invokes createTask without error', async () => {
const client = new cloudtasksModule.v2beta3.CloudTasksClient({
credentials: {client_email: 'bogus', private_key: 'bogus'},
projectId: 'bogus',
});
client.initialize();
const request = generateSampleMessage(
new protos.google.cloud.tasks.v2beta3.CreateTaskRequest()
);
request.parent = '';
const expectedHeaderRequestParams = 'parent=';
const expectedOptions = {
otherArgs: {
headers: {
'x-goog-request-params': expectedHeaderRequestParams,
},
},
};
const expectedResponse = generateSampleMessage(
new protos.google.cloud.tasks.v2beta3.Task()
);
client.innerApiCalls.createTask = stubSimpleCall(expectedResponse);
const [response] = await client.createTask(request);
assert.deepStrictEqual(response, expectedResponse);
assert(
(client.innerApiCalls.createTask as SinonStub)
.getCall(0)
.calledWith(request, expectedOptions, undefined)
);
});
it('invokes createTask without error using callback', async () => {
const client = new cloudtasksModule.v2beta3.CloudTasksClient({
credentials: {client_email: 'bogus', private_key: 'bogus'},
projectId: 'bogus',
});
client.initialize();
const request = generateSampleMessage(
new protos.google.cloud.tasks.v2beta3.CreateTaskRequest()
);
request.parent = '';
const expectedHeaderRequestParams = 'parent=';
const expectedOptions = {
otherArgs: {
headers: {
'x-goog-request-params': expectedHeaderRequestParams,
},
},
};
const expectedResponse = generateSampleMessage(
new protos.google.cloud.tasks.v2beta3.Task()
);
client.innerApiCalls.createTask =
stubSimpleCallWithCallback(expectedResponse);
const promise = new Promise((resolve, reject) => {
client.createTask(
request,
(
err?: Error | null,
result?: protos.google.cloud.tasks.v2beta3.ITask | null
) => {
if (err) {
reject(err);
} else {
resolve(result);
}
}
);
});
const response = await promise;
assert.deepStrictEqual(response, expectedResponse);
assert(
(client.innerApiCalls.createTask as SinonStub)
.getCall(0)
.calledWith(request, expectedOptions /*, callback defined above */)
);
});
it('invokes createTask with error', async () => {
const client = new cloudtasksModule.v2beta3.CloudTasksClient({
credentials: {client_email: 'bogus', private_key: 'bogus'},
projectId: 'bogus',
});
client.initialize();
const request = generateSampleMessage(
new protos.google.cloud.tasks.v2beta3.CreateTaskRequest()
);
request.parent = '';
const expectedHeaderRequestParams = 'parent=';
const expectedOptions = {
otherArgs: {
headers: {
'x-goog-request-params': expectedHeaderRequestParams,
},
},
};
const expectedError = new Error('expected');
client.innerApiCalls.createTask = stubSimpleCall(
undefined,
expectedError
);
await assert.rejects(client.createTask(request), expectedError);
assert(
(client.innerApiCalls.createTask as SinonStub)
.getCall(0)
.calledWith(request, expectedOptions, undefined)
);
});
});
describe('deleteTask', () => {
it('invokes deleteTask without error', async () => {
const client = new cloudtasksModule.v2beta3.CloudTasksClient({
credentials: {client_email: 'bogus', private_key: 'bogus'},
projectId: 'bogus',
});
client.initialize();
const request = generateSampleMessage(
new protos.google.cloud.tasks.v2beta3.DeleteTaskRequest()
);
request.name = '';
const expectedHeaderRequestParams = 'name=';
const expectedOptions = {
otherArgs: {
headers: {
'x-goog-request-params': expectedHeaderRequestParams,
},
},
};
const expectedResponse = generateSampleMessage(
new protos.google.protobuf.Empty()
);
client.innerApiCalls.deleteTask = stubSimpleCall(expectedResponse);
const [response] = await client.deleteTask(request);
assert.deepStrictEqual(response, expectedResponse);
assert(
(client.innerApiCalls.deleteTask as SinonStub)
.getCall(0)
.calledWith(request, expectedOptions, undefined)
);
});
it('invokes deleteTask without error using callback', async () => {
const client = new cloudtasksModule.v2beta3.CloudTasksClient({
credentials: {client_email: 'bogus', private_key: 'bogus'},
projectId: 'bogus',
});
client.initialize();
const request = generateSampleMessage(
new protos.google.cloud.tasks.v2beta3.DeleteTaskRequest()
);
request.name = '';
const expectedHeaderRequestParams = 'name=';
const expectedOptions = {
otherArgs: {
headers: {
'x-goog-request-params': expectedHeaderRequestParams,
},
},
};
const expectedResponse = generateSampleMessage(
new protos.google.protobuf.Empty()
);
client.innerApiCalls.deleteTask =
stubSimpleCallWithCallback(expectedResponse);
const promise = new Promise((resolve, reject) => {
client.deleteTask(
request,
(
err?: Error | null,
result?: protos.google.protobuf.IEmpty | null
) => {
if (err) {
reject(err);
} else {
resolve(result);
}
}
);
});
const response = await promise;
assert.deepStrictEqual(response, expectedResponse);
assert(
(client.innerApiCalls.deleteTask as SinonStub)
.getCall(0)
.calledWith(request, expectedOptions /*, callback defined above */)
);
});
it('invokes deleteTask with error', async () => {
const client = new cloudtasksModule.v2beta3.CloudTasksClient({
credentials: {client_email: 'bogus', private_key: 'bogus'},
projectId: 'bogus',
});
client.initialize();
const request = generateSampleMessage(
new protos.google.cloud.tasks.v2beta3.DeleteTaskRequest()
);
request.name = '';
const expectedHeaderRequestParams = 'name=';
const expectedOptions = {
otherArgs: {
headers: {
'x-goog-request-params': expectedHeaderRequestParams,
},
},
};
const expectedError = new Error('expected');
client.innerApiCalls.deleteTask = stubSimpleCall(
undefined,
expectedError
);
await assert.rejects(client.deleteTask(request), expectedError);
assert(
(client.innerApiCalls.deleteTask as SinonStub)
.getCall(0)
.calledWith(request, expectedOptions, undefined)
);
});
});
describe('runTask', () => {
it('invokes runTask without error', async () => {
const client = new cloudtasksModule.v2beta3.CloudTasksClient({
credentials: {client_email: 'bogus', private_key: 'bogus'},
projectId: 'bogus',
});
client.initialize();
const request = generateSampleMessage(
new protos.google.cloud.tasks.v2beta3.RunTaskRequest()
);
request.name = '';
const expectedHeaderRequestParams = 'name=';
const expectedOptions = {
otherArgs: {
headers: {
'x-goog-request-params': expectedHeaderRequestParams,
},
},
};
const expectedResponse = generateSampleMessage(
new protos.google.cloud.tasks.v2beta3.Task()
);
client.innerApiCalls.runTask = stubSimpleCall(expectedResponse);
const [response] = await client.runTask(request);
assert.deepStrictEqual(response, expectedResponse);
assert(
(client.innerApiCalls.runTask as SinonStub)
.getCall(0)
.calledWith(request, expectedOptions, undefined)
);
});
it('invokes runTask without error using callback', async () => {
const client = new cloudtasksModule.v2beta3.CloudTasksClient({
credentials: {client_email: 'bogus', private_key: 'bogus'},
projectId: 'bogus',
});
client.initialize();
const request = generateSampleMessage(
new protos.google.cloud.tasks.v2beta3.RunTaskRequest()
);
request.name = '';
const expectedHeaderRequestParams = 'name=';
const expectedOptions = {
otherArgs: {
headers: {
'x-goog-request-params': expectedHeaderRequestParams,
},
},
};
const expectedResponse = generateSampleMessage(
new protos.google.cloud.tasks.v2beta3.Task()
);
client.innerApiCalls.runTask =
stubSimpleCallWithCallback(expectedResponse);
const promise = new Promise((resolve, reject) => {
client.runTask(
request,
(
err?: Error | null,
result?: protos.google.cloud.tasks.v2beta3.ITask | null
) => {
if (err) {
reject(err);
} else {
resolve(result);
}
}
);
});
const response = await promise;
assert.deepStrictEqual(response, expectedResponse);
assert(
(client.innerApiCalls.runTask as SinonStub)
.getCall(0)
.calledWith(request, expectedOptions /*, callback defined above */)
);
});
it('invokes runTask with error', async () => {
const client = new cloudtasksModule.v2beta3.CloudTasksClient({
credentials: {client_email: 'bogus', private_key: 'bogus'},
projectId: 'bogus',
});
client.initialize();
const request = generateSampleMessage(
new protos.google.cloud.tasks.v2beta3.RunTaskRequest()
);
request.name = '';
const expectedHeaderRequestParams = 'name=';
const expectedOptions = {
otherArgs: {
headers: {
'x-goog-request-params': expectedHeaderRequestParams,
},
},
};
const expectedError = new Error('expected');
client.innerApiCalls.runTask = stubSimpleCall(undefined, expectedError);
await assert.rejects(client.runTask(request), expectedError);
assert(
(client.innerApiCalls.runTask as SinonStub)
.getCall(0)
.calledWith(request, expectedOptions, undefined)
);
});
});
describe('listQueues', () => {
it('invokes listQueues without error', async () => {
const client = new cloudtasksModule.v2beta3.CloudTasksClient({
credentials: {client_email: 'bogus', private_key: 'bogus'},
projectId: 'bogus',
});
client.initialize();
const request = generateSampleMessage(
new protos.google.cloud.tasks.v2beta3.ListQueuesRequest()
);
request.parent = '';
const expectedHeaderRequestParams = 'parent=';
const expectedOptions = {
otherArgs: {
headers: {
'x-goog-request-params': expectedHeaderRequestParams,
},
},
};
const expectedResponse = [
generateSampleMessage(new protos.google.cloud.tasks.v2beta3.Queue()),
generateSampleMessage(new protos.google.cloud.tasks.v2beta3.Queue()),
generateSampleMessage(new protos.google.cloud.tasks.v2beta3.Queue()),
];
client.innerApiCalls.listQueues = stubSimpleCall(expectedResponse);
const [response] = await client.listQueues(request);
assert.deepStrictEqual(response, expectedResponse);
assert(
(client.innerApiCalls.listQueues as SinonStub)
.getCall(0)
.calledWith(request, expectedOptions, undefined)
);
});
it('invokes listQueues without error using callback', async () => {
const client = new cloudtasksModule.v2beta3.CloudTasksClient({
credentials: {client_email: 'bogus', private_key: 'bogus'},
projectId: 'bogus',
});
client.initialize();
const request = generateSampleMessage(
new protos.google.cloud.tasks.v2beta3.ListQueuesRequest()
);
request.parent = '';
const expectedHeaderRequestParams = 'parent=';
const expectedOptions = {
otherArgs: {
headers: {
'x-goog-request-params': expectedHeaderRequestParams,
},
},
};
const expectedResponse = [
generateSampleMessage(new protos.google.cloud.tasks.v2beta3.Queue()),
generateSampleMessage(new protos.google.cloud.tasks.v2beta3.Queue()),
generateSampleMessage(new protos.google.cloud.tasks.v2beta3.Queue()),
];
client.innerApiCalls.listQueues =
stubSimpleCallWithCallback(expectedResponse);
const promise = new Promise((resolve, reject) => {
client.listQueues(
request,
(
err?: Error | null,
result?: protos.google.cloud.tasks.v2beta3.IQueue[] | null
) => {
if (err) {
reject(err);
} else {
resolve(result);
}
}
);
});
const response = await promise;
assert.deepStrictEqual(response, expectedResponse);
assert(
(client.innerApiCalls.listQueues as SinonStub)
.getCall(0)
.calledWith(request, expectedOptions /*, callback defined above */)
);
});
it('invokes listQueues with error', async () => {
const client = new cloudtasksModule.v2beta3.CloudTasksClient({
credentials: {client_email: 'bogus', private_key: 'bogus'},
projectId: 'bogus',
});
client.initialize();
const request = generateSampleMessage(
new protos.google.cloud.tasks.v2beta3.ListQueuesRequest()
);
request.parent = '';
const expectedHeaderRequestParams = 'parent=';
const expectedOptions = {
otherArgs: {
headers: {
'x-goog-request-params': expectedHeaderRequestParams,
},
},
};
const expectedError = new Error('expected');
client.innerApiCalls.listQueues = stubSimpleCall(
undefined,
expectedError
);
await assert.rejects(client.listQueues(request), expectedError);
assert(
(client.innerApiCalls.listQueues as SinonStub)
.getCall(0)
.calledWith(request, expectedOptions, undefined)
);
});
it('invokes listQueuesStream without error', async () => {
const client = new cloudtasksModule.v2beta3.CloudTasksClient({
credentials: {client_email: 'bogus', private_key: 'bogus'},
projectId: 'bogus',
});
client.initialize();
const request = generateSampleMessage(
new protos.google.cloud.tasks.v2beta3.ListQueuesRequest()
);
request.parent = '';
const expectedHeaderRequestParams = 'parent=';
const expectedResponse = [
generateSampleMessage(new protos.google.cloud.tasks.v2beta3.Queue()),
generateSampleMessage(new protos.google.cloud.tasks.v2beta3.Queue()),
generateSampleMessage(new protos.google.cloud.tasks.v2beta3.Queue()),
];
client.descriptors.page.listQueues.createStream =
stubPageStreamingCall(expectedResponse);
const stream = client.listQueuesStream(request);
const promise = new Promise((resolve, reject) => {
const responses: protos.google.cloud.tasks.v2beta3.Queue[] = [];
stream.on(
'data',
(response: protos.google.cloud.tasks.v2beta3.Queue) => {
responses.push(response);
}
);
stream.on('end', () => {
resolve(responses);
});
stream.on('error', (err: Error) => {
reject(err);
});
});
const responses = await promise;
assert.deepStrictEqual(responses, expectedResponse);
assert(
(client.descriptors.page.listQueues.createStream as SinonStub)
.getCall(0)
.calledWith(client.innerApiCalls.listQueues, request)
);
assert.strictEqual(
(client.descriptors.page.listQueues.createStream as SinonStub).getCall(
0
).args[2].otherArgs.headers['x-goog-request-params'],
expectedHeaderRequestParams
);
});
it('invokes listQueuesStream with error', async () => {
const client = new cloudtasksModule.v2beta3.CloudTasksClient({
credentials: {client_email: 'bogus', private_key: 'bogus'},
projectId: 'bogus',
});
client.initialize();
const request = generateSampleMessage(
new protos.google.cloud.tasks.v2beta3.ListQueuesRequest()
);
request.parent = '';
const expectedHeaderRequestParams = 'parent=';
const expectedError = new Error('expected');
client.descriptors.page.listQueues.createStream = stubPageStreamingCall(
undefined,
expectedError
);
const stream = client.listQueuesStream(request);
const promise = new Promise((resolve, reject) => {
const responses: protos.google.cloud.tasks.v2beta3.Queue[] = [];
stream.on(
'data',
(response: protos.google.cloud.tasks.v2beta3.Queue) => {
responses.push(response);
}
);
stream.on('end', () => {
resolve(responses);
});
stream.on('error', (err: Error) => {
reject(err);
});
});
await assert.rejects(promise, expectedError);
assert(
(client.descriptors.page.listQueues.createStream as SinonStub)
.getCall(0)
.calledWith(client.innerApiCalls.listQueues, request)
);
assert.strictEqual(
(client.descriptors.page.listQueues.createStream as SinonStub).getCall(
0
).args[2].otherArgs.headers['x-goog-request-params'],
expectedHeaderRequestParams
);
});
it('uses async iteration with listQueues without error', async () => {
const client = new cloudtasksModule.v2beta3.CloudTasksClient({
credentials: {client_email: 'bogus', private_key: 'bogus'},
projectId: 'bogus',
});
client.initialize();
const request = generateSampleMessage(
new protos.google.cloud.tasks.v2beta3.ListQueuesRequest()
);
request.parent = '';
const expectedHeaderRequestParams = 'parent=';
const expectedResponse = [
generateSampleMessage(new protos.google.cloud.tasks.v2beta3.Queue()),
generateSampleMessage(new protos.google.cloud.tasks.v2beta3.Queue()),
generateSampleMessage(new protos.google.cloud.tasks.v2beta3.Queue()),
];
client.descriptors.page.listQueues.asyncIterate =
stubAsyncIterationCall(expectedResponse);
const responses: protos.google.cloud.tasks.v2beta3.IQueue[] = [];
const iterable = client.listQueuesAsync(request);
for await (const resource of iterable) {
responses.push(resource!);
}
assert.deepStrictEqual(responses, expectedResponse);
assert.deepStrictEqual(
(client.descriptors.page.listQueues.asyncIterate as SinonStub).getCall(
0
).args[1],
request
);
assert.strictEqual(
(client.descriptors.page.listQueues.asyncIterate as SinonStub).getCall(
0
).args[2].otherArgs.headers['x-goog-request-params'],
expectedHeaderRequestParams
);
});
it('uses async iteration with listQueues with error', async () => {
const client = new cloudtasksModule.v2beta3.CloudTasksClient({
credentials: {client_email: 'bogus', private_key: 'bogus'},
projectId: 'bogus',
});
client.initialize();
const request = generateSampleMessage(
new protos.google.cloud.tasks.v2beta3.ListQueuesRequest()
);
request.parent = '';
const expectedHeaderRequestParams = 'parent=';
const expectedError = new Error('expected');
client.descriptors.page.listQueues.asyncIterate = stubAsyncIterationCall(
undefined,
expectedError
);
const iterable = client.listQueuesAsync(request);
await assert.rejects(async () => {
const responses: protos.google.cloud.tasks.v2beta3.IQueue[] = [];
for await (const resource of iterable) {
responses.push(resource!);
}
});
assert.deepStrictEqual(
(client.descriptors.page.listQueues.asyncIterate as SinonStub).getCall(
0
).args[1],
request
);
assert.strictEqual(
(client.descriptors.page.listQueues.asyncIterate as SinonStub).getCall(
0
).args[2].otherArgs.headers['x-goog-request-params'],
expectedHeaderRequestParams
);
});
});
describe('listTasks', () => {
it('invokes listTasks without error', async () => {
const client = new cloudtasksModule.v2beta3.CloudTasksClient({
credentials: {client_email: 'bogus', private_key: 'bogus'},
projectId: 'bogus',
});
client.initialize();
const request = generateSampleMessage(
new protos.google.cloud.tasks.v2beta3.ListTasksRequest()
);
request.parent = '';
const expectedHeaderRequestParams = 'parent=';
const expectedOptions = {
otherArgs: {
headers: {
'x-goog-request-params': expectedHeaderRequestParams,
},
},
};
const expectedResponse = [
generateSampleMessage(new protos.google.cloud.tasks.v2beta3.Task()),
generateSampleMessage(new protos.google.cloud.tasks.v2beta3.Task()),
generateSampleMessage(new protos.google.cloud.tasks.v2beta3.Task()),
];
client.innerApiCalls.listTasks = stubSimpleCall(expectedResponse);
const [response] = await client.listTasks(request);
assert.deepStrictEqual(response, expectedResponse);
assert(
(client.innerApiCalls.listTasks as SinonStub)
.getCall(0)
.calledWith(request, expectedOptions, undefined)
);
});
it('invokes listTasks without error using callback', async () => {
const client = new cloudtasksModule.v2beta3.CloudTasksClient({
credentials: {client_email: 'bogus', private_key: 'bogus'},
projectId: 'bogus',
});
client.initialize();
const request = generateSampleMessage(
new protos.google.cloud.tasks.v2beta3.ListTasksRequest()
);
request.parent = '';
const expectedHeaderRequestParams = 'parent=';
const expectedOptions = {
otherArgs: {
headers: {
'x-goog-request-params': expectedHeaderRequestParams,
},
},
};
const expectedResponse = [
generateSampleMessage(new protos.google.cloud.tasks.v2beta3.Task()),
generateSampleMessage(new protos.google.cloud.tasks.v2beta3.Task()),
generateSampleMessage(new protos.google.cloud.tasks.v2beta3.Task()),
];
client.innerApiCalls.listTasks =
stubSimpleCallWithCallback(expectedResponse);
const promise = new Promise((resolve, reject) => {
client.listTasks(
request,
(
err?: Error | null,
result?: protos.google.cloud.tasks.v2beta3.ITask[] | null
) => {
if (err) {
reject(err);
} else {
resolve(result);
}
}
);
});
const response = await promise;
assert.deepStrictEqual(response, expectedResponse);
assert(
(client.innerApiCalls.listTasks as SinonStub)
.getCall(0)
.calledWith(request, expectedOptions /*, callback defined above */)
);
});
it('invokes listTasks with error', async () => {
const client = new cloudtasksModule.v2beta3.CloudTasksClient({
credentials: {client_email: 'bogus', private_key: 'bogus'},
projectId: 'bogus',
});
client.initialize();
const request = generateSampleMessage(
new protos.google.cloud.tasks.v2beta3.ListTasksRequest()
);
request.parent = '';
const expectedHeaderRequestParams = 'parent=';
const expectedOptions = {
otherArgs: {
headers: {
'x-goog-request-params': expectedHeaderRequestParams,
},
},
};
const expectedError = new Error('expected');
client.innerApiCalls.listTasks = stubSimpleCall(undefined, expectedError);
await assert.rejects(client.listTasks(request), expectedError);
assert(
(client.innerApiCalls.listTasks as SinonStub)
.getCall(0)
.calledWith(request, expectedOptions, undefined)
);
});
it('invokes listTasksStream without error', async () => {
  const client = new cloudtasksModule.v2beta3.CloudTasksClient({
    credentials: {client_email: 'bogus', private_key: 'bogus'},
    projectId: 'bogus',
  });
  client.initialize();
  const request = generateSampleMessage(
    new protos.google.cloud.tasks.v2beta3.ListTasksRequest()
  );
  request.parent = '';
  const expectedHeaderRequestParams = 'parent=';
  // Three sample tasks the stubbed page stream will emit.
  const expectedResponse = [
    generateSampleMessage(new protos.google.cloud.tasks.v2beta3.Task()),
    generateSampleMessage(new protos.google.cloud.tasks.v2beta3.Task()),
    generateSampleMessage(new protos.google.cloud.tasks.v2beta3.Task()),
  ];
  client.descriptors.page.listTasks.createStream =
    stubPageStreamingCall(expectedResponse);
  const stream = client.listTasksStream(request);
  // Drain the stream: collect every 'data' event until 'end' or 'error'.
  const responses = await new Promise((resolve, reject) => {
    const collected: protos.google.cloud.tasks.v2beta3.Task[] = [];
    stream.on('data', (response: protos.google.cloud.tasks.v2beta3.Task) => {
      collected.push(response);
    });
    stream.on('end', () => resolve(collected));
    stream.on('error', (err: Error) => reject(err));
  });
  assert.deepStrictEqual(responses, expectedResponse);
  const createStreamStub =
    client.descriptors.page.listTasks.createStream as SinonStub;
  assert(
    createStreamStub
      .getCall(0)
      .calledWith(client.innerApiCalls.listTasks, request)
  );
  assert.strictEqual(
    createStreamStub.getCall(0).args[2].otherArgs.headers[
      'x-goog-request-params'
    ],
    expectedHeaderRequestParams
  );
});
it('invokes listTasksStream with error', async () => {
  const client = new cloudtasksModule.v2beta3.CloudTasksClient({
    credentials: {client_email: 'bogus', private_key: 'bogus'},
    projectId: 'bogus',
  });
  client.initialize();
  const request = generateSampleMessage(
    new protos.google.cloud.tasks.v2beta3.ListTasksRequest()
  );
  request.parent = '';
  const expectedHeaderRequestParams = 'parent=';
  const expectedError = new Error('expected');
  // Stub the stream factory so the resulting stream fails immediately.
  client.descriptors.page.listTasks.createStream = stubPageStreamingCall(
    undefined,
    expectedError
  );
  const stream = client.listTasksStream(request);
  const drained = new Promise((resolve, reject) => {
    const collected: protos.google.cloud.tasks.v2beta3.Task[] = [];
    stream.on('data', (response: protos.google.cloud.tasks.v2beta3.Task) => {
      collected.push(response);
    });
    stream.on('end', () => resolve(collected));
    stream.on('error', (err: Error) => reject(err));
  });
  await assert.rejects(drained, expectedError);
  const createStreamStub =
    client.descriptors.page.listTasks.createStream as SinonStub;
  assert(
    createStreamStub
      .getCall(0)
      .calledWith(client.innerApiCalls.listTasks, request)
  );
  assert.strictEqual(
    createStreamStub.getCall(0).args[2].otherArgs.headers[
      'x-goog-request-params'
    ],
    expectedHeaderRequestParams
  );
});
it('uses async iteration with listTasks without error', async () => {
  const client = new cloudtasksModule.v2beta3.CloudTasksClient({
    credentials: {client_email: 'bogus', private_key: 'bogus'},
    projectId: 'bogus',
  });
  client.initialize();
  const request = generateSampleMessage(
    new protos.google.cloud.tasks.v2beta3.ListTasksRequest()
  );
  request.parent = '';
  const expectedHeaderRequestParams = 'parent=';
  const expectedResponse = [
    generateSampleMessage(new protos.google.cloud.tasks.v2beta3.Task()),
    generateSampleMessage(new protos.google.cloud.tasks.v2beta3.Task()),
    generateSampleMessage(new protos.google.cloud.tasks.v2beta3.Task()),
  ];
  client.descriptors.page.listTasks.asyncIterate =
    stubAsyncIterationCall(expectedResponse);
  // Exhaust the async iterable and collect everything it yields.
  const collected: protos.google.cloud.tasks.v2beta3.ITask[] = [];
  for await (const resource of client.listTasksAsync(request)) {
    collected.push(resource!);
  }
  assert.deepStrictEqual(collected, expectedResponse);
  const iterateStub =
    client.descriptors.page.listTasks.asyncIterate as SinonStub;
  assert.deepStrictEqual(iterateStub.getCall(0).args[1], request);
  assert.strictEqual(
    iterateStub.getCall(0).args[2].otherArgs.headers[
      'x-goog-request-params'
    ],
    expectedHeaderRequestParams
  );
});
it('uses async iteration with listTasks with error', async () => {
  const client = new cloudtasksModule.v2beta3.CloudTasksClient({
    credentials: {client_email: 'bogus', private_key: 'bogus'},
    projectId: 'bogus',
  });
  client.initialize();
  const request = generateSampleMessage(
    new protos.google.cloud.tasks.v2beta3.ListTasksRequest()
  );
  request.parent = '';
  const expectedHeaderRequestParams = 'parent=';
  const expectedError = new Error('expected');
  client.descriptors.page.listTasks.asyncIterate = stubAsyncIterationCall(
    undefined,
    expectedError
  );
  const iterable = client.listTasksAsync(request);
  // Draining the iterable must reject with the stubbed error specifically
  // (not just any rejection) — mirrors the other "with error" tests above.
  await assert.rejects(async () => {
    const responses: protos.google.cloud.tasks.v2beta3.ITask[] = [];
    for await (const resource of iterable) {
      responses.push(resource!);
    }
  }, expectedError);
  assert.deepStrictEqual(
    (client.descriptors.page.listTasks.asyncIterate as SinonStub).getCall(0)
      .args[1],
    request
  );
  assert.strictEqual(
    (client.descriptors.page.listTasks.asyncIterate as SinonStub).getCall(0)
      .args[2].otherArgs.headers['x-goog-request-params'],
    expectedHeaderRequestParams
  );
});
});
// Path-template helpers: render/match are stubbed with sinon, so these tests
// only verify that each helper delegates to the right template with the right
// arguments — they do not exercise real path rendering or parsing.
describe('Path templates', () => {
  describe('location', () => {
    const fakePath = '/rendered/path/location';
    const expectedParameters = {
      project: 'projectValue',
      location: 'locationValue',
    };
    const client = new cloudtasksModule.v2beta3.CloudTasksClient({
      credentials: {client_email: 'bogus', private_key: 'bogus'},
      projectId: 'bogus',
    });
    client.initialize();
    client.pathTemplates.locationPathTemplate.render = sinon
      .stub()
      .returns(fakePath);
    client.pathTemplates.locationPathTemplate.match = sinon
      .stub()
      .returns(expectedParameters);
    it('locationPath', () => {
      const result = client.locationPath('projectValue', 'locationValue');
      assert.strictEqual(result, fakePath);
      assert(
        (client.pathTemplates.locationPathTemplate.render as SinonStub)
          .getCall(-1)
          .calledWith(expectedParameters)
      );
    });
    it('matchProjectFromLocationName', () => {
      const result = client.matchProjectFromLocationName(fakePath);
      assert.strictEqual(result, 'projectValue');
      assert(
        (client.pathTemplates.locationPathTemplate.match as SinonStub)
          .getCall(-1)
          .calledWith(fakePath)
      );
    });
    it('matchLocationFromLocationName', () => {
      const result = client.matchLocationFromLocationName(fakePath);
      assert.strictEqual(result, 'locationValue');
      assert(
        (client.pathTemplates.locationPathTemplate.match as SinonStub)
          .getCall(-1)
          .calledWith(fakePath)
      );
    });
  });
  describe('project', () => {
    const fakePath = '/rendered/path/project';
    const expectedParameters = {
      project: 'projectValue',
    };
    const client = new cloudtasksModule.v2beta3.CloudTasksClient({
      credentials: {client_email: 'bogus', private_key: 'bogus'},
      projectId: 'bogus',
    });
    client.initialize();
    client.pathTemplates.projectPathTemplate.render = sinon
      .stub()
      .returns(fakePath);
    client.pathTemplates.projectPathTemplate.match = sinon
      .stub()
      .returns(expectedParameters);
    it('projectPath', () => {
      const result = client.projectPath('projectValue');
      assert.strictEqual(result, fakePath);
      assert(
        (client.pathTemplates.projectPathTemplate.render as SinonStub)
          .getCall(-1)
          .calledWith(expectedParameters)
      );
    });
    it('matchProjectFromProjectName', () => {
      const result = client.matchProjectFromProjectName(fakePath);
      assert.strictEqual(result, 'projectValue');
      assert(
        (client.pathTemplates.projectPathTemplate.match as SinonStub)
          .getCall(-1)
          .calledWith(fakePath)
      );
    });
  });
  describe('queue', () => {
    const fakePath = '/rendered/path/queue';
    const expectedParameters = {
      project: 'projectValue',
      location: 'locationValue',
      queue: 'queueValue',
    };
    const client = new cloudtasksModule.v2beta3.CloudTasksClient({
      credentials: {client_email: 'bogus', private_key: 'bogus'},
      projectId: 'bogus',
    });
    client.initialize();
    client.pathTemplates.queuePathTemplate.render = sinon
      .stub()
      .returns(fakePath);
    client.pathTemplates.queuePathTemplate.match = sinon
      .stub()
      .returns(expectedParameters);
    it('queuePath', () => {
      const result = client.queuePath(
        'projectValue',
        'locationValue',
        'queueValue'
      );
      assert.strictEqual(result, fakePath);
      assert(
        (client.pathTemplates.queuePathTemplate.render as SinonStub)
          .getCall(-1)
          .calledWith(expectedParameters)
      );
    });
    it('matchProjectFromQueueName', () => {
      const result = client.matchProjectFromQueueName(fakePath);
      assert.strictEqual(result, 'projectValue');
      assert(
        (client.pathTemplates.queuePathTemplate.match as SinonStub)
          .getCall(-1)
          .calledWith(fakePath)
      );
    });
    it('matchLocationFromQueueName', () => {
      const result = client.matchLocationFromQueueName(fakePath);
      assert.strictEqual(result, 'locationValue');
      assert(
        (client.pathTemplates.queuePathTemplate.match as SinonStub)
          .getCall(-1)
          .calledWith(fakePath)
      );
    });
    it('matchQueueFromQueueName', () => {
      const result = client.matchQueueFromQueueName(fakePath);
      assert.strictEqual(result, 'queueValue');
      assert(
        (client.pathTemplates.queuePathTemplate.match as SinonStub)
          .getCall(-1)
          .calledWith(fakePath)
      );
    });
  });
  describe('task', () => {
    const fakePath = '/rendered/path/task';
    const expectedParameters = {
      project: 'projectValue',
      location: 'locationValue',
      queue: 'queueValue',
      task: 'taskValue',
    };
    const client = new cloudtasksModule.v2beta3.CloudTasksClient({
      credentials: {client_email: 'bogus', private_key: 'bogus'},
      projectId: 'bogus',
    });
    client.initialize();
    client.pathTemplates.taskPathTemplate.render = sinon
      .stub()
      .returns(fakePath);
    client.pathTemplates.taskPathTemplate.match = sinon
      .stub()
      .returns(expectedParameters);
    it('taskPath', () => {
      const result = client.taskPath(
        'projectValue',
        'locationValue',
        'queueValue',
        'taskValue'
      );
      assert.strictEqual(result, fakePath);
      assert(
        (client.pathTemplates.taskPathTemplate.render as SinonStub)
          .getCall(-1)
          .calledWith(expectedParameters)
      );
    });
    it('matchProjectFromTaskName', () => {
      const result = client.matchProjectFromTaskName(fakePath);
      assert.strictEqual(result, 'projectValue');
      assert(
        (client.pathTemplates.taskPathTemplate.match as SinonStub)
          .getCall(-1)
          .calledWith(fakePath)
      );
    });
    it('matchLocationFromTaskName', () => {
      const result = client.matchLocationFromTaskName(fakePath);
      assert.strictEqual(result, 'locationValue');
      assert(
        (client.pathTemplates.taskPathTemplate.match as SinonStub)
          .getCall(-1)
          .calledWith(fakePath)
      );
    });
    it('matchQueueFromTaskName', () => {
      const result = client.matchQueueFromTaskName(fakePath);
      assert.strictEqual(result, 'queueValue');
      assert(
        (client.pathTemplates.taskPathTemplate.match as SinonStub)
          .getCall(-1)
          .calledWith(fakePath)
      );
    });
    it('matchTaskFromTaskName', () => {
      const result = client.matchTaskFromTaskName(fakePath);
      assert.strictEqual(result, 'taskValue');
      assert(
        (client.pathTemplates.taskPathTemplate.match as SinonStub)
          .getCall(-1)
          .calledWith(fakePath)
      );
    });
  });
});
}); | the_stack |
import { Injectable } from '@angular/core';
import { CoreConstants } from '@/core/constants';
import { CoreSite, CoreSiteWSPreSets } from '@classes/site';
import { CoreCourseAnyModuleData } from '@features/course/services/course';
import { CoreCourses } from '@features/courses/services/courses';
import { CoreApp } from '@services/app';
import { CoreFilepool } from '@services/filepool';
import { CoreLang } from '@services/lang';
import { CoreSites } from '@services/sites';
import { CoreTextUtils } from '@services/utils/text';
import { CoreUtils, PromiseDefer } from '@services/utils/utils';
import { CoreWSExternalFile, CoreWSExternalWarning } from '@services/ws';
import { makeSingleton } from '@singletons';
import { CoreEvents } from '@singletons/events';
import { CoreLogger } from '@singletons/logger';
import { CoreSitePluginsModuleHandler } from '../classes/handlers/module-handler';
// Prefix applied to every WS cache key generated by this service.
const ROOT_CACHE_KEY = 'CoreSitePlugins:';
/**
 * Service to provide functionalities regarding site plugins.
 */
@Injectable({ providedIn: 'root' })
export class CoreSitePluginsProvider {

    static readonly COMPONENT = 'CoreSitePlugins';
    static readonly UPDATE_COURSE_CONTENT = 'siteplugins_update_course_content';

    protected logger: CoreLogger;
    protected sitePlugins: {[name: string]: CoreSitePluginsHandler} = {}; // Site plugins registered.
    protected sitePluginPromises: {[name: string]: Promise<void>} = {}; // Promises of loading plugins.
    protected fetchPluginsDeferred: PromiseDefer<void>;
    protected moduleHandlerInstances: Record<string, CoreSitePluginsModuleHandler> = {};

    hasSitePluginsLoaded = false;
    sitePluginsFinishedLoading = false;

    constructor() {
        this.logger = CoreLogger.getInstance('CoreSitePluginsProvider');

        // Flag when all site plugins finished loading; one-shot observer.
        const observer = CoreEvents.on(CoreEvents.SITE_PLUGINS_LOADED, () => {
            this.sitePluginsFinishedLoading = true;
            observer?.off();
        });

        // Initialize deferred at start and on logout.
        this.fetchPluginsDeferred = CoreUtils.promiseDefer();
        CoreEvents.on(CoreEvents.LOGOUT, () => {
            this.fetchPluginsDeferred = CoreUtils.promiseDefer();
        });
    }

    /**
     * Add some params that will always be sent for get content.
     *
     * @param args Original params.
     * @param site Site. If not defined, current site.
     * @return Promise resolved with the new params.
     */
    protected async addDefaultArgs<T extends Record<string, unknown> = Record<string, unknown>>(
        args: T,
        site?: CoreSite,
    ): Promise<T & CoreSitePluginsDefaultArgs> {
        args = args || {};
        site = site || CoreSites.getCurrentSite();

        const lang = await CoreLang.getCurrentLanguage();

        const defaultArgs: CoreSitePluginsDefaultArgs = {
            userid: <number> args.userid ?? site?.getUserId(),
            appid: CoreConstants.CONFIG.app_id,
            appversioncode: CoreConstants.CONFIG.versioncode,
            appversionname: CoreConstants.CONFIG.versionname,
            applang: lang,
            appcustomurlscheme: CoreConstants.CONFIG.customurlscheme,
            appisdesktop: false,
            appismobile: CoreApp.isMobile(),
            appiswide: CoreApp.isWide(),
            appplatform: 'browser',
        };

        if (args.appismobile) {
            defaultArgs.appplatform = CoreApp.isIOS() ? 'ios' : 'android';
        }

        // Caller-provided args win over defaults... no: defaults are spread last
        // on purpose so plugins always receive accurate app metadata.
        return {
            ...args,
            ...defaultArgs,
        };
    }

    /**
     * Call a WS for a site plugin.
     *
     * @param method WS method to use.
     * @param data Data to send to the WS.
     * @param preSets Extra options.
     * @param siteId Site ID. If not defined, current site.
     * @return Promise resolved with the response.
     */
    async callWS<T = unknown>(
        method: string,
        data: Record<string, unknown>,
        preSets?: CoreSiteWSPreSets,
        siteId?: string,
    ): Promise<T> {
        const site = await CoreSites.getSite(siteId);

        preSets = preSets || {};
        preSets.cacheKey = preSets.cacheKey || this.getCallWSCacheKey(method, data);

        return site.read<T>(method, data, preSets);
    }

    /**
     * Given the result of a init get_content and, optionally, the result of another get_content,
     * build an object with the data to pass to the JS of the get_content.
     *
     * @param initResult Result of the init WS call.
     * @param contentResult Result of the content WS call (if any).
     * @return An object with the data to pass to the JS.
     */
    createDataForJS(
        initResult?: CoreSitePluginsContent | null,
        contentResult?: CoreSitePluginsContent | null,
    ): Record<string, unknown> {
        let data: Record<string, unknown> = {};

        if (initResult) {
            // First of all, add the data returned by the init JS (if any).
            data = Object.assign(data, initResult.jsResult || {});

            // Now add some data returned by the init WS call.
            data.INIT_TEMPLATES = CoreUtils.objectToKeyValueMap(initResult.templates, 'id', 'html');
            data.INIT_OTHERDATA = initResult.otherdata;
        }

        if (contentResult) {
            // Now add the data returned by the content WS call.
            data.CONTENT_TEMPLATES = CoreUtils.objectToKeyValueMap(contentResult.templates, 'id', 'html');
            data.CONTENT_OTHERDATA = contentResult.otherdata;
        }

        return data;
    }

    /**
     * Get cache key for a WS call.
     *
     * @param method Name of the method.
     * @param data Data to identify the WS call.
     * @return Cache key.
     */
    getCallWSCacheKey(method: string, data: Record<string, unknown>): string {
        return this.getCallWSCommonCacheKey(method) + ':' + CoreUtils.sortAndStringify(data);
    }

    /**
     * Get common cache key for a WS call.
     *
     * @param method Name of the method.
     * @return Cache key.
     */
    protected getCallWSCommonCacheKey(method: string): string {
        return ROOT_CACHE_KEY + 'ws:' + method;
    }

    /**
     * Get a certain content for a site plugin.
     *
     * @param component Component where the class is. E.g. mod_assign.
     * @param method Method to execute in the class.
     * @param args The params for the method.
     * @param preSets Extra options.
     * @param siteId Site ID. If not defined, current site.
     * @return Promise resolved with the result.
     */
    async getContent(
        component: string,
        method: string,
        args?: Record<string, unknown>,
        preSets?: CoreSiteWSPreSets,
        siteId?: string,
    ): Promise<CoreSitePluginsContentParsed> {
        this.logger.debug(`Get content for component '${component}' and method '${method}'`);

        const site = await CoreSites.getSite(siteId);

        // Add some params that will always be sent.
        args = args || {};
        const argsToSend = await this.addDefaultArgs(args, site);

        // Now call the WS.
        const data: CoreSitePluginsGetContentWSParams = {
            component: component,
            method: method,
            args: CoreUtils.objectToArrayOfObjects(argsToSend, 'name', 'value', true),
        };

        preSets = preSets || {};
        // Cache key uses the original args (not the default-augmented ones) so
        // invalidateContent can recreate it without knowing the defaults.
        preSets.cacheKey = this.getContentCacheKey(component, method, args);
        preSets.updateFrequency = preSets.updateFrequency ?? CoreSite.FREQUENCY_OFTEN;

        const result = await site.read<CoreSitePluginsGetContentWSResponse>('tool_mobile_get_content', data, preSets);

        let otherData: Record<string, unknown> = {};
        if (result.otherdata) {
            otherData = <Record<string, unknown>> CoreUtils.objectToKeyValueMap(result.otherdata, 'name', 'value');

            // Try to parse all properties that could be JSON encoded strings.
            for (const name in otherData) {
                const value = otherData[name];

                if (typeof value == 'string' && (value[0] == '{' || value[0] == '[')) {
                    otherData[name] = CoreTextUtils.parseJSON(value);
                }
            }
        }

        return Object.assign(result, { otherdata: otherData });
    }

    /**
     * Get cache key for get content WS calls.
     *
     * @param component Component where the class is. E.g. mod_assign.
     * @param method Method to execute in the class.
     * @param args The params for the method.
     * @return Cache key.
     */
    protected getContentCacheKey(component: string, method: string, args: Record<string, unknown>): string {
        return ROOT_CACHE_KEY + 'content:' + component + ':' + method + ':' + CoreUtils.sortAndStringify(args);
    }

    /**
     * Get the value of a WS param for prefetch.
     *
     * @param component The component of the handler.
     * @param paramName Name of the param as defined by the handler.
     * @param courseId Course ID (if prefetching a course).
     * @param module The module object returned by WS (if prefetching a module).
     * @return The value, undefined if the param isn't supported.
     */
    protected getDownloadParam(
        component: string,
        paramName: string,
        courseId?: number,
        module?: CoreCourseAnyModuleData,
    ): [number] | number | undefined {
        switch (paramName) {
            case 'courseids':
                // The WS needs the list of course IDs. Create the list.
                return [courseId || 0];

            case component + 'id':
                // The WS needs the instance id.
                return module?.instance;

            default:
                // No more params supported for now.
        }
    }

    /**
     * Get the unique name of a handler (plugin + handler).
     *
     * @param plugin Data of the plugin.
     * @param handlerName Name of the handler inside the plugin.
     * @return Unique name.
     */
    getHandlerUniqueName(plugin: CoreSitePluginsPlugin, handlerName: string): string {
        return plugin.addon + '_' + handlerName;
    }

    /**
     * Get site plugins for site.
     *
     * @param siteId Site ID.
     * @return Promise resolved with the plugins.
     */
    async getPlugins(siteId?: string): Promise<CoreSitePluginsPlugin[]> {
        const site = await CoreSites.getSite(siteId);

        // Get the list of plugins. Try not to use cache.
        const data = await site.read<CoreSitePluginsGetPluginsSupportingMobileWSResponse>(
            'tool_mobile_get_plugins_supporting_mobile',
            {},
            { getFromCache: false },
        );

        // Return enabled plugins.
        return data.plugins.filter((plugin) => this.isSitePluginEnabled(plugin, site));
    }

    /**
     * Get a site plugin handler.
     *
     * @param name Unique name of the handler.
     * @return Handler.
     */
    getSitePluginHandler(name: string): CoreSitePluginsHandler | undefined {
        return this.sitePlugins[name];
    }

    /**
     * Get the current site plugin list.
     *
     * @return Plugin list ws info.
     */
    getCurrentSitePluginList(): CoreSitePluginsWSPlugin[] {
        return CoreUtils.objectToArray(this.sitePlugins).map((plugin) => plugin.plugin);
    }

    /**
     * Invalidate all WS call to a certain method.
     *
     * @param method WS method to use.
     * @param siteId Site ID. If not defined, current site.
     * @return Promise resolved when the data is invalidated.
     */
    async invalidateAllCallWSForMethod(method: string, siteId?: string): Promise<void> {
        const site = await CoreSites.getSite(siteId);

        await site.invalidateWsCacheForKeyStartingWith(this.getCallWSCommonCacheKey(method));
    }

    /**
     * Invalidate a WS call.
     *
     * @param method WS method to use.
     * @param data Data to send to the WS.
     * @param preSets Extra options.
     * @param siteId Site ID. If not defined, current site.
     * @return Promise resolved when the data is invalidated.
     */
    async invalidateCallWS(
        method: string,
        data: Record<string, unknown>,
        preSets?: CoreSiteWSPreSets,
        siteId?: string,
    ): Promise<void> {
        const site = await CoreSites.getSite(siteId);

        preSets = preSets || {};

        await site.invalidateWsCacheForKey(preSets.cacheKey || this.getCallWSCacheKey(method, data));
    }

    /**
     * Invalidate a page content.
     *
     * @param component Component where the class is. E.g. mod_assign.
     * @param callback Method to execute in the class.
     * @param args The params for the method.
     * @param siteId Site ID. If not defined, current site.
     * @return Promise resolved when the data is invalidated.
     */
    async invalidateContent(component: string, callback: string, args?: Record<string, unknown>, siteId?: string): Promise<void> {
        const site = await CoreSites.getSite(siteId);

        await site.invalidateWsCacheForKey(this.getContentCacheKey(component, callback, args || {}));
    }

    /**
     * Check if the get content WS is available.
     *
     * @deprecated since app 4.0
     */
    isGetContentAvailable(): boolean {
        return true;
    }

    /**
     * Check if a handler is enabled for a certain course.
     *
     * @param courseId Course ID to check.
     * @param restrictEnrolled If true or undefined, handler is only enabled for courses the user is enrolled in.
     * @param restrict Users and courses the handler is restricted to.
     * @return Whether the handler is enabled.
     */
    async isHandlerEnabledForCourse(
        courseId: number,
        restrictEnrolled?: boolean,
        restrict?: CoreSitePluginsContentRestrict,
    ): Promise<boolean> {
        if (restrict?.courses?.indexOf(courseId) == -1) {
            // Course is not in the list of restricted courses.
            return false;
        }

        if (restrictEnrolled || restrictEnrolled === undefined) {
            // Only enabled for courses the user is enrolled to. Check if the user is enrolled in the course.
            try {
                await CoreCourses.getUserCourse(courseId, true);
            } catch {
                return false;
            }
        }

        return true;
    }

    /**
     * Check if a handler is enabled for a certain user.
     *
     * @param userId User ID to check.
     * @param restrictCurrent Whether handler is only enabled for current user.
     * @param restrict Users and courses the handler is restricted to.
     * @return Whether the handler is enabled.
     */
    isHandlerEnabledForUser(userId: number, restrictCurrent?: boolean, restrict?: CoreSitePluginsContentRestrict): boolean {
        if (restrictCurrent && userId != CoreSites.getCurrentSite()?.getUserId()) {
            // Only enabled for current user.
            return false;
        }

        if (restrict?.users?.indexOf(userId) == -1) {
            // User is not in the list of restricted users.
            return false;
        }

        return true;
    }

    /**
     * Check if a certain plugin is a site plugin and it's enabled in a certain site.
     *
     * @param plugin Data of the plugin.
     * @param site Site affected.
     * @return Whether it's a site plugin and it's enabled.
     */
    isSitePluginEnabled(plugin: CoreSitePluginsPlugin, site: CoreSite): boolean {
        if (site.isFeatureDisabled('sitePlugin_' + plugin.component + '_' + plugin.addon) || !plugin.handlers) {
            return false;
        }

        // Site plugin not disabled. Check if it has handlers.
        if (!plugin.parsedHandlers) {
            plugin.parsedHandlers = CoreTextUtils.parseJSON(
                plugin.handlers,
                null,
                this.logger.error.bind(this.logger, 'Error parsing site plugin handlers'),
            );
        }

        return !!(plugin.parsedHandlers && Object.keys(plugin.parsedHandlers).length);
    }

    /**
     * Load other data into args as determined by useOtherData list.
     * If useOtherData is undefined, it won't add any data.
     * If useOtherData is an array, it will only copy the properties whose names are in the array.
     * If useOtherData is any other value, it will copy all the data from otherData to args.
     *
     * @param args The current args.
     * @param otherData All the other data.
     * @param useOtherData Names of the attributes to include.
     * @return New args.
     */
    loadOtherDataInArgs(
        args: Record<string, unknown> | undefined,
        otherData?: Record<string, unknown>,
        useOtherData?: string[] | unknown,
    ): Record<string, unknown> {
        if (!args) {
            args = {};
        } else {
            args = CoreUtils.clone(args);
        }

        otherData = otherData || {};

        if (useOtherData === undefined) {
            // No need to add other data, return args as they are.
            return args;
        } else if (Array.isArray(useOtherData)) {
            // Include only the properties specified in the array.
            for (const i in useOtherData) {
                const name = useOtherData[i];

                if (typeof otherData[name] == 'object' && otherData[name] !== null) {
                    // Stringify objects.
                    args[name] = JSON.stringify(otherData[name]);
                } else {
                    args[name] = otherData[name];
                }
            }
        } else {
            // Add all the data to args.
            for (const name in otherData) {
                if (typeof otherData[name] == 'object' && otherData[name] !== null) {
                    // Stringify objects.
                    args[name] = JSON.stringify(otherData[name]);
                } else {
                    args[name] = otherData[name];
                }
            }
        }

        return args;
    }

    /**
     * Prefetch offline functions for a site plugin handler.
     *
     * @param component The component of the handler.
     * @param args Params to send to the get_content calls.
     * @param handlerSchema The handler schema.
     * @param courseId Course ID (if prefetching a course).
     * @param module The module object returned by WS (if prefetching a module).
     * @param prefetch True to prefetch, false to download right away.
     * @param dirPath Path of the directory where to store all the content files.
     * @param site Site. If not defined, current site.
     * @return Promise resolved when done.
     */
    async prefetchFunctions(
        component: string,
        args: Record<string, unknown>,
        handlerSchema: CoreSitePluginsCourseModuleHandlerData,
        courseId?: number,
        module?: CoreCourseAnyModuleData,
        prefetch?: boolean,
        dirPath?: string,
        site?: CoreSite,
    ): Promise<void> {
        site = site || CoreSites.getCurrentSite();
        if (!site || !handlerSchema.offlinefunctions) {
            return;
        }

        const siteInstance = site;
        const offlineFunctions = handlerSchema.offlinefunctions;

        await Promise.all(Object.keys(offlineFunctions).map(async(method) => {
            if (siteInstance.wsAvailable(method)) {
                // The method is a WS.
                const paramsList = offlineFunctions[method];
                const cacheKey = this.getCallWSCacheKey(method, args);
                let params: Record<string, unknown> = {};

                if (!paramsList.length) {
                    // No params defined, send the default ones.
                    params = args;
                } else {
                    for (const i in paramsList) {
                        const paramName = paramsList[i];

                        if (args[paramName] !== undefined) {
                            params[paramName] = args[paramName];
                        } else {
                            // The param is not one of the default ones. Try to calculate the param to use.
                            const value = this.getDownloadParam(component, paramName, courseId, module);
                            if (value !== undefined) {
                                params[paramName] = value;
                            }
                        }
                    }
                }

                await this.callWS(method, params, { cacheKey });

                return;
            }

            // It's a method to get content.
            const preSets: CoreSiteWSPreSets = {
                component: component,
            };
            if (module) {
                preSets.componentId = module.id;
            }

            const result = await this.getContent(component, method, args, preSets);

            // Prefetch the files in the content.
            if (result.files.length) {
                await CoreFilepool.downloadOrPrefetchFiles(
                    siteInstance.getId(),
                    result.files,
                    !!prefetch,
                    false,
                    component,
                    module?.id,
                    dirPath,
                );
            }
        }));
    }

    /**
     * Store a site plugin handler.
     *
     * @param name A unique name to identify the handler.
     * @param handler Handler to set.
     */
    setSitePluginHandler(name: string, handler: CoreSitePluginsHandler): void {
        this.sitePlugins[name] = handler;
    }

    /**
     * Store the promise for a plugin that is being initialised.
     *
     * @param component Plugin component name.
     * @param promise Promise resolved when the plugin finishes initialising.
     */
    registerSitePluginPromise(component: string, promise: Promise<void>): void {
        this.sitePluginPromises[component] = promise;
    }

    /**
     * Set plugins fetched.
     */
    setPluginsFetched(): void {
        this.fetchPluginsDeferred.resolve();
    }

    /**
     * Set whether site plugins have been loaded.
     *
     * @param loaded Whether plugins have been loaded.
     */
    setPluginsLoaded(loaded?: boolean): void {
        this.hasSitePluginsLoaded = !!loaded;
    }

    /**
     * Is a plugin being initialised for the specified component?
     *
     * @param component Plugin component name.
     * @return Whether an initialisation promise was registered for the component.
     */
    sitePluginPromiseExists(component: string): boolean {
        return !!this.sitePluginPromises[component];
    }

    /**
     * Get the promise for a plugin that is being initialised.
     *
     * @param component Plugin component name.
     * @return Promise resolved when the plugin is initialised, undefined if none registered.
     */
    sitePluginLoaded(component: string): Promise<void> | undefined {
        return this.sitePluginPromises[component];
    }

    /**
     * Wait for fetch plugins to be done.
     *
     * @return Promise resolved when site plugins have been fetched.
     */
    waitFetchPlugins(): Promise<void> {
        return this.fetchPluginsDeferred.promise;
    }

    /**
     * Get a module handler instance, if present.
     *
     * @param modName Mod name without "mod_".
     * @return Handler instance, undefined if not found.
     */
    getModuleHandlerInstance(modName: string): CoreSitePluginsModuleHandler | undefined {
        return this.moduleHandlerInstances[modName];
    }

    /**
     * Set a module handler instance.
     *
     * @param modName Mod name.
     * @param handler Handler instance.
     */
    setModuleHandlerInstance(modName: string, handler: CoreSitePluginsModuleHandler): void {
        this.moduleHandlerInstances[modName] = handler;
    }

}
export const CoreSitePlugins = makeSingleton(CoreSitePluginsProvider);
/**
* Handler of a site plugin.
*/
export type CoreSitePluginsHandler = {
plugin: CoreSitePluginsPlugin; // Site plugin data.
handlerName: string; // Name of the handler.
handlerSchema: CoreSitePluginsHandlerData; // Handler's data.
initResult?: CoreSitePluginsContent | null; // Result of the init WS call (if any).
};
/**
* Default args added to site plugins calls.
*/
export type CoreSitePluginsDefaultArgs = {
userid?: number;
appid: string;
appversioncode: number;
appversionname: string;
applang: string;
appcustomurlscheme: string;
appisdesktop: boolean;
appismobile: boolean;
appiswide: boolean;
appplatform: string;
};
/**
* Params of tool_mobile_get_content WS.
*/
export type CoreSitePluginsGetContentWSParams = {
component: string; // Component where the class is e.g. mod_assign.
method: string; // Method to execute in class \$component\output\mobile.
args?: { // Args for the method are optional.
name: string; // Param name.
value: string; // Param value.
}[];
};
/**
* Data returned by tool_mobile_get_content WS.
*/
export type CoreSitePluginsGetContentWSResponse = {
templates: CoreSitePluginsContentTemplate[]; // Templates required by the generated content.
javascript: string; // JavaScript code.
otherdata: { // Other data that can be used or manipulated by the template via 2-way data-binding.
name: string; // Field name.
value: string; // Field value.
}[];
files: CoreWSExternalFile[];
restrict: CoreSitePluginsContentRestrict; // Restrict this content to certain users or courses.
disabled?: boolean; // Whether we consider this disabled or not.
};
/**
* Template data returned by tool_mobile_get_content WS.
*/
export type CoreSitePluginsContentTemplate = {
id: string; // ID of the template.
html: string; // HTML code.
};
/**
 * Restriction data returned by tool_mobile_get_content WS: limits which users
 * or courses the content applies to.
 */
export type CoreSitePluginsContentRestrict = {
    users?: number[]; // List of allowed users.
    courses?: number[]; // List of allowed courses.
};
/**
 * Data returned by tool_mobile_get_content WS with calculated data:
 * the otherdata name/value pairs are flattened into a single record.
 */
export type CoreSitePluginsContentParsed = Omit<CoreSitePluginsGetContentWSResponse, 'otherdata'> & {
    otherdata: Record<string, unknown>; // Other data that can be used or manipulated by the template via 2-way data-binding.
};
/**
 * Parsed content plus extra runtime state attached after retrieval.
 */
export type CoreSitePluginsContent = CoreSitePluginsContentParsed & {
    disabled?: boolean; // Whether we consider this content disabled or not.
    // Result of the executed JavaScript code, if any.
    jsResult?: any; // eslint-disable-line @typescript-eslint/no-explicit-any
};
/**
 * Data returned by tool_mobile_get_plugins_supporting_mobile WS.
 */
export type CoreSitePluginsGetPluginsSupportingMobileWSResponse = {
    plugins: CoreSitePluginsWSPlugin[]; // Plugins that support the mobile app.
    warnings?: CoreWSExternalWarning[]; // Warnings raised by the WS call.
};
/**
 * Plugin data returned by tool_mobile_get_plugins_supporting_mobile WS.
 */
export type CoreSitePluginsWSPlugin = {
    component: string; // The plugin component name.
    version: string; // The plugin version number.
    addon: string; // The Mobile addon (package) name.
    dependencies: string[]; // The list of Mobile addons this addon depends on.
    fileurl: string; // The addon package url for download or empty if it doesn't exist.
    filehash: string; // The addon package hash or empty if it doesn't exist.
    filesize: number; // The addon package size or empty if it doesn't exist.
    handlers?: string; // Handlers definition (JSON).
    lang?: string; // Language strings used by the handlers (JSON).
};
/**
 * Plugin data with some calculated data.
 */
export type CoreSitePluginsPlugin = CoreSitePluginsWSPlugin & {
    parsedHandlers?: Record<string, CoreSitePluginsHandlerData> | null; // Result of parsing the handlers JSON string.
    parsedLang?: Record<string, string[]> | null; // Result of parsing the lang JSON string.
};
/**
 * Plugin handler data: union of the per-delegate handler data shapes below.
 */
export type CoreSitePluginsHandlerData = CoreSitePluginsInitHandlerData | CoreSitePluginsCourseOptionHandlerData |
    CoreSitePluginsMainMenuHandlerData | CoreSitePluginsCourseModuleHandlerData | CoreSitePluginsCourseFormatHandlerData |
    CoreSitePluginsUserHandlerData | CoreSitePluginsSettingsHandlerData | CoreSitePluginsMessageOutputHandlerData |
    CoreSitePluginsBlockHandlerData | CoreSitePluginsMainMenuHomeHandlerData;
/**
 * Plugin handler data common to all delegates.
 */
export type CoreSitePluginsHandlerCommonData = {
    delegate?: string; // Name of the delegate to register the handler in.
    method?: string; // Method name.
    init?: string; // Init method name.
    restricttocurrentuser?: boolean; // Whether to restrict to the current user.
    restricttoenrolledcourses?: boolean; // Whether to restrict to enrolled courses.
    styles?: { // Remote styles for the handler.
        url?: string; // Stylesheet URL.
        version?: number; // Stylesheet version.
    };
    moodlecomponent?: string; // Moodle component associated with the handler.
};
/**
 * Course option handler specific data.
 */
export type CoreSitePluginsCourseOptionHandlerData = CoreSitePluginsHandlerCommonData & {
    displaydata?: { // Data used to render the option.
        title?: string;
        class?: string;
        icon?: string;
    };
    priority?: number; // Handler priority.
    ismenuhandler?: boolean; // Whether this is a menu handler.
    ptrenabled?: boolean; // ptr presumably stands for pull-to-refresh — confirm against consumer code.
};
/**
 * Main menu handler specific data.
 */
export type CoreSitePluginsMainMenuHandlerData = CoreSitePluginsHandlerCommonData & {
    displaydata?: { // Data used to render the menu entry.
        title?: string;
        icon?: string;
        class?: string;
    };
    priority?: number; // Handler priority.
    ptrenabled?: boolean; // ptr presumably stands for pull-to-refresh — confirm against consumer code.
};
/**
 * Course module handler specific data.
 */
export type CoreSitePluginsCourseModuleHandlerData = CoreSitePluginsHandlerCommonData & {
    displaydata?: { // Data used to render the module.
        icon?: string;
        class?: string;
    };
    method?: string; // Method name.
    offlinefunctions?: Record<string, string[]>; // Functions available offline, with their parameters.
    downloadbutton?: boolean; // Whether to show the download button.
    isresource?: boolean; // Whether the module is a resource.
    updatesnames?: string; // Regexp-like string of update names.
    displayopeninbrowser?: boolean; // Whether to display the open-in-browser option.
    displaydescription?: boolean; // Whether to display the description.
    displayrefresh?: boolean; // Whether to display the refresh option.
    displayprefetch?: boolean; // Whether to display the prefetch option.
    displaysize?: boolean; // Whether to display the size.
    displaygrades?: boolean; // Whether to display the grades.
    coursepagemethod?: string; // Method providing content for the course page.
    ptrenabled?: boolean; // ptr presumably stands for pull-to-refresh — confirm against consumer code.
    supportedfeatures?: Record<string, unknown>; // Features supported by the module.
    manualcompletionalwaysshown?: boolean; // Whether manual completion is always shown.
    nolinkhandlers?: boolean; // Whether link handlers should be skipped.
};
/**
 * Course format handler specific data.
 */
export type CoreSitePluginsCourseFormatHandlerData = CoreSitePluginsHandlerCommonData & {
    canviewallsections?: boolean; // Whether all sections can be viewed at once.
    displayenabledownload?: boolean; // Whether to display the enable-download option.
    /**
     * @deprecated on 4.0, use displaycourseindex instead.
     */
    displaysectionselector?: boolean;
    displaycourseindex?: boolean; // Whether to display the course index.
};
/**
 * User handler specific data.
 */
export type CoreSitePluginsUserHandlerData = CoreSitePluginsHandlerCommonData & {
    displaydata?: { // Data used to render the handler.
        title?: string;
        icon?: string;
        class?: string;
    };
    type?: string; // Handler type.
    priority?: number; // Handler priority.
    ptrenabled?: boolean; // ptr presumably stands for pull-to-refresh — confirm against consumer code.
};
/**
 * Settings handler specific data.
 */
export type CoreSitePluginsSettingsHandlerData = CoreSitePluginsHandlerCommonData & {
    displaydata?: { // Data used to render the settings entry.
        title?: string;
        icon?: string;
        class?: string;
    };
    priority?: number; // Handler priority.
    ptrenabled?: boolean; // ptr presumably stands for pull-to-refresh — confirm against consumer code.
};
/**
 * Message output handler specific data.
 */
export type CoreSitePluginsMessageOutputHandlerData = CoreSitePluginsHandlerCommonData & {
    displaydata?: { // Data used to render the handler.
        title?: string;
        icon?: string;
    };
    priority?: number; // Handler priority.
    ptrenabled?: boolean; // ptr presumably stands for pull-to-refresh — confirm against consumer code.
};
/**
 * Block handler specific data.
 */
export type CoreSitePluginsBlockHandlerData = CoreSitePluginsHandlerCommonData & {
    displaydata?: { // Data used to render the block.
        title?: string;
        class?: string;
        type?: string;
    };
    fallback?: string; // Fallback block to use if this one isn't supported.
};
/**
 * Common handler data with some data from the init method.
 */
export type CoreSitePluginsInitHandlerData = CoreSitePluginsHandlerCommonData & {
    methodTemplates?: CoreSitePluginsContentTemplate[]; // Templates returned by the init method.
    // Result of the JavaScript executed by the init method.
    methodJSResult?: any; // eslint-disable-line @typescript-eslint/no-explicit-any
    methodOtherdata?: Record<string, unknown>; // Other data returned by the init method.
};
/**
 * Main menu home handler specific data.
 */
export type CoreSitePluginsMainMenuHomeHandlerData = CoreSitePluginsHandlerCommonData & {
    displaydata?: { // Data used to render the home entry.
        title?: string;
        class?: string;
    };
    priority?: number; // Handler priority.
    ptrenabled?: boolean; // ptr presumably stands for pull-to-refresh — confirm against consumer code.
};
/**
 * Event to update course content data for plugins using coursepagemethod.
 */
export type CoreSitePluginsUpdateCourseContentEvent = {
    cmId: number; // Module ID to update.
    alreadyFetched?: boolean; // Whether course data has already been fetched (no need to fetch it again).
};
declare module '@singletons/events' {
    /**
     * Augment CoreEventsData interface with events specific to this service.
     *
     * @see https://www.typescriptlang.org/docs/handbook/declaration-merging.html#module-augmentation
     */
    export interface CoreEventsData {
        [CoreSitePluginsProvider.UPDATE_COURSE_CONTENT]: CoreSitePluginsUpdateCourseContentEvent;
    }
}
import { By, EditorView, InputBox, SideBarView, TextEditor, WebView } from "vscode-extension-tester";
import * as path from "path";
import { h5ComponentWithText } from "./helpers/CommonLocators";
import { EditorTabs } from "./helpers/dmn/EditorTabs";
import { assertWebElementIsDisplayedEnabled } from "./helpers/CommonAsserts";
import VSCodeTestHelper from "./helpers/VSCodeTestHelper";
import BpmnEditorTestHelper, { PaletteCategories } from "./helpers/bpmn/BpmnEditorTestHelper";
import ScesimEditorTestHelper from "./helpers/ScesimEditorTestHelper";
import DmnEditorTestHelper from "./helpers/dmn/DmnEditorTestHelper";
import PmmlEditorTestHelper from "./helpers/PmmlEditorTestHelper";
import { assert } from "chai";
import {
palletteItemAnchor,
processVariableDataTypeInput,
processVariableNameInput,
} from "./helpers/bpmn/BpmnLocators";
import DecisionNavigatorHelper from "./helpers/dmn/DecisionNavigatorHelper";
import { PropertiesPanelSection } from "./helpers/bpmn/PropertiesPanelHelper";
import { TextEdit } from "vscode";
describe("Editors are loading properly", () => {
const RESOURCES: string = path.resolve("it-tests-tmp", "resources");
const DEMO_BPMN: string = "demo.bpmn";
const DEMO_DMN: string = "demo.dmn";
const DEMO_DMN_SCESIM: string = "demo-dmn.scesim";
const DEMO_EXPRESSION_DMN: string = "demo-expression.dmn";
const DEMO_SCESIM: string = "demo.scesim";
const DEMO_PMML: string = "demo.pmml";
const REUSABLE_DMN: string = "reusable-model.dmn";
const WID_BPMN: string = "process-wid.bpmn";
let testHelper: VSCodeTestHelper;
let webview: WebView;
let folderView: SideBarView;
before(async function () {
this.timeout(60000);
testHelper = new VSCodeTestHelper();
folderView = await testHelper.openFolder(RESOURCES);
});
beforeEach(async function () {
await testHelper.closeAllEditors();
await testHelper.closeAllNotifications();
});
afterEach(async function () {
this.timeout(15000);
await testHelper.closeAllEditors();
await testHelper.closeAllNotifications();
await webview.switchBack();
});
it("Opens demo.bpmn file in BPMN Editor and loads correct diagram", async function () {
this.timeout(20000);
webview = await testHelper.openFileFromSidebar(DEMO_BPMN);
await testHelper.switchWebviewToFrame(webview);
const bpmnEditorTester = new BpmnEditorTestHelper(webview);
const palette = await bpmnEditorTester.getPalette();
await assertWebElementIsDisplayedEnabled(palette);
await bpmnEditorTester.openDiagramProperties();
const explorer = await bpmnEditorTester.openDiagramExplorer();
await explorer.assertDiagramNodeIsPresent("Start");
await explorer.assertDiagramNodeIsPresent("End");
await webview.switchBack();
});
it("Opens demo.dmn file in DMN Editor", async function () {
this.timeout(20000);
webview = await testHelper.openFileFromSidebar(DEMO_DMN);
await testHelper.switchWebviewToFrame(webview);
const dmnEditorTester = new DmnEditorTestHelper(webview);
await dmnEditorTester.openDiagramProperties();
await dmnEditorTester.openDiagramExplorer();
await dmnEditorTester.openDecisionNavigator();
await webview.switchBack();
});
it("Include reusable-model in DMN Editor", async function () {
this.timeout(20000);
webview = await testHelper.openFileFromSidebar(DEMO_DMN);
await testHelper.switchWebviewToFrame(webview);
const dmnEditorTester = new DmnEditorTestHelper(webview);
await dmnEditorTester.switchEditorTab(EditorTabs.IncludedModels);
await dmnEditorTester.includeModel(REUSABLE_DMN, "reusable-model");
// Blocked by https://issues.redhat.com/browse/KOGITO-4261
// await dmnEditorTester.inspectIncludedModel("reusable-model", 2)
await dmnEditorTester.switchEditorTab(EditorTabs.Editor);
await webview.switchBack();
});
it("Undo command in DMN Editor", async function () {
this.timeout(40000);
webview = await testHelper.openFileFromSidebar(DEMO_DMN);
await testHelper.switchWebviewToFrame(webview);
const dmnEditorTester = new DmnEditorTestHelper(webview);
const decisionNavigator = await dmnEditorTester.openDecisionNavigator();
await decisionNavigator.selectDiagramNode("?DemoDecision1");
const diagramProperties = await dmnEditorTester.openDiagramProperties();
await diagramProperties.changeProperty("Name", "Updated Name 1");
const navigatorPanel: DecisionNavigatorHelper = await dmnEditorTester.openDecisionNavigator();
await navigatorPanel.assertDiagramNodeIsPresent("Updated Name 1");
await navigatorPanel.assertDiagramNodeIsPresent("?DecisionFinal1");
await webview.switchBack();
// changeProperty() is implemented as clear() and sendKeys(), that is why we need two undo operations
await testHelper.executeCommandFromPrompt("Undo");
await testHelper.executeCommandFromPrompt("Undo");
await testHelper.switchWebviewToFrame(webview);
await navigatorPanel.assertDiagramNodeIsPresent("?DemoDecision1");
await navigatorPanel.assertDiagramNodeIsPresent("?DecisionFinal1");
await webview.switchBack();
});
it("Check new DMN Expression Editor", async function () {
this.timeout(40000);
webview = await testHelper.openFileFromSidebar(DEMO_EXPRESSION_DMN);
await testHelper.switchWebviewToFrame(webview);
const dmnEditorTester = new DmnEditorTestHelper(webview);
const decisionNavigator = await dmnEditorTester.openDecisionNavigator();
await decisionNavigator.selectNodeExpression("context demo", "Context");
const contextEditor = await dmnEditorTester.getExpressionEditor();
await contextEditor.activateBetaVersion();
await contextEditor.assertExpressionDetails("context demo", "string");
await decisionNavigator.selectNodeExpression("function demo", "Function");
const functionEditor = await dmnEditorTester.getExpressionEditor();
await functionEditor.activateBetaVersion();
await functionEditor.assertExpressionDetails("function demo", "string");
await decisionNavigator.selectNodeExpression("decision table demo", "Decision Table");
const decisionTableEditor = await dmnEditorTester.getExpressionEditor();
await decisionTableEditor.activateBetaVersion();
await decisionTableEditor.assertExpressionDetails("decision table demo", "string");
await webview.switchBack();
});
it("Opens demo.scesim file in SCESIM Editor", async function () {
this.timeout(20000);
webview = await testHelper.openFileFromSidebar(DEMO_SCESIM);
await testHelper.switchWebviewToFrame(webview);
const scesimEditorTester = new ScesimEditorTestHelper(webview);
await scesimEditorTester.openScenarioCheatsheet();
await scesimEditorTester.openSettings();
await scesimEditorTester.openTestTools();
await webview.switchBack();
});
/**
* As the opened sceism file is empty, a prompt to specify file under test should be shown
*/
it("Opens demo-dmn.scesim file in SCESIM Editor", async function () {
this.timeout(20000);
webview = await testHelper.openFileFromSidebar(DEMO_DMN_SCESIM);
await testHelper.switchWebviewToFrame(webview);
const scesimEditorTester = new ScesimEditorTestHelper(webview);
await scesimEditorTester.specifyDmnOnLandingPage(DEMO_DMN);
await webview.switchBack();
// save file so we can check the plain text source
await testHelper.executeCommandFromPrompt("File: Save");
// check plain text source starts with <?xml?> prolog
await testHelper.executeCommandFromPrompt("View: Reopen Editor With...");
const input = await InputBox.create();
await input.selectQuickPick("Text Editor");
const xmlProlog = '<?xml version="1.0" encoding="UTF-8"?>';
const plainText = new TextEditor();
assert.equal(await plainText.getTextAtLine(1), xmlProlog, "First line should be an <?xml?> prolog");
assert.notEqual(await plainText.getTextAtLine(2), xmlProlog, "<?xml?> prolog should be there just once");
});
it("Opens demo.pmml file in PMML Editor", async function () {
this.timeout(20000);
webview = await testHelper.openFileFromSidebar(DEMO_PMML);
await testHelper.switchWebviewToFrame(webview);
const pmmlEditorTester = new PmmlEditorTestHelper(webview);
const dataDictionaryModel = await pmmlEditorTester.openDataDictionary();
dataDictionaryModel.close();
const miningSchemaModel = await pmmlEditorTester.openMiningSchema();
miningSchemaModel.close();
const outputsModal = await pmmlEditorTester.openOutputs();
outputsModal.close();
await webview.switchBack();
});
it("Opens process with work item definition properly", async function () {
this.timeout(20000);
webview = await testHelper.openFileFromSidebar(WID_BPMN, "src/main/java/org/kie/businessapp");
await testHelper.switchWebviewToFrame(webview);
const bpmnEditorTester = new BpmnEditorTestHelper(webview);
const customTasksPaletteCategory = await bpmnEditorTester.openDiagramPalette(PaletteCategories.CUSTOM_TASKS);
assertWebElementIsDisplayedEnabled(await customTasksPaletteCategory.findElement(h5ComponentWithText("Milestone")));
assertWebElementIsDisplayedEnabled(
await customTasksPaletteCategory.findElement(h5ComponentWithText("CustomTasks"))
);
assertWebElementIsDisplayedEnabled(
await customTasksPaletteCategory.findElement(palletteItemAnchor("CreateCustomer"))
);
assertWebElementIsDisplayedEnabled(await customTasksPaletteCategory.findElement(palletteItemAnchor("Email")));
const explorer = await bpmnEditorTester.openDiagramExplorer();
await explorer.assertDiagramNodeIsPresent("Create Customer Internal Service");
await explorer.assertDiagramNodeIsPresent("Start");
await explorer.assertDiagramNodeIsPresent("Email");
await explorer.assertDiagramNodeIsPresent("End");
await explorer.selectDiagramNode("Create Customer Internal Service");
const propertiesPanel = await bpmnEditorTester.openDiagramProperties();
await propertiesPanel.assertPropertyValue("Name", "Create Customer Internal Service", "textarea");
await propertiesPanel.assertPropertyValue(
"Documentation",
"Calls internal service that creates the customer in database server.",
"textarea"
);
await propertiesPanel.assertPropertyValue("Assignments", "7 data inputs, 1 data output", "div/input");
await webview.switchBack();
});
it("Saves a change of process name in BPMN editor properly", async function () {
this.timeout(60000);
webview = await testHelper.openFileFromSidebar("SaveAssetTest.bpmn");
await testHelper.switchWebviewToFrame(webview);
let bpmnEditorTester = new BpmnEditorTestHelper(webview);
let properties = await bpmnEditorTester.openDiagramProperties();
let processNameInputField = await properties.getProperty("Name");
assert.isTrue(await processNameInputField.isEnabled());
const formerProcessId = await processNameInputField.getAttribute("value");
assert.isDefined(formerProcessId);
assert.isNotEmpty(formerProcessId);
await processNameInputField.sendKeys("Renamed");
await bpmnEditorTester.openDiagramExplorer();
await webview.switchBack();
await testHelper.executeCommandFromPrompt("File: Save");
await testHelper.closeAllEditors();
webview = await testHelper.openFileFromSidebar("SaveAssetTest.bpmn");
await testHelper.switchWebviewToFrame(webview);
bpmnEditorTester = new BpmnEditorTestHelper(webview);
properties = await bpmnEditorTester.openDiagramProperties();
processNameInputField = await properties.getProperty("Name");
assert.isTrue(await processNameInputField.isEnabled());
assert.equal(await processNameInputField.getAttribute("value"), formerProcessId + "Renamed");
await webview.switchBack();
});
it("Reuses Data-types across BPMN editor", async function () {
this.timeout(40000);
webview = await testHelper.openFileFromSidebar("ReuseDataTypeTest.bpmn");
await testHelper.switchWebviewToFrame(webview);
const bpmnEditorTester = new BpmnEditorTestHelper(webview);
const variableName = "fuelAccelerator";
const dataTypeType = "com.superbankofpeople.FuelAccelerator";
const dataTypeTypeBracketFormat = "FuelAccelerator [com.superbankofpeople]";
let propertiesPanel = await bpmnEditorTester.openDiagramProperties();
propertiesPanel = await propertiesPanel.addProcessVariable(variableName, dataTypeType, true);
await bpmnEditorTester.openDiagramExplorer();
propertiesPanel = await bpmnEditorTester.openDiagramProperties();
propertiesPanel = await propertiesPanel.expandPropertySection(PropertiesPanelSection.PROCESS_DATA);
const processVariableNameInputField = await propertiesPanel.rootElement.findElement(processVariableNameInput());
const processVariableDataTypeInputField = await propertiesPanel.rootElement.findElement(
processVariableDataTypeInput()
);
await bpmnEditorTester.scrollElementIntoView(processVariableNameInputField);
await processVariableDataTypeInputField.click();
const customDataTypeEditOption = await processVariableDataTypeInputField.findElement(
By.xpath("//select/option[@value='Edit " + dataTypeType + " ...']")
);
assertWebElementIsDisplayedEnabled(customDataTypeEditOption);
propertiesPanel = await propertiesPanel.expandPropertySection(PropertiesPanelSection.Advanced);
propertiesPanel = await propertiesPanel.addGlobalVariable(
"used_fuel_accelerator",
dataTypeTypeBracketFormat,
false
);
await webview.switchBack();
});
}); | the_stack |
import Fs from 'fs'
import Path from 'path'
import Csurf from 'csurf'
import crypto from 'crypto'
import Mustache from 'mustache'
import Passport from 'passport'
import Bcrypt from 'bcryptjs'
import CookieParser from 'cookie-parser'
import PassportLocal from 'passport-local'
import AsyncHandler from 'express-async-handler'
import { Router, RequestHandler, static as Static, Request } from 'express'
import { responseEnhancer } from 'express-response-formatter'
import ExpressSession, { CookieOptions } from 'express-session'
import ExpressSessionMikroORMStore, {
generateSessionEntity
} from 'express-session-mikro-orm'
import {
Utils,
route,
plugin,
Asset,
resource,
text,
event,
boolean,
belongsToMany,
dateTime,
select,
Config,
User,
belongsTo,
ApiContext,
RouteContract,
DashboardContract
} from '@tensei/common'
import getRoutes from './routes'
import { setupCms } from './setup'
import { DataPayload } from '@tensei/common/config'
import { changePasswordRoute } from './routes/change-password'
import { updateProfileRoute } from './routes/update-profile'
// Mustache template for the CMS single-page-app shell, read once at module load.
const indexFileContent = Fs.readFileSync(
  Path.resolve(__dirname, 'template', 'index.mustache')
).toString()
// Scripts that must be mounted first in the rendered page, in this order.
const baseScripts = ['main.js', 'vendor.js', 'manifest.js']
/**
 * Tensei CMS plugin. Registers the internal admin resources
 * (user / role / permission / token), wires up session-based authentication
 * with passport-local, CSRF protection, the CMS API routes and serves the
 * single-page dashboard shell.
 */
class CmsPlugin {
  // Compiled frontend bundles served to the dashboard.
  private scripts: Asset[] = [
    {
      name: 'manifest.js',
      path: Path.resolve(__dirname, 'public', 'manifest.js')
    },
    {
      name: 'vendor.js',
      path: Path.resolve(__dirname, 'public', 'vendor.js')
    },
    {
      name: 'main.js',
      path: Path.resolve(__dirname, 'public', 'main.js')
    }
  ]

  // Plugin configuration, mutated by the fluent setters below.
  private config: {
    path: string
    apiPath: string
    setup: () => any
    cookieOptions: {}
    userResource: string
    permissionResource: string
    tokenResource: string
    roleResource: string
    dashboards: DashboardContract[]
  } = {
    path: 'cms',
    apiPath: 'cms/api',
    setup: () => { },
    cookieOptions: {},
    dashboards: [],
    userResource: 'Admin User',
    permissionResource: 'Admin Permission',
    roleResource: 'Admin Role',
    tokenResource: 'Admin Token'
  }

  private router = Router()

  /**
   * Override the options used for the CMS session cookie.
   */
  cookies(cookieOptions: CookieOptions) {
    this.config.cookieOptions = cookieOptions

    return this
  }

  private styles: Asset[] = []

  /**
   * Change the path the dashboard is mounted on (defaults to `cms`).
   */
  path(path: string) {
    this.config.path = path
    // Keep the same format (no leading slash) as the `cms/api` default.
    this.config.apiPath = `${path}/api`

    return this
  }

  // Route paths are relative to the router, which is mounted at `/${config.path}`.
  private getApiPath = (path: string) => {
    return `/api/${path}`
  }

  // Internal admin resources registered by this plugin.
  private resources = {
    user: this.userResource(),
    role: this.roleResource(),
    token: this.tokenResource(),
    permission: this.permissionResource()
  }

  /**
   * Generate a cryptographically secure random token.
   * Returns a hex string of 2 * length characters.
   */
  public generateRandomToken(length = 32) {
    return crypto.randomBytes(length).toString('hex')
  }

  // Routes owned directly by this plugin (CSRF token, auth helpers, logout).
  private routes = () => [
    route('Get CMS Csrf Token')
      .get()
      .path(this.getApiPath('csrf'))
      .handle((request, response) => {
        // Expose the token via cookie so the SPA can echo it back in a header.
        response.cookie('x-csrf-token', request.csrfToken())

        return response.status(204).json()
      }),
    changePasswordRoute.path(this.getApiPath('auth/change-password')),
    updateProfileRoute.path(this.getApiPath('auth/update-profile')),
    route('Logout')
      .path(this.getApiPath('auth/logout'))
      .id('logout')
      .post()
      .handle(async (request, response) => {
        request.session.destroy(error => {
          if (error) {
            // Best effort: report success even when the session could not be destroyed.
            return response.status(204).json()
          }

          response.clearCookie('connect.sid')

          return response.status(204).json()
        })
      })
  ]

  /**
   * Passport verify callback for the login strategy. Validates credentials
   * against the admin user resource and reports field-level errors via `done`.
   */
  private loginPassport = async (request: Request, done: any) => {
    const { config, manager, body } = request
    const { indicative } = config

    try {
      const { email, password } = await indicative.validator.validate(body, {
        email: 'required|email',
        // Validate the password as well, so a missing password produces a
        // proper validation error instead of a bcrypt type error below.
        password: 'required'
      })

      let user: any = await manager.findOne(
        this.resources.user.data.pascalCaseName,
        {
          email
        }
      )

      // Use the same message for unknown email and wrong password to avoid
      // leaking which accounts exist.
      if (!user) {
        return done(
          [
            {
              field: 'email',
              message: 'These credentials do not match our records.'
            }
          ],
          null
        )
      }

      if (!Bcrypt.compareSync(password, user.password)) {
        return done(
          [
            {
              field: 'email',
              message: 'These credentials do not match our records.'
            }
          ],
          null
        )
      }

      return done(null, user)
    } catch (errors) {
      return done(errors, null)
    }
  }

  /**
   * Passport verify callback for the registration strategy. Only allowed when
   * no administrator exists yet; the first registered user becomes super admin.
   */
  private registerPassport = async (request: Request, done: any) => {
    const { config, manager, body, resources, repositories } = request
    const { emitter } = config

    const adminCount = await manager.count(
      this.resources.user.data.pascalCaseName,
      {}
    )

    // Registration is only open for the very first administrator.
    if (adminCount !== 0) {
      return done(
        [
          {
            message:
              'An administrator already exists. Please join the team by requesting an invitation.',
            field: 'email'
          }
        ],
        null
      )
    }

    const validator = Utils.validator(this.userResource(), manager, resources)

    const [success, payload] = await validator.validate(body)

    if (!success) {
      return done(payload, null)
    }

    let superAdminRole = await repositories.adminRoles().findOne({
      slug: 'super-admin'
    })

    if (!superAdminRole) {
      return done(
        [
          {
            message:
              'The Super Admin Role must exist before you create an administrator account.',
            field: 'role'
          }
        ],
        null
      )
    }

    let createUserPayload: any = {
      ...payload,
      active: true,
      adminRoles: [superAdminRole]
    }

    const admin: User = manager.create(
      this.resources.user.data.pascalCaseName,
      createUserPayload
    )

    await manager.persistAndFlush(admin)

    emitter.emit('ADMIN_REGISTERED', admin)

    return done(null, admin)
  }

  /**
   * Resource describing a single admin permission. Hidden from the public API
   * and from navigation; managed through roles.
   */
  private permissionResource() {
    return resource(this.config.permissionResource)
      .fields([
        text('Name').searchable().rules('required'),
        // The previous duplicate `.rules('required')` call was removed.
        text('Slug').rules('required').unique().searchable(),
        belongsToMany(this.config.roleResource)
      ])
      .displayField('Name')
      .hideOnApi()
      .hideFromNavigation()
  }

  /**
   * Resource storing one-off tokens (currently only invitations) tied to an
   * admin user.
   */
  private tokenResource() {
    return resource(this.config.tokenResource)
      .fields([
        select('Type').options([
          {
            label: 'Invite',
            value: 'INVITATION'
          }
        ]),
        text('Token'),
        dateTime('Expires At').nullable(),
        belongsTo(this.config.userResource)
      ])
      .hideFromNavigation()
      .hideOnApi()
  }

  /**
   * Resource describing an admin role: a named set of permissions assigned to
   * admin users.
   */
  private roleResource() {
    return resource(this.config.roleResource)
      .fields([
        // Duplicate `.rules('required')` calls were removed from Name and Slug.
        text('Name')
          .rules('required')
          .unique()
          .searchable()
          .sortable(),
        text('Slug')
          .rules('required', 'unique:slug')
          .unique()
          .sortable()
          .searchable(),
        text('Description').nullable().rules('max:255'),
        belongsToMany(this.config.userResource),
        belongsToMany(this.config.permissionResource).owner()
      ])
      .hideOnApi()
      .displayField('Name')
      .hideFromNavigation()
  }

  /**
   * Resource describing an admin (CMS) user. Passwords are bcrypt-hashed via
   * the beforeCreate / beforeUpdate hooks below.
   */
  private userResource() {
    return resource(this.config.userResource)
      .fields([
        text('First name')
          .searchable()
          .nullable()
          .sortable()
          .creationRules('required'),
        text('Last name')
          .searchable()
          .nullable()
          .sortable()
          .creationRules('required'),
        text('Password')
          .rules('min:12')
          .requiredOnCreate()
          .nullable()
          .hideOnFetchApi(),
        text('Email')
          .unique()
          .searchable()
          .sortable()
          .notNullable()
          .creationRules('required', 'email', 'unique:email')
          .updateRules('unique:email'),
        boolean('Active')
          .nullable()
          .sortable()
          .defaultFormValue(true)
          .default(true)
          .rules('boolean'),
        belongsToMany(this.config.roleResource).rules('array')
      ])
      .displayField('First name')
      .secondaryDisplayField('Email')
      .hideOnApi()
      .hideFromNavigation()
      .beforeCreate(({ entity, em }) => {
        // Hash the password before it is first persisted.
        const payload: DataPayload = {
          password: entity.password
            ? Bcrypt.hashSync(entity.password)
            : undefined
        }

        em.assign(entity, payload)
      })
      .beforeUpdate(async ({ entity, em, changeSet }) => {
        // Re-hash only when the password actually changed.
        if (changeSet?.payload.password) {
          em.assign(entity, {
            password: Bcrypt.hashSync(changeSet.payload.password)
          })
        }
      })
  }

  // Storage options for the MikroORM-backed express session store.
  sessionMikroOrmOptions = {
    entityName: `${this.resources.user.data.pascalCaseName}Session`,
    tableName: `${this.resources.user.data.camelCaseNamePlural}_sessions`,
    collection: `${this.resources.user.data.camelCaseNamePlural}_sessions`
  }

  /**
   * Run every authorize callback of a route and throw a forbidden error
   * unless all of them pass.
   */
  private authorizeResolver = async (ctx: ApiContext, query: RouteContract) => {
    const authorized = await Promise.all(
      query.config.authorize.map(fn => fn(ctx as any, ctx.entity))
    )

    if (
      authorized.filter(result => result).length !==
      query.config.authorize.length
    ) {
      throw ctx.forbiddenError('Unauthorized.')
    }
  }

  /**
   * Register additional dashboards to display in the CMS.
   */
  public dashboards(dashboards: DashboardContract[]) {
    this.config.dashboards = [...this.config.dashboards, ...dashboards]

    return this
  }

  /**
   * Build the Tensei plugin definition: registers assets, entities and
   * resources, then wires sessions, passport, CSRF and all CMS routes on boot.
   */
  plugin() {
    return plugin('CMS')
      .id('cms')
      .extra({
        path: this.config.path
      })
      .register(({ script, style, extendResources, databaseConfig }) => {
        this.scripts.forEach(s => script(s.name, s.path, s.chunk))
        this.styles.forEach(s => style(s.name, s.path))

        // The session store needs its own entity in the ORM schema.
        databaseConfig.entities = [
          ...(databaseConfig.entities || []),
          generateSessionEntity(this.sessionMikroOrmOptions)
        ]

        extendResources([
          this.resources.user,
          this.resources.role,
          this.resources.token,
          this.resources.permission
        ])
      })
      .boot(async config => {
        const { app, orm, resources, currentCtx } = config
        const Store = ExpressSessionMikroORMStore(
          ExpressSession,
          this.sessionMikroOrmOptions
        )

        await setupCms(config, [this.resources.role, this.resources.permission])

        this.router.use(CookieParser())

        this.router.use(
          ExpressSession({
            resave: false,
            saveUninitialized: false,
            store: new Store({ orm }) as any,
            cookie: this.config.cookieOptions,
            secret: process.env.SESSION_SECRET || '__sessions__secret__'
          })
        )

        this.router.use(Passport.initialize())
        this.router.use(Passport.session())

        const self = this

        Passport.use(
          'local-register',
          new PassportLocal.Strategy(
            {
              usernameField: 'email',
              passwordField: 'password',
              passReqToCallback: true
            },
            async (request, email, password, done) => {
              await self.registerPassport(request, done)
            }
          )
        )

        Passport.use(
          'local-login',
          new PassportLocal.Strategy(
            {
              usernameField: 'email',
              passwordField: 'password',
              passReqToCallback: true
            },
            async (request, email, password, done) => {
              await this.loginPassport(request, done)
            }
          )
        )

        // Only the user id is stored in the session.
        Passport.serializeUser((user, done) => {
          done(null, {
            id: (user as any).id
          })
        })

        Passport.deserializeUser(async (request, id, done) => {
          const user = await request.manager.findOne(
            this.resources.user.data.pascalCaseName,
            {
              id: id.id
            },
            {
              populate: ['adminRoles.adminPermissions']
            }
          )

          done(null, user)
        })

        this.router.post(
          `${this.getApiPath('auth/register')}`,
          (request, response, next) => {
            Passport.authenticate(
              'local-register',
              {
                successRedirect: `${this.getApiPath('')}`,
                failureRedirect: `${this.getApiPath('auth/register')}`
              },
              async (error, user) => {
                if (user === false && !error) {
                  // This is a unique case, where the user did not provide the email or password.
                  // Passport is weird, so they do not even send the request to the controller when this happens.
                  // In this scenario, we'll perform validation here on the data, and send the correct validation errors back to the frontend.
                  const validator = Utils.validator(
                    self.userResource(),
                    request.manager,
                    request.resources
                  )

                  const [, payload] = await validator.validate(request.body)

                  return response.status(422).json({
                    errors: payload
                  })
                }
                if (error) {
                  return response.status(422).json({
                    errors: error
                  })
                }

                request.logIn(user, error => {
                  if (error) {
                    return next(error)
                  }

                  return response.status(204).json([])
                })
              }
            )(request, response, next)
          }
        )

        this.router.post(
          `${this.getApiPath('auth/login')}`,
          (request, response, next) => {
            Passport.authenticate('local-login', {}, (error, user, info) => {
              // Passport skips the verify callback entirely when either
              // credential is absent from the body.
              if (user === false) {
                return response.status(422).json({
                  errors: [
                    {
                      message: 'Please provide your email.',
                      field: 'email'
                    },
                    {
                      message: 'Please provide your password.',
                      field: 'password'
                    }
                  ]
                })
              }
              if (error || !user) {
                return response.status(400).json({
                  errors: error
                })
              }

              request.logIn(user, error => {
                if (error) {
                  return next(error)
                }

                return response.status(204).json([])
              })
            })(request, response, next)
          }
        )

        this.router.use(responseEnhancer())

        this.router.use((request, response, next) => {
          // set filter parameters
          resources.forEach(resource => {
            resource.data.filters.forEach(filter => {
              const filterFromBody = request.body.filters?.find(
                (bodyFitler: any) => bodyFitler.name === filter.config.shortName
              )

              request.manager.setFilterParams(
                filter.config.shortName,
                filterFromBody?.args || {}
              )
            })
          })

          next()
        })

        this.router.use(Csurf())

        // Mount every CMS route behind the authorize resolver.
        const cmsRoutes = [...getRoutes(config, this.config), ...this.routes()]

        cmsRoutes.forEach(route => {
          const path = route.config.path.startsWith('/')
            ? route.config.path
            : `/${route.config.path}`

          ;(this.router as any)[route.config.type.toLowerCase()](
            path,
            ...route.config.middleware.map(fn => AsyncHandler(fn)),
            AsyncHandler(async (request, response, next) => {
              await this.authorizeResolver(request as any, route)

              return next()
            }),
            AsyncHandler(async (request, response) =>
              route.config.handler(request, response)
            )
          )
        })

        app.use(`/${this.config.path}`, this.router)

        // Serve the SPA shell for the dashboard and any sub-path of it.
        app.get(`/${this.config.path}(/*)?`, async (request, response) => {
          const sortedScripts = request.scripts
            .filter(script => !script.chunk) // Only non-chunk scripts will be mounted. The chunked scripts will be fetched by webpack during load time.
            .sort(script => (baseScripts.includes(script.name) ? -1 : 0))

          response.send(
            Mustache.render(indexFileContent, {
              styles: request.styles,
              scripts: sortedScripts,
              // @ts-ignore
              user: request.user
                ? JSON.stringify({
                    // @ts-ignore
                    ...request.user
                  })
                : null,
              resources: JSON.stringify(
                request.config.resources.map(r => r.serialize())
              ),
              ctx: JSON.stringify({
                name: request.config.name,
                dashboardPath: this.config.path,
                apiPath: `/${this.config.path}/api`,
                serverUrl: request.config.serverUrl,
                pluginsConfig: currentCtx().pluginsConfig
              }),
              // Show the one-time registration screen until a first admin exists.
              shouldShowRegistrationScreen:
                (await request.manager.count(
                  this.resources.user.data.pascalCaseName
                )) === 0
            })
          )
        })

        app.use(
          '/tensei-assets',
          Static(Path.resolve(__dirname, '..', 'default-assets'))
        )
      })
  }
}
export const cms = () => new CmsPlugin() | the_stack |
import { ethers, network, upgrades, waffle } from "hardhat";
import { Signer } from "ethers";
import chai from "chai";
import { solidity } from "ethereum-waffle";
import "@openzeppelin/test-helpers";
import {
MockERC20,
MockERC20__factory,
MdexFactory,
MdexFactory__factory,
MdexPair,
MdexPair__factory,
MdexRouter__factory,
MdexRouter,
WETH,
WETH__factory,
WNativeRelayer__factory,
MdexRestrictedStrategyPartialCloseMinimizeTrading,
MdexRestrictedStrategyPartialCloseMinimizeTrading__factory,
SwapMining,
Oracle,
MockMdexWorker__factory,
MockMdexWorker,
Oracle__factory,
SwapMining__factory,
} from "../../../../../typechain";
import * as TestHelpers from "../../../../helpers/assert";
import { MdxToken } from "../../../../../typechain/MdxToken";
import { MdxToken__factory } from "../../../../../typechain/factories/MdxToken__factory";
import * as TimeHelpers from "../../../../helpers/time";
// Register waffle's solidity matchers (revertedWith, emit, withArgs, …) on chai.
chai.use(solidity);
const { expect } = chai;
describe("MdexRestrictedStrategyPartialCloseMinimizeTrading", () => {
const FOREVER = "2000000000";
const mdxPerBlock = "51600000000000000000";
/// Mdex-related instance(s)
let factory: MdexFactory;
let router: MdexRouter;
let lp: MdexPair;
let baseTokenWbnbLp: MdexPair;
let swapMining: SwapMining;
let oracle: Oracle;
/// MockMdexWorker-related instance(s)
let mockMdexWorker: MockMdexWorker;
let mockMdexEvilWorker: MockMdexWorker;
let mockMdexBaseTokenWbnbWorker: MockMdexWorker;
/// Token-related instance(s)
let mdxToken: MdxToken;
let wbnb: WETH;
let baseToken: MockERC20;
let farmingToken: MockERC20;
// available pool Ids
let pIds: number[];
/// Strategy instance(s)
let strat: MdexRestrictedStrategyPartialCloseMinimizeTrading;
// Accounts
let deployer: Signer;
let alice: Signer;
let bob: Signer;
let deployerAddress: string;
let aliceAddress: string;
let bobAddress: string;
// Contract Signer
let baseTokenAsAlice: MockERC20;
let baseTokenAsBob: MockERC20;
let baseTokenWbnbLpAsBob: MdexPair;
let lpAsAlice: MdexPair;
let lpAsBob: MdexPair;
let farmingTokenAsAlice: MockERC20;
let farmingTokenAsBob: MockERC20;
let routerAsAlice: MdexRouter;
let routerAsBob: MdexRouter;
let stratAsAlice: MdexRestrictedStrategyPartialCloseMinimizeTrading;
let stratAsBob: MdexRestrictedStrategyPartialCloseMinimizeTrading;
let mockMdexWorkerAsBob: MockMdexWorker;
let mockMdexEvilWorkerAsBob: MockMdexWorker;
let mockMdexBaseTokenWbnbWorkerAsBob: MockMdexWorker;
let wbnbAsAlice: WETH;
let wbnbAsBob: WETH;
  // One-shot deployment of everything the suite needs: mock tokens, the Mdex
  // DEX stack (factory / router / oracle / SwapMining), mock workers, the
  // WNativeRelayer, and the strategy under test. Loaded through
  // waffle.loadFixture in beforeEach, so state is snapshotted/restored
  // between tests instead of being redeployed.
  //
  // NOTE(review): statement ORDER matters — SwapMining's reward schedule
  // starts at the block number captured just before its deploy, and the
  // reward assertions below depend on exact block progression. Do not
  // reorder deployments.
  async function fixture() {
    [deployer, alice, bob] = await ethers.getSigners();
    [deployerAddress, aliceAddress, bobAddress] = await Promise.all([
      deployer.getAddress(),
      alice.getAddress(),
      bob.getAddress(),
    ]);
    /// Setup token stuffs
    const MockERC20 = (await ethers.getContractFactory("MockERC20", deployer)) as MockERC20__factory;
    baseToken = (await upgrades.deployProxy(MockERC20, ["BTOKEN", "BTOKEN", 18])) as MockERC20;
    await baseToken.deployed();
    await baseToken.mint(aliceAddress, ethers.utils.parseEther("2"));
    await baseToken.mint(bobAddress, ethers.utils.parseEther("2"));
    farmingToken = (await upgrades.deployProxy(MockERC20, ["FTOKEN", "FTOKEN", 18])) as MockERC20;
    await farmingToken.deployed();
    await farmingToken.mint(aliceAddress, ethers.utils.parseEther("40"));
    await farmingToken.mint(bobAddress, ethers.utils.parseEther("40"));
    const WBNB = (await ethers.getContractFactory("WETH", deployer)) as WETH__factory;
    wbnb = await WBNB.deploy();
    // Setup Mdex
    const MdxToken = (await ethers.getContractFactory("MdxToken", deployer)) as MdxToken__factory;
    mdxToken = await MdxToken.deploy();
    await mdxToken.addMinter(await deployer.getAddress());
    await mdxToken.mint(await deployer.getAddress(), ethers.utils.parseEther("100"));
    const MdexFactory = (await ethers.getContractFactory("MdexFactory", deployer)) as MdexFactory__factory;
    factory = await MdexFactory.deploy(deployerAddress);
    await factory.deployed();
    const MdexRouter = (await ethers.getContractFactory("MdexRouter", deployer)) as MdexRouter__factory;
    router = await MdexRouter.deploy(factory.address, wbnb.address);
    await router.deployed();
    const Oracle = (await ethers.getContractFactory("Oracle", deployer)) as Oracle__factory;
    oracle = await Oracle.deploy(factory.address);
    await oracle.deployed();
    // Capture the current block BEFORE deploying SwapMining: it is the
    // startBlock from which MDX trading rewards accrue.
    const blockNumber = await TimeHelpers.latestBlockNumber();
    const SwapMining = (await ethers.getContractFactory("SwapMining", deployer)) as SwapMining__factory;
    swapMining = await SwapMining.deploy(
      mdxToken.address,
      factory.address,
      oracle.address,
      router.address,
      baseToken.address,
      mdxPerBlock,
      blockNumber
    );
    await swapMining.deployed();
    await router.setSwapMining(swapMining.address);
    await factory.createPair(baseToken.address, farmingToken.address);
    await factory.createPair(baseToken.address, wbnb.address);
    lp = MdexPair__factory.connect(await factory.getPair(farmingToken.address, baseToken.address), deployer);
    baseTokenWbnbLp = MdexPair__factory.connect(await factory.getPair(wbnb.address, baseToken.address), deployer);
    await factory.addPair(lp.address);
    await factory.addPair(baseTokenWbnbLp.address);
    // 25 = 0.25% trading fee (9975/10000 in the router math used by the tests).
    await factory.setPairFees(lp.address, 25);
    await factory.setPairFees(baseTokenWbnbLp.address, 25);
    await mdxToken.addMinter(swapMining.address);
    await swapMining.addWhitelist(baseToken.address);
    await swapMining.addWhitelist(farmingToken.address);
    await swapMining.addWhitelist(wbnb.address);
    await swapMining.addPair(100, lp.address, false); // pid = 0
    await swapMining.addPair(0, baseTokenWbnbLp.address, false); // pid = 1 , no trading reward alloc point
    pIds = [0, 1];
    /// Setup MockMdexWorker
    const MockMdexWorker = (await ethers.getContractFactory("MockMdexWorker", deployer)) as MockMdexWorker__factory;
    mockMdexWorker = (await MockMdexWorker.deploy(
      lp.address,
      baseToken.address,
      farmingToken.address
    )) as MockMdexWorker;
    await mockMdexWorker.deployed();
    // "Evil" worker: identical deployment, but deliberately NOT whitelisted
    // on the strategy, to exercise the bad-worker revert path.
    mockMdexEvilWorker = (await MockMdexWorker.deploy(
      lp.address,
      baseToken.address,
      farmingToken.address
    )) as MockMdexWorker;
    await mockMdexEvilWorker.deployed();
    mockMdexBaseTokenWbnbWorker = (await MockMdexWorker.deploy(
      baseTokenWbnbLp.address,
      baseToken.address,
      wbnb.address
    )) as MockMdexWorker;
    await mockMdexBaseTokenWbnbWorker.deployed();
    /// Setup WNativeRelayer
    const WNativeRelayer = (await ethers.getContractFactory("WNativeRelayer", deployer)) as WNativeRelayer__factory;
    const wNativeRelayer = await WNativeRelayer.deploy(wbnb.address);
    await wNativeRelayer.deployed();
    const MdexRestrictedStrategyPartialCloseMinimizeTrading = (await ethers.getContractFactory(
      "MdexRestrictedStrategyPartialCloseMinimizeTrading",
      deployer
    )) as MdexRestrictedStrategyPartialCloseMinimizeTrading__factory;
    // Strategy is deployed behind an upgradeable proxy (initialize(router,
    // wbnb, relayer, mdx)).
    strat = (await upgrades.deployProxy(MdexRestrictedStrategyPartialCloseMinimizeTrading, [
      router.address,
      wbnb.address,
      wNativeRelayer.address,
      mdxToken.address,
    ])) as MdexRestrictedStrategyPartialCloseMinimizeTrading;
    await strat.deployed();
    // Whitelist the two legitimate workers; the evil worker stays out.
    await strat.setWorkersOk([mockMdexWorker.address, mockMdexBaseTokenWbnbWorker.address], true);
    await wNativeRelayer.setCallerOk([strat.address], true);
    // Assign contract signer
    baseTokenAsAlice = MockERC20__factory.connect(baseToken.address, alice);
    baseTokenAsBob = MockERC20__factory.connect(baseToken.address, bob);
    baseTokenWbnbLpAsBob = MdexPair__factory.connect(baseTokenWbnbLp.address, bob);
    farmingTokenAsAlice = MockERC20__factory.connect(farmingToken.address, alice);
    farmingTokenAsBob = MockERC20__factory.connect(farmingToken.address, bob);
    routerAsAlice = MdexRouter__factory.connect(router.address, alice);
    routerAsBob = MdexRouter__factory.connect(router.address, bob);
    lpAsAlice = MdexPair__factory.connect(lp.address, alice);
    lpAsBob = MdexPair__factory.connect(lp.address, bob);
    stratAsAlice = MdexRestrictedStrategyPartialCloseMinimizeTrading__factory.connect(strat.address, alice);
    stratAsBob = MdexRestrictedStrategyPartialCloseMinimizeTrading__factory.connect(strat.address, bob);
    mockMdexWorkerAsBob = MockMdexWorker__factory.connect(mockMdexWorker.address, bob);
    mockMdexEvilWorkerAsBob = MockMdexWorker__factory.connect(mockMdexEvilWorker.address, bob);
    mockMdexBaseTokenWbnbWorkerAsBob = MockMdexWorker__factory.connect(mockMdexBaseTokenWbnbWorker.address, bob);
    wbnbAsAlice = WETH__factory.connect(wbnb.address, alice);
    wbnbAsBob = WETH__factory.connect(wbnb.address, bob);
    // Set block base fee per gas to 0 (so native-BNB balance deltas in the
    // WBNB tests are not distorted by gas costs).
    await network.provider.send("hardhat_setNextBlockBaseFeePerGas", ["0x0"]);
  }
  // Restore the fixture snapshot before every test (fast EVM snapshot/revert,
  // not a full redeploy).
  beforeEach(async () => {
    await waffle.loadFixture(fixture);
  });
context("when the setOkWorkers caller is not an owner", async () => {
it("should be reverted", async () => {
await expect(stratAsBob.setWorkersOk([mockMdexEvilWorkerAsBob.address], true)).to.revertedWith(
"Ownable: caller is not the owner"
);
});
});
context("when the withdrawTradingRewards caller is not an owner", async () => {
it("should be reverted", async () => {
await expect(stratAsBob.withdrawTradingRewards(bobAddress)).to.revertedWith("Ownable: caller is not the owner");
});
});
context("when non-worker call the strat", async () => {
it("should revert", async () => {
await expect(
stratAsBob.execute(
bobAddress,
"0",
ethers.utils.defaultAbiCoder.encode(
["uint256", "uint256", "uint256"],
[ethers.utils.parseEther("0.5"), ethers.utils.parseEther("0.5"), ethers.utils.parseEther("0.5")]
)
)
).to.revertedWith("MdexRestrictedStrategyPartialCloseMinimizeTrading::onlyWhitelistedWorkers:: bad worker");
});
});
context("when caller worker hasn't been whitelisted", async () => {
it("should revert as bad worker", async () => {
await expect(
mockMdexEvilWorkerAsBob.work(
0,
bobAddress,
"0",
ethers.utils.defaultAbiCoder.encode(
["address", "bytes"],
[
strat.address,
ethers.utils.defaultAbiCoder.encode(
["uint256", "uint256", "uint256"],
[ethers.utils.parseEther("0.5"), ethers.utils.parseEther("0.5"), ethers.utils.parseEther("0.5")]
),
]
)
)
).to.revertedWith("MdexRestrictedStrategyPartialCloseMinimizeTrading::onlyWhitelistedWorkers:: bad worker");
});
});
context("when revoking whitelist workers", async () => {
it("should revert as bad worker", async () => {
await strat.setWorkersOk([mockMdexWorker.address], false);
await expect(
mockMdexWorkerAsBob.work(
0,
bobAddress,
"0",
ethers.utils.defaultAbiCoder.encode(
["address", "bytes"],
[
strat.address,
ethers.utils.defaultAbiCoder.encode(
["uint256", "uint256", "uint256"],
[ethers.utils.parseEther("0.5"), ethers.utils.parseEther("0.5"), ethers.utils.parseEther("0.5")]
),
]
)
)
).to.revertedWith("MdexRestrictedStrategyPartialCloseMinimizeTrading::onlyWhitelistedWorkers:: bad worker");
});
});
context("when bad calldata", async () => {
it("should revert", async () => {
await expect(mockMdexWorkerAsBob.work(0, bobAddress, "0", "0x1234")).to.reverted;
});
});
  // Happy-path and slippage scenarios for a BTOKEN/FTOKEN position.
  // Pool after setup: 4 BTOKEN + 80 FTOKEN; Bob's 8.944271909999158785 LP is
  // parked on the strategy so work() can liquidate it. The exact wei-level
  // expectations below depend on Mdex's 0.25% fee math — do not "round" them.
  context("when farming token is NOT WBNB", async () => {
    beforeEach(async () => {
      // Alice adds 40 FTOKEN + 2 BaseToken
      await baseTokenAsAlice.approve(router.address, ethers.utils.parseEther("2"));
      await farmingTokenAsAlice.approve(router.address, ethers.utils.parseEther("40"));
      await routerAsAlice.addLiquidity(
        baseToken.address,
        farmingToken.address,
        ethers.utils.parseEther("2"),
        ethers.utils.parseEther("40"),
        "0",
        "0",
        aliceAddress,
        FOREVER
      );
      // Bob adds 40 FTOKEN + 2 BaseToken
      await baseTokenAsBob.approve(router.address, ethers.utils.parseEther("2"));
      await farmingTokenAsBob.approve(router.address, ethers.utils.parseEther("40"));
      await routerAsBob.addLiquidity(
        baseToken.address,
        farmingToken.address,
        ethers.utils.parseEther("2"),
        ethers.utils.parseEther("40"),
        "0",
        "0",
        bobAddress,
        FOREVER
      );
      await lpAsBob.transfer(strat.address, ethers.utils.parseEther("8.944271909999158785"));
    });
    context("when maxLpTokenToLiquidate > LP from worker", async () => {
      it("should use all LP", async () => {
        // debt: 1 BTOKEN
        // LP token to liquidate:
        //  Math.min(888, 8.944271909999158785) = 8.944271909999158785 LP (40 FTOKEN + 2 BTOKEN)
        // maxReturnDebt: 888 base token
        const bobBaseTokenBefore = await baseToken.balanceOf(bobAddress);
        const bobFTOKENBefore = await farmingToken.balanceOf(bobAddress);
        await expect(
          mockMdexWorkerAsBob.work(
            0,
            bobAddress,
            ethers.utils.parseEther("1"),
            ethers.utils.defaultAbiCoder.encode(
              ["address", "bytes"],
              [
                strat.address,
                ethers.utils.defaultAbiCoder.encode(
                  ["uint256", "uint256", "uint256"],
                  [ethers.utils.parseEther("888"), ethers.utils.parseEther("888"), ethers.utils.parseEther("40")]
                ),
              ]
            )
          )
        )
          .to.emit(strat, "MdexRestrictedStrategyPartialCloseMinimizeTradingEvent")
          .withArgs(
            baseToken.address,
            farmingToken.address,
            ethers.utils.parseEther("8.944271909999158785"),
            ethers.utils.parseEther("1")
          );
        const bobBaseTokenAfter = await baseToken.balanceOf(bobAddress);
        const bobFTOKENAfter = await farmingToken.balanceOf(bobAddress);
        expect(await lp.balanceOf(strat.address)).to.be.eq(ethers.utils.parseEther("0"));
        expect(await lp.balanceOf(bobAddress)).to.be.eq(ethers.utils.parseEther("0"));
        expect(bobBaseTokenAfter.sub(bobBaseTokenBefore), "Bob (as Vault) should get 2 BTOKEN back").to.be.eq(
          ethers.utils.parseEther("2")
        );
        expect(bobFTOKENAfter.sub(bobFTOKENBefore), "Bob should get 40 FTOKEN back").to.be.eq(
          ethers.utils.parseEther("40")
        );
        const mdxBefore = await mdxToken.balanceOf(deployerAddress);
        // withdraw trading reward to deployer
        const withDrawTx = await strat.withdrawTradingRewards(deployerAddress);
        const mdxAfter = await mdxToken.balanceOf(deployerAddress);
        // get trading reward of the previous block
        const totalRewardPrev = await strat.getMiningRewards(pIds, { blockTag: Number(withDrawTx.blockNumber) - 1 });
        // Bracket access disambiguates SwapMining's overloaded reward()/reward(uint256).
        const withDrawBlockReward = await swapMining["reward()"]({ blockTag: withDrawTx.blockNumber });
        // If nothing had accrued before, no per-block reward is minted either.
        const totalReward = !totalRewardPrev.isZero() ? totalRewardPrev.add(withDrawBlockReward) : 0;
        expect(mdxAfter.sub(mdxBefore)).to.eq(totalReward);
      });
    });
    context("when maxReturnDebt > debt", async () => {
      it("should return all debt", async () => {
        // debt: 1 BTOKEN
        // LP token to liquidate: 4.472135954999579392 LP (20 FTOKEN + 1 BTOKEN)
        // maxReturnDebt: 888 base token
        const bobBaseTokenBefore = await baseToken.balanceOf(bobAddress);
        const bobFTOKENBefore = await farmingToken.balanceOf(bobAddress);
        await expect(
          mockMdexWorkerAsBob.work(
            0,
            bobAddress,
            ethers.utils.parseEther("1"),
            ethers.utils.defaultAbiCoder.encode(
              ["address", "bytes"],
              [
                strat.address,
                ethers.utils.defaultAbiCoder.encode(
                  ["uint256", "uint256", "uint256"],
                  [
                    ethers.utils.parseEther("4.472135954999579393"),
                    ethers.utils.parseEther("888"),
                    ethers.utils.parseEther("19.2"),
                  ]
                ),
              ]
            )
          )
        )
          .to.emit(strat, "MdexRestrictedStrategyPartialCloseMinimizeTradingEvent")
          .withArgs(
            baseToken.address,
            farmingToken.address,
            ethers.utils.parseEther("4.472135954999579393"),
            ethers.utils.parseEther("1")
          );
        // remove liquidity 50%: 4.472135954999579393 LP token (20 FTOKEN + 1 BTOKEN)
        // no trade
        const bobBaseTokenAfter = await baseToken.balanceOf(bobAddress);
        const bobFTOKENAfter = await farmingToken.balanceOf(bobAddress);
        expect(await lp.balanceOf(strat.address)).to.be.eq(ethers.utils.parseEther("0"));
        expect(await lp.balanceOf(bobAddress)).to.be.eq(ethers.utils.parseEther("0"));
        expect(bobBaseTokenAfter.sub(bobBaseTokenBefore), "Bob (as Vault) should get 1 BTOKEN back").to.be.eq(
          ethers.utils.parseEther("1")
        );
        expect(bobFTOKENAfter.sub(bobFTOKENBefore), "Bob should get 20.000000000000000002 FTOKEN back").to.be.eq(
          ethers.utils.parseEther("20.000000000000000002")
        );
        const mdxBefore = await mdxToken.balanceOf(deployerAddress);
        // withdraw trading reward to deployer
        const withDrawTx = await strat.withdrawTradingRewards(deployerAddress);
        const mdxAfter = await mdxToken.balanceOf(deployerAddress);
        // get trading reward of the previous block
        const totalRewardPrev = await strat.getMiningRewards(pIds, { blockTag: Number(withDrawTx.blockNumber) - 1 });
        const withDrawBlockReward = await swapMining["reward()"]({ blockTag: withDrawTx.blockNumber });
        const totalReward = !totalRewardPrev.isZero() ? totalRewardPrev.add(withDrawBlockReward) : 0;
        expect(mdxAfter.sub(mdxBefore)).to.eq(totalReward);
      });
    });
    context("when no trade (maxReturnDebt <= received BTOKEN from LP token)", async () => {
      context("when farming tokens received < slippage", async () => {
        it("should revert", async () => {
          // LP token to liquidate: 4.472135954999579392 Lp token (20 farming token + 1 base token)
          // maxReturnDebt: 0.8 base token
          // minFarmingToken (25) exceeds the ~20 FTOKEN the LP yields -> revert.
          await expect(
            mockMdexWorkerAsBob.work(
              0,
              bobAddress,
              ethers.utils.parseEther("2"),
              ethers.utils.defaultAbiCoder.encode(
                ["address", "bytes"],
                [
                  strat.address,
                  ethers.utils.defaultAbiCoder.encode(
                    ["uint256", "uint256", "uint256"],
                    [
                      ethers.utils.parseEther("4.472135954999579393"),
                      ethers.utils.parseEther("0.8"),
                      ethers.utils.parseEther("25"),
                    ]
                  ),
                ]
              )
            )
          ).to.revertedWith(
            "MdexRestrictedStrategyPartialCloseMinimizeTrading::execute:: insufficient farming tokens received"
          );
        });
      });
      context("when farming tokens received >= slippage", async () => {
        it("should success", async () => {
          // LP token to liquidate: 4.472135954999579392 Lp token (20 farming token + 1 base token)
          // maxReturnDebt: 0.8 base token
          const bobBaseTokenBefore = await baseToken.balanceOf(bobAddress);
          const bobFTOKENBefore = await farmingToken.balanceOf(bobAddress);
          await expect(
            mockMdexWorkerAsBob.work(
              0,
              bobAddress,
              ethers.utils.parseEther("2"),
              ethers.utils.defaultAbiCoder.encode(
                ["address", "bytes"],
                [
                  strat.address,
                  ethers.utils.defaultAbiCoder.encode(
                    ["uint256", "uint256", "uint256"],
                    [
                      ethers.utils.parseEther("4.472135954999579393"),
                      ethers.utils.parseEther("0.8"),
                      ethers.utils.parseEther("19.2"),
                    ]
                  ),
                ]
              )
            )
          )
            .to.emit(strat, "MdexRestrictedStrategyPartialCloseMinimizeTradingEvent")
            .withArgs(
              baseToken.address,
              farmingToken.address,
              ethers.utils.parseEther("4.472135954999579393"),
              ethers.utils.parseEther("0.8")
            );
          // remove liquidity 50%: 4.472135954999579393 LP token (20 farming token + 1 base token)
          // no trade
          const bobBaseTokenAfter = await baseToken.balanceOf(bobAddress);
          const bobFTOKENAfter = await farmingToken.balanceOf(bobAddress);
          expect(await lp.balanceOf(strat.address)).to.be.eq(ethers.utils.parseEther("0"));
          expect(await lp.balanceOf(bobAddress)).to.be.eq(ethers.utils.parseEther("0"));
          TestHelpers.assertAlmostEqual(
            ethers.utils.parseEther("1").toString(),
            bobBaseTokenAfter.sub(bobBaseTokenBefore).toString()
          );
          TestHelpers.assertAlmostEqual(
            ethers.utils.parseEther("20").toString(),
            bobFTOKENAfter.sub(bobFTOKENBefore).toString()
          );
          const mdxBefore = await mdxToken.balanceOf(deployerAddress);
          // withdraw trading reward to deployer
          const withDrawTx = await strat.withdrawTradingRewards(deployerAddress);
          const mdxAfter = await mdxToken.balanceOf(deployerAddress);
          // get trading reward of the previous block
          const totalRewardPrev = await strat.getMiningRewards(pIds, { blockTag: Number(withDrawTx.blockNumber) - 1 });
          const withDrawBlockReward = await swapMining["reward()"]({ blockTag: withDrawTx.blockNumber });
          const totalReward = !totalRewardPrev.isZero() ? totalRewardPrev.add(withDrawBlockReward) : 0;
          expect(mdxAfter.sub(mdxBefore)).to.eq(totalReward);
        });
      });
    });
    context("when some trade (maxReturnDebt > received BTOKEN from LP)", async () => {
      context(
        "when FTOKEN not enough to cover maxReturnDebt (maxReturnDebt > (BtokenFromLp + BtokenFromSellFtoken))",
        async () => {
          // LP token to liquidate: 0.894427190999915878 Lp token (4 FTOKEN + 0.2 BTOKEN) ~ 0.4 BTOKEN
          // maxReturnDebt: 0.5 BTOKEN
          it("should revert", async () => {
            await expect(
              mockMdexWorkerAsBob.work(
                0,
                bobAddress,
                ethers.utils.parseEther("2"),
                ethers.utils.defaultAbiCoder.encode(
                  ["address", "bytes"],
                  [
                    strat.address,
                    ethers.utils.defaultAbiCoder.encode(
                      ["uint256", "uint256", "uint256"],
                      [
                        ethers.utils.parseEther("0.894427190999915878"),
                        ethers.utils.parseEther("0.5"),
                        ethers.utils.parseEther("0"),
                      ]
                    ),
                  ]
                )
              )
            ).to.revertedWith("MdexRouter: EXCESSIVE_INPUT_AMOUNT");
          });
        }
      );
      context("when farming tokens received < slippage", async () => {
        // LP token to liquidate: 0.894427190999915878 Lp token (4 farming token + 0.2 base token)
        // maxReturnDebt: 0.24 base token
        it("should revert", async () => {
          await expect(
            mockMdexWorkerAsBob.work(
              0,
              bobAddress,
              ethers.utils.parseEther("2"),
              ethers.utils.defaultAbiCoder.encode(
                ["address", "bytes"],
                [
                  strat.address,
                  ethers.utils.defaultAbiCoder.encode(
                    ["uint256", "uint256", "uint256"],
                    [
                      ethers.utils.parseEther("0.894427190999915878"),
                      ethers.utils.parseEther("0.24"),
                      ethers.utils.parseEther("3.2"),
                    ]
                  ),
                ]
              )
            )
          ).to.revertedWith(
            "MdexRestrictedStrategyPartialCloseMinimizeTrading::execute:: insufficient farming tokens received"
          );
        });
      });
      context("when farming tokens received >= slippage", async () => {
        // LP token to liquidate: 0.894427190999915878 Lp token (4 FTOKEN + 0.2 BTOKEN)
        // maxReturnDebt: 0.24 BTOKEN
        it("should be successfully", async () => {
          const bobBaseTokenBefore = await baseToken.balanceOf(bobAddress);
          const bobFTOKENBefore = await farmingToken.balanceOf(bobAddress);
          await expect(
            mockMdexWorkerAsBob.work(
              0,
              bobAddress,
              ethers.utils.parseEther("2"),
              ethers.utils.defaultAbiCoder.encode(
                ["address", "bytes"],
                [
                  strat.address,
                  ethers.utils.defaultAbiCoder.encode(
                    ["uint256", "uint256", "uint256"],
                    [
                      ethers.utils.parseEther("0.894427190999915878"),
                      ethers.utils.parseEther("0.24"),
                      ethers.utils.parseEther("3.168"),
                    ]
                  ),
                ]
              )
            )
          )
            .to.emit(strat, "MdexRestrictedStrategyPartialCloseMinimizeTradingEvent")
            .withArgs(
              baseToken.address,
              farmingToken.address,
              ethers.utils.parseEther("0.894427190999915878"),
              ethers.utils.parseEther("0.24")
            );
          // remove liquidity 10%: 0.894427190999915878 LP token (4 farming token + 0.2 base token)
          // trade
          // exactIn = (exactOut * reserveIn * 10000) / (tradingFee * (reserveOut - exactOut))
          // exactIn = (0.04 * 76 * 10000) / (9975 * (3.8 - 0.04))
          // exactIn = 0.810536980749747
          // remainingFarmingToken = 4 - 0.810536980749747 = 3.189463019250253
          const bobBaseTokenAfter = await baseToken.balanceOf(bobAddress);
          const bobFTOKENAfter = await farmingToken.balanceOf(bobAddress);
          expect(await lp.balanceOf(strat.address)).to.be.eq(ethers.utils.parseEther("0"));
          expect(await lp.balanceOf(bobAddress)).to.be.eq(ethers.utils.parseEther("0"));
          TestHelpers.assertAlmostEqual(
            ethers.utils.parseEther("0.24").toString(),
            bobBaseTokenAfter.sub(bobBaseTokenBefore).toString()
          );
          TestHelpers.assertAlmostEqual(
            ethers.utils.parseEther("3.189463019250253").toString(),
            bobFTOKENAfter.sub(bobFTOKENBefore).toString()
          );
          const mdxBefore = await mdxToken.balanceOf(deployerAddress);
          // withdraw trading reward to deployer
          const withDrawTx = await strat.withdrawTradingRewards(deployerAddress);
          const mdxAfter = await mdxToken.balanceOf(deployerAddress);
          // get trading reward of the previous block
          const totalRewardPrev = await strat.getMiningRewards(pIds, { blockTag: Number(withDrawTx.blockNumber) - 1 });
          const withDrawBlockReward = await swapMining["reward()"]({ blockTag: withDrawTx.blockNumber });
          const totalReward = !totalRewardPrev.isZero() ? totalRewardPrev.add(withDrawBlockReward) : 0;
          expect(mdxAfter.sub(mdxBefore)).to.eq(totalReward);
        });
      });
    });
  });
  // Same scenarios for a BTOKEN/WBNB position: the farming side is native
  // BNB, paid out through the WNativeRelayer, so assertions track Bob's
  // native balance (gasPrice: 0 keeps the deltas exact). Pool pid 1 has no
  // alloc point, so MDX rewards are always zero here.
  context("when the farming token is WBNB", () => {
    beforeEach(async () => {
      // Alice wrap BNB
      await wbnbAsAlice.deposit({ value: ethers.utils.parseEther("0.1") });
      // Alice adds 0.1 WBNB + 1 BaseToken
      await baseTokenAsAlice.approve(router.address, ethers.utils.parseEther("1"));
      await wbnbAsAlice.approve(router.address, ethers.utils.parseEther("0.1"));
      await routerAsAlice.addLiquidity(
        baseToken.address,
        wbnb.address,
        ethers.utils.parseEther("1"),
        ethers.utils.parseEther("0.1"),
        "0",
        "0",
        aliceAddress,
        FOREVER
      );
      // Bob wrap BNB
      await wbnbAsBob.deposit({ value: ethers.utils.parseEther("1") });
      // Bob tries to add 1 WBNB + 1 BaseToken (but obviously can only add 0.1 WBNB)
      await baseTokenAsBob.approve(router.address, ethers.utils.parseEther("1"));
      await wbnbAsBob.approve(router.address, ethers.utils.parseEther("1"));
      await routerAsBob.addLiquidity(
        baseToken.address,
        wbnb.address,
        ethers.utils.parseEther("1"),
        ethers.utils.parseEther("1"),
        "0",
        "0",
        bobAddress,
        FOREVER
      );
      expect(await wbnb.balanceOf(bobAddress)).to.be.eq(ethers.utils.parseEther("0.9"));
      expect(await baseTokenWbnbLp.balanceOf(bobAddress)).to.be.eq(ethers.utils.parseEther("0.316227766016837933"));
      await baseTokenWbnbLpAsBob.transfer(
        mockMdexBaseTokenWbnbWorker.address,
        ethers.utils.parseEther("0.316227766016837933")
      );
    });
    // NOTE(review): "maxLpTokenToLiquiate" in the description below is a typo
    // of "maxLpTokenToLiquidate" (cf. the NOT-WBNB twin). It is a runtime
    // string (test title), left untouched here; fix in a dedicated change.
    context("when maxLpTokenToLiquiate > LP from worker", async () => {
      it("should use all LP", async () => {
        // debt: 0.5 BTOKEN
        // LP token to liquidate:
        //  Math.min(888, 0.316227766016837933) = 0.316227766016837933 LP (0.1 BNB + 1 BTOKEN)
        // maxReturnDebt: 888 base token
        const bobBaseTokenBefore = await baseToken.balanceOf(bobAddress);
        const bobBnbBefore = await bob.getBalance();
        await expect(
          mockMdexBaseTokenWbnbWorkerAsBob.work(
            0,
            bobAddress,
            ethers.utils.parseEther("0.5"),
            ethers.utils.defaultAbiCoder.encode(
              ["address", "bytes"],
              [
                strat.address,
                ethers.utils.defaultAbiCoder.encode(
                  ["uint256", "uint256", "uint256"],
                  [ethers.utils.parseEther("888"), ethers.utils.parseEther("888"), ethers.utils.parseEther("0.1")]
                ),
              ]
            ),
            { gasPrice: 0 }
          )
        )
          .to.emit(strat, "MdexRestrictedStrategyPartialCloseMinimizeTradingEvent")
          .withArgs(
            baseToken.address,
            wbnb.address,
            ethers.utils.parseEther("0.316227766016837933"),
            ethers.utils.parseEther("0.5")
          );
        // no trade
        const bobBaseTokenAfter = await baseToken.balanceOf(bobAddress);
        const bobBnbAfter = await bob.getBalance();
        // NOTE(review): these check `lp` (the BTOKEN/FTOKEN pair), not
        // `baseTokenWbnbLp` — looks like a copy/paste from the NOT-WBNB
        // section and is trivially 0 either way; confirm intent.
        expect(await lp.balanceOf(strat.address)).to.be.eq(ethers.utils.parseEther("0"));
        expect(await lp.balanceOf(bobAddress)).to.be.eq(ethers.utils.parseEther("0"));
        expect(bobBaseTokenAfter.sub(bobBaseTokenBefore), "Bob (as Vault) should get 1 BTOKEN back.").to.be.eq(
          ethers.utils.parseEther("1")
        );
        expect(bobBnbAfter.sub(bobBnbBefore), "Bob should get 0.1 BNB back.").to.be.eq(ethers.utils.parseEther("0.1"));
        const mdxBefore = await mdxToken.balanceOf(deployerAddress);
        // withdraw trading reward to deployer
        const withDrawTx = await strat.withdrawTradingRewards(deployerAddress);
        const mdxAfter = await mdxToken.balanceOf(deployerAddress);
        // no alloc point for this pair
        expect(mdxAfter.sub(mdxBefore)).to.eq(0);
      });
    });
    context("when maxReturnDebt > debt", async () => {
      it("should return all debt", async () => {
        // debt: 0.5 BTOKEN
        // LP token to liquidate: 0.158113883008418966 LP (0.05 BNB + 0.5 BTOKEN)
        // maxReturnDebt: 888 base token
        const bobBaseTokenBefore = await baseToken.balanceOf(bobAddress);
        const bobBnbBefore = await bob.getBalance();
        await expect(
          mockMdexBaseTokenWbnbWorkerAsBob.work(
            0,
            bobAddress,
            ethers.utils.parseEther("0.5"),
            ethers.utils.defaultAbiCoder.encode(
              ["address", "bytes"],
              [
                strat.address,
                ethers.utils.defaultAbiCoder.encode(
                  ["uint256", "uint256", "uint256"],
                  [
                    ethers.utils.parseEther("0.158113883008418966"),
                    ethers.utils.parseEther("888"),
                    ethers.utils.parseEther("0.0495"),
                  ]
                ),
              ]
            ),
            { gasPrice: 0 }
          )
        )
          .to.emit(strat, "MdexRestrictedStrategyPartialCloseMinimizeTradingEvent")
          .withArgs(
            baseToken.address,
            wbnb.address,
            ethers.utils.parseEther("0.158113883008418966"),
            ethers.utils.parseEther("0.5")
          );
        // no trade
        const bobBaseTokenAfter = await baseToken.balanceOf(bobAddress);
        const bobBnbAfter = await bob.getBalance();
        // NOTE(review): `lp` vs `baseTokenWbnbLp` — see note in the first
        // WBNB test above.
        expect(await lp.balanceOf(strat.address)).to.be.eq(ethers.utils.parseEther("0"));
        expect(await lp.balanceOf(bobAddress)).to.be.eq(ethers.utils.parseEther("0"));
        expect(bobBaseTokenAfter.sub(bobBaseTokenBefore), "Bob (as Vault) should get 0.5 BTOKEN back.").to.be.eq(
          ethers.utils.parseEther("0.5")
        );
        expect(bobBnbAfter.sub(bobBnbBefore), "Bob should get 0.049999999999999998 BNB back.").to.be.eq(
          ethers.utils.parseEther("0.049999999999999998")
        );
        const mdxBefore = await mdxToken.balanceOf(deployerAddress);
        // withdraw trading reward to deployer
        const withDrawTx = await strat.withdrawTradingRewards(deployerAddress);
        const mdxAfter = await mdxToken.balanceOf(deployerAddress);
        // no alloc point for this pair
        expect(mdxAfter.sub(mdxBefore)).to.eq(0);
      });
    });
    context("when no trade (maxReturnDebt <= received BTOKEN from LP token)", async () => {
      context("when farming tokens received < slippage", async () => {
        it("should revert", async () => {
          // LP token to liquidate: 0.158113883008418966 Lp token (0.05 FTOKEN + 0.5 BTOKEN)
          // maxReturnDebt: 0.1 base token
          // minFarmingToken (0.5 BNB) exceeds the ~0.05 BNB the LP yields -> revert.
          await expect(
            mockMdexBaseTokenWbnbWorkerAsBob.work(
              0,
              bobAddress,
              ethers.utils.parseEther("1"),
              ethers.utils.defaultAbiCoder.encode(
                ["address", "bytes"],
                [
                  strat.address,
                  ethers.utils.defaultAbiCoder.encode(
                    ["uint256", "uint256", "uint256"],
                    [
                      ethers.utils.parseEther("0.158113883008418966"),
                      ethers.utils.parseEther("0.1"),
                      ethers.utils.parseEther("0.5"),
                    ]
                  ),
                ]
              )
            )
          ).to.revertedWith(
            "MdexRestrictedStrategyPartialCloseMinimizeTrading::execute:: insufficient farming tokens received"
          );
        });
      });
      context("when farming tokens received >= slippage", async () => {
        it("should success", async () => {
          // LP token to liquidate: 0.158113883008418966 Lp token (0.05 FTOKEN + 0.5 BTOKEN)
          // maxReturnDebt: 0.1 BTOKEN
          const bobBaseTokenBefore = await baseToken.balanceOf(bobAddress);
          const bobBnbBefore = await ethers.provider.getBalance(bobAddress);
          await expect(
            mockMdexBaseTokenWbnbWorkerAsBob.work(
              0,
              bobAddress,
              ethers.utils.parseEther("1"),
              ethers.utils.defaultAbiCoder.encode(
                ["address", "bytes"],
                [
                  strat.address,
                  ethers.utils.defaultAbiCoder.encode(
                    ["uint256", "uint256", "uint256"],
                    [
                      ethers.utils.parseEther("0.158113883008418966"),
                      ethers.utils.parseEther("0.1"),
                      ethers.utils.parseEther("0.0495"),
                    ]
                  ),
                ]
              ),
              { gasPrice: 0 }
            )
          )
            .to.emit(strat, "MdexRestrictedStrategyPartialCloseMinimizeTradingEvent")
            .withArgs(
              baseToken.address,
              wbnb.address,
              ethers.utils.parseEther("0.158113883008418966"),
              ethers.utils.parseEther("0.1")
            );
          // remove liquidity 50%: 0.158113883008418966 LP token (0.05 farming token + 0.5 base token)
          // no trade
          const bobBaseTokenAfter = await baseToken.balanceOf(bobAddress);
          const bobBnbAfter = await ethers.provider.getBalance(bobAddress);
          expect(await lp.balanceOf(strat.address)).to.be.eq(ethers.utils.parseEther("0"));
          expect(await lp.balanceOf(bobAddress)).to.be.eq(ethers.utils.parseEther("0"));
          TestHelpers.assertAlmostEqual(
            ethers.utils.parseEther("0.5").toString(),
            bobBaseTokenAfter.sub(bobBaseTokenBefore).toString()
          );
          TestHelpers.assertAlmostEqual(
            ethers.utils.parseEther("0.05").toString(),
            bobBnbAfter.sub(bobBnbBefore).toString()
          );
          const mdxBefore = await mdxToken.balanceOf(deployerAddress);
          // withdraw trading reward to deployer
          const withDrawTx = await strat.withdrawTradingRewards(deployerAddress);
          const mdxAfter = await mdxToken.balanceOf(deployerAddress);
          // no alloc point for this pair
          expect(mdxAfter.sub(mdxBefore)).to.eq(0);
        });
      });
    });
    context("when some trade (maxReturnDebt > received BTOKEN from LP)", async () => {
      context(
        "when FTOKEN not enough to cover maxReturnDebt (maxReturnDebt > (BtokenFromLp + BtokenFromSellFtoken))",
        async () => {
          // LP token to liquidate: 0.158113883008418966 Lp token (0.05 FTOKEN + 0.5 BTOKEN)
          // maxReturnDebt: 1 BTOKEN
          it("should be revert", async () => {
            await expect(
              mockMdexBaseTokenWbnbWorkerAsBob.work(
                0,
                bobAddress,
                ethers.utils.parseEther("1"),
                ethers.utils.defaultAbiCoder.encode(
                  ["address", "bytes"],
                  [
                    strat.address,
                    ethers.utils.defaultAbiCoder.encode(
                      ["uint256", "uint256", "uint256"],
                      [
                        ethers.utils.parseEther("0.158113883008418966"),
                        ethers.utils.parseEther("1"),
                        ethers.utils.parseEther("0.0495"),
                      ]
                    ),
                  ]
                ),
                { gasPrice: 0 }
              )
            ).to.revertedWith("MdexRouter: EXCESSIVE_INPUT_AMOUNT");
          });
        }
      );
      context("when farming tokens received < slippage", async () => {
        it("should revert", async () => {
          // LP token to liquidate: 0.158113883008418966 Lp token (0.05 FTOKEN + 0.5 BTOKEN)
          // maxReturnDebt: 1 BTOKEN
          await expect(
            mockMdexBaseTokenWbnbWorkerAsBob.work(
              0,
              bobAddress,
              ethers.utils.parseEther("1"),
              ethers.utils.defaultAbiCoder.encode(
                ["address", "bytes"],
                [
                  strat.address,
                  ethers.utils.defaultAbiCoder.encode(
                    ["uint256", "uint256", "uint256"],
                    [
                      ethers.utils.parseEther("0.158113883008418966"),
                      ethers.utils.parseEther("0.6"),
                      ethers.utils.parseEther("0.4"),
                    ]
                  ),
                ]
              ),
              { gasPrice: 0 }
            )
          ).to.revertedWith(
            "MdexRestrictedStrategyPartialCloseMinimizeTrading::execute:: insufficient farming tokens received"
          );
        });
      });
      context("when farming tokens received >= slippage", async () => {
        it("should be successfully", async () => {
          // LP token to liquidate: 0.158113883008418966 Lp token (0.05 FTOKEN + 0.5 BTOKEN)
          // maxReturnDebt: 1 BTOKEN
          const bobBaseTokenBefore = await baseToken.balanceOf(bobAddress);
          const bobBnbBefore = await ethers.provider.getBalance(bobAddress);
          await expect(
            mockMdexBaseTokenWbnbWorkerAsBob.work(
              0,
              bobAddress,
              ethers.utils.parseEther("1"),
              ethers.utils.defaultAbiCoder.encode(
                ["address", "bytes"],
                [
                  strat.address,
                  ethers.utils.defaultAbiCoder.encode(
                    ["uint256", "uint256", "uint256"],
                    [
                      ethers.utils.parseEther("0.158113883008418966"),
                      ethers.utils.parseEther("0.6"),
                      ethers.utils.parseEther("0.037"),
                    ]
                  ),
                ]
              ),
              { gasPrice: 0 }
            )
          )
            .to.emit(strat, "MdexRestrictedStrategyPartialCloseMinimizeTradingEvent")
            .withArgs(
              baseToken.address,
              wbnb.address,
              ethers.utils.parseEther("0.158113883008418966"),
              ethers.utils.parseEther("0.6")
            );
          // remove liquidity 50%: 0.158113883008418966 LP token (0.05 farming token + 0.5 base token)
          // trade
          // exactIn = (exactOut * reserveIn * 10000) / (tradingFee * (reserveOut - exactOut))
          // exactIn = (0.1 * 0.15 * 10000) / (9975 * (1.5 - 0.1))
          // exactIn = 0.010741138560687433
          // remainingFarmingToken = 0.05 - 0.010741138560687433 = 0.03925886143931257
          const bobBaseTokenAfter = await baseToken.balanceOf(bobAddress);
          const bobBnbAfter = await ethers.provider.getBalance(bobAddress);
          expect(await lp.balanceOf(strat.address)).to.be.eq(ethers.utils.parseEther("0"));
          expect(await lp.balanceOf(bobAddress)).to.be.eq(ethers.utils.parseEther("0"));
          TestHelpers.assertAlmostEqual(
            ethers.utils.parseEther("0.6").toString(),
            bobBaseTokenAfter.sub(bobBaseTokenBefore).toString()
          );
          TestHelpers.assertAlmostEqual(
            ethers.utils.parseEther("0.03925886143931257").toString(),
            bobBnbAfter.sub(bobBnbBefore).toString()
          );
          const mdxBefore = await mdxToken.balanceOf(deployerAddress);
          // withdraw trading reward to deployer
          const withDrawTx = await strat.withdrawTradingRewards(deployerAddress);
          const mdxAfter = await mdxToken.balanceOf(deployerAddress);
          // no alloc point for this pair
          expect(mdxAfter.sub(mdxBefore)).to.eq(0);
        });
      });
    });
  });
}); | the_stack |
/*
 * Flat design-palette lookup.
 *
 * Keys are either `<theme>_<shade>` — where shade runs from 1 (lightest) up to 22
 * (darkest) — or an un-themed name (`white`, `black`). Values are hex color strings.
 * Entries are intentionally unsorted here; `ColorsByTheme` (exported below)
 * regroups them by theme name and shade index.
 */
const COLORS = {
  grey_7: '#898989',
  brick_20: '#B77B6A',
  brick_21: '#8B574F',
  brick_22: '#392421',
  lime_8: '#7DC240',
  lime_9: '#76B73D',
  lime_4: '#CFEDB5',
  lime_5: '#B6E490',
  lime_6: '#9EDB6B',
  lime_7: '#8ECF56',
  lime_1: '#F3FBED',
  lime_2: '#EBF8E1',
  lime_3: '#E3F5D4',
  orange_22: '#32221D',
  orange_20: '#DDA37C',
  orange_21: '#A2705B',
  stone_9: '#716852',
  stone_8: '#776E57',
  stone_20: '#4D4A44',
  stone_1: '#F3F2EF',
  amber_22: '#32251D',
  stone_21: '#282623',
  amber_20: '#DDB27C',
  amber_21: '#A2785B',
  navy_17: '#CAD1E6',
  navy_16: '#18264D',
  navy_15: '#132559',
  navy_14: '#162A65',
  navy_13: '#1A3177',
  navy_12: '#1E3788',
  navy_11: '#223F9A',
  navy_10: '#2647AC',
  navy_19: '#7989B7',
  navy_18: '#9CAAD3',
  teal_9: '#1C8EB4',
  teal_8: '#1E96BE',
  teal_5: '#79C7E3',
  teal_4: '#A6DAEC',
  teal_7: '#35A6CC',
  teal_6: '#4DB5D9',
  teal_1: '#E9F6FA',
  teal_3: '#CBE9F4',
  teal_2: '#DAF0F7',
  teal_19: '#73A6B7',
  teal_18: '#96C3D2',
  teal_11: '#17779A',
  teal_10: '#1A85AB',
  teal_13: '#125C77',
  teal_12: '#146988',
  teal_15: '#0D4559',
  teal_14: '#0F4E65',
  teal_17: '#C7DDE5',
  teal_16: '#103B49',
  gold_20: '#DDBE7C',
  gold_21: '#A27E5B',
  gold_22: '#32271D',
  olive_9: '#B8C436',
  olive_8: '#C3D039',
  olive_7: '#CFDC4F',
  olive_6: '#DBE765',
  olive_5: '#E4ED8B',
  olive_4: '#EDF3B2',
  olive_3: '#F5F8D2',
  olive_2: '#F8FADF',
  olive_1: '#FBFCEC',
  green_22: '#212A1F',
  green_20: '#5A735A',
  green_21: '#2F3C2F',
  indigo_17: '#CCC7E4',
  indigo_16: '#1B1247',
  indigo_15: '#1C0F58',
  indigo_14: '#201163',
  indigo_13: '#261474',
  indigo_12: '#2C1685',
  indigo_11: '#321A97',
  indigo_10: '#371DA8',
  indigo_19: '#7E71B2',
  indigo_18: '#A095CF',
  blue_22: '#222A34',
  blue_20: '#536A81',
  blue_21: '#2D3946',
  olive_22: '#28291C',
  olive_20: '#878A5E',
  olive_21: '#484A32',
  violet_16: '#510E30',
  violet_17: '#E0C1D8',
  violet_14: '#6B083F',
  violet_15: '#620736',
  violet_12: '#860A5A',
  violet_13: '#79094D',
  violet_10: '#A40D77',
  violet_11: '#950C69',
  dirt_16: '#261A10',
  dirt_17: '#BCB6B1',
  dirt_14: '#322113',
  dirt_15: '#2D1D10',
  dirt_12: '#442D19',
  dirt_13: '#3B2716',
  violet_18: '#C78BB8',
  violet_19: '#B1699C',
  stone_15: '#373227',
  stone_14: '#3E392D',
  stone_17: '#C6C4C0',
  stone_16: '#2E2A22',
  stone_11: '#5F5845',
  stone_10: '#6A624D',
  stone_13: '#494335',
  stone_12: '#544D3D',
  stone_19: '#706C63',
  stone_18: '#938F87',
  aqua_3: '#CAF2F4',
  aqua_2: '#D9F6F7',
  aqua_1: '#E8FAFA',
  aqua_7: '#2FC5CC',
  aqua_6: '#47D3D9',
  aqua_5: '#75DEE3',
  aqua_4: '#A3E9EC',
  aqua_9: '#15AEB4',
  aqua_8: '#17B8BE',
  orange_7: '#FE891A',
  orange_6: '#FF9833',
  orange_5: '#FFB266',
  orange_4: '#FFCB99',
  orange_3: '#FFE1C4',
  orange_2: '#FFEAD5',
  orange_1: '#FFF2E6',
  orange_9: '#FA7400',
  orange_8: '#FD7900',
  purple_13: '#510869',
  purple_11: '#670B8C',
  purple_10: '#720C9D',
  purple_7: '#9226BE',
  purple_6: '#A13ECD',
  purple_5: '#B86EDA',
  purple_4: '#D09FE6',
  purple_3: '#E4C7F1',
  purple_2: '#ECD7F5',
  purple_1: '#F3E7F9',
  blue_18: '#99B6D3',
  blue_17: '#C9D7E6',
  blue_16: '#16314D',
  blue_15: '#103459',
  blue_14: '#133B65',
  blue_13: '#164677',
  blue_12: '#195188',
  purple_9: '#7A0DA6',
  blue_10: '#2067AC',
  orange_13: '#D65200',
  orange_12: '#E45D00',
  orange_11: '#ED6600',
  orange_10: '#F26C00',
  orange_17: '#F8E2CC',
  orange_16: '#8C330A',
  orange_15: '#B93C00',
  orange_14: '#C84600',
  orange_19: '#F6BD8A',
  orange_18: '#F8CCA1',
  magenta_11: '#B51241',
  magenta_10: '#C5154A',
  magenta_13: '#970E2D',
  magenta_12: '#A51037',
  magenta_15: '#7E0A1D',
  magenta_14: '#880B23',
  magenta_17: '#EDCAD6',
  magenta_16: '#63101D',
  magenta_19: '#D2809A',
  magenta_18: '#E29DB4',
  stone_3: '#E2E0DA',
  stone_2: '#EBE9E4',
  stone_5: '#B3AD9E',
  stone_4: '#CDC9BF',
  stone_7: '#89806B',
  stone_6: '#9A927E',
  blue_9: '#226DB5',
  blue_8: '#2473BD',
  blue_7: '#3B85CC',
  blue_6: '#5297DA',
  blue_5: '#7DB1E3',
  blue_4: '#A9CBED',
  blue_3: '#CDE1F4',
  blue_2: '#DCEAF7',
  blue_1: '#EAF2FA',
  gold_9: '#FAC200',
  gold_8: '#FDC900',
  gold_5: '#FFE466',
  gold_4: '#FFEB8C',
  gold_7: '#FED21A',
  gold_6: '#FFDB33',
  gold_1: '#FFFBE6',
  gold_3: '#FFF5C4',
  gold_2: '#FFF8D5',
  dirt_22: '#191410',
  dirt_21: '#201B17',
  dirt_20: '#3D342C',
  white: '#FFFFFF',
  gold_19: '#F6E08A',
  gold_18: '#F8E8A1',
  grey_8: '#777777',
  grey_9: '#717171',
  gold_11: '#EDAB00',
  gold_10: '#F6BA00',
  gold_13: '#D68800',
  gold_12: '#E49B00',
  gold_15: '#B96500',
  gold_14: '#C87500',
  gold_17: '#F8F0CC',
  gold_16: '#8C500A',
  grey_10: '#6A6A6A',
  grey_11: '#5F5F5F',
  grey_12: '#545454',
  grey_13: '#494949',
  grey_14: '#3E3E3E',
  grey_15: '#373636',
  grey_16: '#2E2D2D',
  grey_17: '#C6C6C6',
  grey_18: '#939393',
  grey_19: '#707070',
  dirt_19: '#5B4D42',
  yellow_20: '#DDC97C',
  yellow_21: '#A2845B',
  yellow_22: '#32291D',
  dirt_10: '#573921',
  dirt_11: '#4E331D',
  brown_8: '#986232',
  brown_9: '#905D2F',
  stone_22: '#1C1A18',
  brown_6: '#B7885E',
  brown_7: '#A87548',
  brick_1: '#FEEEE8',
  brick_3: '#FCD6C8',
  brick_2: '#FDE2D8',
  brick_5: '#F89570',
  brick_4: '#FAB8A0',
  brick_7: '#EF5D28',
  brick_6: '#F57141',
  brick_9: '#DF4916',
  brick_8: '#E7531F',
  amber_7: '#FEB31A',
  amber_6: '#FFBE33',
  amber_5: '#FFCE66',
  amber_4: '#FFDF99',
  amber_3: '#FFECC4',
  amber_2: '#FFF2D5',
  amber_1: '#FFF7E6',
  amber_9: '#FAA100',
  amber_8: '#FDA700',
  grey_2: '#F1F1F1',
  grey_3: '#E5E5E4',
  violet_22: '#301C25',
  violet_21: '#51303F',
  violet_20: '#7F4B6D',
  grey_1: '#F8F8F9',
  red_19: '#D37676',
  red_18: '#E49595',
  red_15: '#880000',
  red_14: '#910000',
  red_17: '#EEC7C7',
  red_16: '#6D0A0A',
  red_11: '#BB0000',
  red_10: '#C90000',
  red_13: '#9F0000',
  red_12: '#AC0000',
  black: '#000000',
  grey_4: '#D6D6D5',
  grey_5: '#C0C0C0',
  navy_7: '#4265CC',
  navy_6: '#5879DA',
  navy_5: '#829AE3',
  navy_4: '#ABBCED',
  navy_3: '#CFD8F4',
  navy_2: '#DDE3F7',
  navy_1: '#EBEFFA',
  navy_9: '#294CB5',
  navy_8: '#2C51BE',
  yellow_15: '#B97600',
  yellow_14: '#C88900',
  yellow_17: '#F8F6CC',
  yellow_16: '#8C5C0A',
  yellow_11: '#EDC800',
  yellow_10: '#F6DA00',
  yellow_13: '#D6A000',
  yellow_12: '#E4B600',
  yellow_19: '#F6EF8A',
  yellow_18: '#F8F5A1',
  purple_20: '#664473',
  purple_21: '#352139',
  magenta_1: '#FCE9EF',
  magenta_3: '#F9CADA',
  magenta_2: '#FBDAE4',
  magenta_5: '#EF769E',
  magenta_4: '#F4A3BF',
  magenta_7: '#E1316A',
  magenta_6: '#E9487E',
  magenta_9: '#CF1750',
  magenta_8: '#D91955',
  indigo_3: '#D4CCF3',
  indigo_2: '#E1DBF6',
  indigo_1: '#EDE9FA',
  indigo_7: '#5438C8',
  indigo_6: '#694FD6',
  indigo_5: '#8F7BE0',
  indigo_4: '#B4A7EB',
  indigo_9: '#3B1EB1',
  indigo_8: '#482BBD',
  magenta_20: '#9E5F70',
  magenta_21: '#6D4046',
  magenta_22: '#2C1B1E',
  purple_22: '#231727',
  red_9: '#D20000',
  red_8: '#DA0000',
  red_5: '#F06D6D',
  red_4: '#F59999',
  red_7: '#E31A1A',
  red_6: '#EA4444',
  red_1: '#FDE6E6',
  red_3: '#F9C4C4',
  red_2: '#FBD5D5',
  grey_21: '#282727',
  grey_20: '#4D4D4D',
  grey_22: '#1C1B1B',
  lime_16: '#30471D',
  lime_17: '#D9E6CE',
  lime_14: '#406422',
  lime_15: '#39581E',
  lime_12: '#57882E',
  lime_13: '#4C7628',
  lime_10: '#6EAC3A',
  lime_11: '#639A34',
  lime_18: '#BAD4A4',
  lime_19: '#9EBB84',
  dirt_18: '#81746B',
  violet_8: '#B80F87',
  violet_9: '#AE0E7F',
  violet_4: '#E79FD5',
  violet_5: '#DA70BF',
  violet_6: '#CE40AA',
  violet_7: '#C32899',
  violet_1: '#F9E8F5',
  violet_2: '#F5D8EE',
  violet_3: '#F1C8E6',
  red_20: '#A55C5C',
  red_21: '#784343',
  red_22: '#361F1F',
  amber_19: '#F6D18A',
  amber_18: '#F8DCA1',
  amber_13: '#D67100',
  amber_12: '#E48000',
  amber_11: '#ED8D00',
  amber_10: '#F69A00',
  amber_17: '#F8EACC',
  amber_16: '#8C440A',
  amber_15: '#B95300',
  amber_14: '#C86100',
  green_17: '#CDDDCD',
  green_16: '#254325',
  green_15: '#274F28',
  green_14: '#2C5A2E',
  green_13: '#356A36',
  green_12: '#3D7A3E',
  green_11: '#458A46',
  green_10: '#4C9A4E',
  green_19: '#82A682',
  green_18: '#A2C2A2',
  turquoise_19: '#77AC9B',
  turquoise_18: '#99C7B9',
  turquoise_15: '#185240',
  turquoise_14: '#1B5D48',
  turquoise_17: '#C9E0D9',
  turquoise_16: '#1A4538',
  turquoise_11: '#2B8F70',
  turquoise_10: '#309F7D',
  turquoise_13: '#216E56',
  turquoise_12: '#267E63',
  aqua_22: '#1A2C2B',
  aqua_20: '#4F7E81',
  aqua_21: '#2B4447',
  yellow_9: '#FAE300',
  yellow_8: '#FDEC00',
  yellow_5: '#FFFA66',
  yellow_4: '#FFFB98',
  yellow_7: '#FEF21A',
  yellow_6: '#FFF833',
  yellow_1: '#FFFEE6',
  yellow_3: '#FFFDC4',
  yellow_2: '#FFFED5',
  teal_20: '#507481',
  teal_21: '#2C3F46',
  teal_22: '#152629',
  indigo_22: '#1A1724',
  purple_12: '#5C097A',
  indigo_20: '#59507E',
  indigo_21: '#312C45',
  purple_17: '#D5C1DF',
  purple_16: '#360B40',
  purple_15: '#3F054C',
  purple_14: '#460658',
  purple_19: '#9563A8',
  purple_18: '#B389C6',
  blue_19: '#7597B6',
  lime_22: '#1E241A',
  lime_21: '#3A4531',
  lime_20: '#6C815B',
  turquoise_1: '#ECF9F5',
  turquoise_3: '#D2F1E7',
  turquoise_2: '#DFF5EE',
  turquoise_5: '#89DAC1',
  turquoise_4: '#B1E7D6',
  turquoise_7: '#4DC19C',
  turquoise_6: '#62CEAD',
  turquoise_9: '#34A984',
  turquoise_8: '#37B38B',
  brown_4: '#DBC3AF',
  brown_5: '#C9A686',
  brown_2: '#F0E7DE',
  brown_3: '#EADDD0',
  brown_1: '#F6F0EB',
  blue_11: '#1C5C9A',
  brown_14: '#4F3319',
  purple_8: '#820DAF',
  brown_15: '#452D16',
  brown_16: '#3C2A18',
  brown_18: '#B09C8A',
  brick_19: '#E19D84',
  brick_18: '#EEB5A1',
  brick_15: '#9B1C04',
  brick_14: '#A42105',
  brick_17: '#F3D7CC',
  brick_16: '#7A1F0E',
  brick_11: '#CB380B',
  brick_10: '#D73F0D',
  brick_13: '#B12907',
  brick_12: '#BE3009',
  olive_13: '#777E23',
  olive_12: '#899128',
  olive_11: '#9BA52E',
  olive_10: '#ADB933',
  olive_17: '#E9ECD0',
  olive_16: '#494C1B',
  olive_15: '#595E1A',
  olive_14: '#656B1D',
  olive_19: '#C3C989',
  olive_18: '#DBE0A8',
  navy_22: '#232734',
  navy_20: '#555F81',
  navy_21: '#2E3446',
  brown_21: '#352D24',
  brown_20: '#645446',
  brown_22: '#29231C',
  dirt_4: '#C3B5A9',
  dirt_5: '#A68F7E',
  dirt_6: '#886A53',
  dirt_7: '#75553C',
  dirt_1: '#F0EDEA',
  dirt_2: '#E7E1DC',
  dirt_3: '#DDD4CD',
  dirt_8: '#624025',
  dirt_9: '#5D3D23',
  green_3: '#DAEFDA',
  green_2: '#E4F3E4',
  green_1: '#EFF8EF',
  green_7: '#6ABB6B',
  green_6: '#7DC97F',
  green_5: '#9ED79F',
  green_4: '#BEE4BF',
  green_9: '#52A353',
  green_8: '#57AD57',
  brown_10: '#88572C',
  brown_11: '#7A4E28',
  brown_12: '#6B4523',
  brown_13: '#5D3C1E',
  turquoise_20: '#52776C',
  turquoise_21: '#2C403A',
  turquoise_22: '#1E2C27',
  brown_17: '#D4CAC1',
  grey_6: '#A6A5A5',
  brown_19: '#917A66',
  aqua_19: '#70B4B7',
  aqua_18: '#94CFD2',
  aqua_13: '#0E7077',
  aqua_12: '#108188',
  aqua_11: '#12939A',
  aqua_10: '#13A4AB',
  aqua_17: '#C6E4E5',
  aqua_16: '#0F474A',
  aqua_15: '#0A545A',
  aqua_14: '#0B5F65'
};
/* eslint-enable quote-props */
export default COLORS;
/**
 * The same palette as `COLORS`, regrouped by theme and shade index:
 * `ColorsByTheme.red['10'] === COLORS.red_10`. Keys without a `<theme>_<shade>`
 * form (`white`, `black`) are skipped.
 */
export const ColorsByTheme = Object.keys(COLORS).reduce((accu, key) => {
  if (!key.includes('_')) {
    // Un-themed entries such as `white` and `black` do not belong to any theme group.
    return accu;
  }
  const [theme, idx] = key.split('_');
  // Mutate the accumulator in place. The original re-spread both the accumulator
  // and the per-theme object on every key, making the reduce quadratic in the
  // number of palette entries; the resulting object is identical.
  if (!accu[theme]) {
    accu[theme] = {};
  }
  accu[theme][idx] = COLORS[key];
  return accu;
}, {});
// Theme names in color-wheel order (yellow → olive), with the neutral themes
// (grey, stone, brown, dirt) listed after the wheel hues.
export const Themes = [
  'yellow',
  'gold',
  'amber',
  'orange',
  'brick',
  'red',
  'magenta',
  'violet',
  'purple',
  'indigo',
  'navy',
  'blue',
  'teal',
  'aqua',
  'turquoise',
  'green',
  'lime',
  'olive',
  'grey',
  'stone',
  'brown',
  'dirt'
];
/** The kinds of tokens produced by the expression lexer. */
export enum TokenType {
    /** An operator. */
    Operator,
    /** An identifier. */
    Identifier,
    /** A string literal. */
    String,
    /**
     * The start of a template until its first expression.
     *
     * See https://tc39.github.io/ecma262/#sec-template-literal-lexical-components for documentation on the
     * ECMAScript lexical components for templates, upon which this is based.
     */
    TemplateHead,
    /** The end of a previous template expression until the next template expression. */
    TemplateMiddle,
    /** The end of a previous template expression until the end of the template. */
    TemplateTail,
    /** A template with no substitutions. */
    NoSubstitutionTemplate,
    /** A number literal. */
    Number,
}
/** A token that the expression lexer scanned in an expression. */
export interface Token {
    /** The type of this token. */
    type: TokenType
    /**
     * The token's value.
     *
     * For string and template literals, this is the parsed string value (after accounting for escape sequences but
     * not template expressions). For number literals, this is the (unparsed) string representation.
     */
    // NOTE(review): every createToken call in this lexer passes a string, so this
    // could likely be narrowed from `any` to `string` — confirm no external writer
    // stores non-string values before changing it.
    value: any
    /** The start character position of this token. */
    start: number
    /** The end character position of this token. */
    end: number
}
/**
 * All valid operators in expressions. The values are the binding precedence (or, for
 * punctuation tokens that are not binary operators — parentheses, comma, `}`, `=` — 0). This
 * must be kept in sync with OPERATOR_CHARS.
 *
 * Exported for testing only.
 */
export const OPERATORS = {
    '(': 0,
    ')': 0,
    '}': 0,
    ',': 0,
    '=': 0,
    '||': 1,
    '&&': 2,
    '^': 4,
    '==': 6,
    '!=': 6,
    '===': 6,
    '!==': 6,
    '<': 7,
    '>': 7,
    '<=': 7,
    '>=': 7,
    '+': 9,
    '-': 9,
    '*': 10,
    '/': 10,
    '%': 10,
    '!': 11,
}
/** All valid operators. */
export type Operator = keyof typeof OPERATORS
// A node in the operator-character tree: `true` marks a complete operator with no
// longer continuation; an object maps each valid next character to a subtree.
export type OperatorTree = boolean | { [ch: string]: OperatorTree }
/**
 * A tree with the next valid operator characters for multi-character operators. This must be kept in sync with
 * OPERATORS.
 *
 * The '\u0000' key marks prefixes that are themselves complete operators (e.g. `=` within
 * `==`/`===`); '\u0000' is the sentinel that peekNextChar returns at end of input.
 *
 * Exported for testing only.
 */
export const OPERATOR_CHARS: { [ch: string]: OperatorTree } = {
    '&': { '&': true },
    '|': { '|': true },
    '=': {
        '\u0000': true,
        '=': {
            '\u0000': true,
            '=': true,
        },
    },
    '!': {
        '\u0000': true,
        '=': {
            '\u0000': true,
            '=': true,
        },
    },
    '<': { '\u0000': true, '=': true },
    '>': { '\u0000': true, '=': true },
    '^': true,
    '}': true,
    '(': true,
    ')': true,
    ',': true,
    '+': true,
    '-': true,
    '*': true,
    '/': true,
    '%': true,
}
/** Whether `character` is a whitespace character the scanner skips: tab, space, or NBSP. */
function isWhiteSpace(character: string): boolean {
    switch (character) {
        case '\u0009':
        case ' ':
        case '\u00A0':
            return true
        default:
            return false
    }
}
/** Whether `character` is an ASCII letter (a–z or A–Z). */
function isLetter(character: string): boolean {
    const isLower = character >= 'a' && character <= 'z'
    const isUpper = character >= 'A' && character <= 'Z'
    return isLower || isUpper
}
/** Whether `character` is an ASCII decimal digit (0–9). */
function isDecimalDigit(character: string): boolean {
    return !(character < '0' || character > '9')
}
/** Whether `character` may begin an identifier: a letter or an underscore. */
function isIdentifierStart(character: string): boolean {
    return isLetter(character) || character === '_'
}
/** Whether `character` may continue an identifier: a start character, a digit, or a dot. */
function isIdentifierPart(character: string): boolean {
    if (isIdentifierStart(character) || isDecimalDigit(character)) {
        return true
    }
    return character === '.'
}
/** Scans an expression. */
export class Lexer {
    // The full source text being scanned.
    private expression = ''
    // Cached expression.length.
    private length = 0
    // Cursor: index of the next character to consume.
    protected _index = 0
    // Start offset of the token currently being scanned; becomes Token.start.
    private marker = 0
    // Number of currently open `${` template substitutions. A `}` is treated as
    // template punctuation (continuing a template) only when this is > 0.
    protected curlyStack = 0
    /** The current character position of the lexer's cursor. */
    public get index(): number {
        return this._index
    }
    /** Resets the lexer to scan the given string from the beginning. */
    public reset(string: string): void {
        this.expression = string
        this.length = string.length
        this._index = 0
        this.curlyStack = 0
    }
    /**
     * Scans and returns the next token, or undefined at end of input.
     *
     * Throws SyntaxError if no scanner recognizes the next character.
     */
    public next(): Token | undefined {
        this.skipSpaces()
        if (this._index >= this.length) {
            return undefined
        }
        this.marker = this._index
        const token = this.scanNext()
        if (token !== undefined) {
            return token
        }
        throw new SyntaxError(`Unexpected character ${JSON.stringify(this.peekNextChar())} (at ${this.index})`)
    }
    /**
     * Returns the next token without consuming it (the cursor and curlyStack are
     * restored afterwards), or undefined at end of input or on a scan error.
     */
    public peek(): Omit<Token, 'start' | 'end'> | undefined {
        const savedIndex = this._index
        const savedCurlyStack = this.curlyStack
        let token: Token | undefined
        try {
            token = this.next()
        } catch {
            // Scan errors during lookahead are reported as "no token".
            token = undefined
        }
        this._index = savedIndex
        this.curlyStack = savedCurlyStack
        if (!token) {
            return undefined
        }
        return { type: token.type, value: token.value }
    }
    // Tries each scanner in a fixed order: string, template, number, operator, identifier.
    // The first scanner that recognizes the input produces the token.
    protected scanNext(): Token | undefined {
        let token = this.scanString()
        if (token !== undefined) {
            return token
        }
        token = this.scanTemplate()
        if (token !== undefined) {
            return token
        }
        token = this.scanNumber()
        if (token !== undefined) {
            return token
        }
        token = this.scanOperator()
        if (token !== undefined) {
            return token
        }
        token = this.scanIdentifier()
        if (token !== undefined) {
            return token
        }
        return undefined
    }
    // Returns the character `advance` positions ahead without consuming it, or the
    // '\u0000' sentinel when that position is past the end of input.
    private peekNextChar(advance = 0): string {
        const index = this._index + advance
        return index < this.length ? this.expression.charAt(index) : '\u0000'
    }
    // Consumes and returns the next character; at end of input returns '\u0000'
    // without advancing the cursor.
    private getNextChar(): string {
        let character = '\u0000'
        const index = this._index
        if (index < this.length) {
            character = this.expression.charAt(index)
            this._index += 1
        }
        return character
    }
    // Builds a token spanning from the scan start (marker) to the current cursor.
    private createToken(type: TokenType, value: any): Token {
        return {
            type,
            value,
            start: this.marker,
            end: this._index,
        }
    }
    // Advances the cursor past any whitespace characters.
    private skipSpaces(): void {
        while (this._index < this.length) {
            const character = this.peekNextChar()
            if (!isWhiteSpace(character)) {
                break
            }
            this.getNextChar()
        }
    }
    // Scans the longest operator reachable in the OPERATOR_CHARS tree.
    // NOTE(review): when an extensible operator prefix (`=`, `!`, `<`, `>`, `==`, `!=`)
    // is the last character of the input, the '\u0000' end-of-input sentinel matches
    // the tree's end marker and is appended to `value`, producing a token value such
    // as '=\u0000' — confirm whether consumers tolerate the trailing NUL.
    private scanOperator(): Token | undefined {
        let searchTree: OperatorTree | boolean = OPERATOR_CHARS
        let value = ''
        while (searchTree && searchTree !== true) {
            const character = this.peekNextChar()
            searchTree = searchTree[character]
            if (searchTree) {
                value += character
                this.getNextChar()
            }
        }
        if (value === '}') {
            // A bare `}` closes a template substitution.
            this.curlyStack--
        }
        if (value === '') {
            return undefined
        }
        return this.createToken(TokenType.Operator, value)
    }
    // Scans an identifier: a start character followed by letters, digits, `_`, or `.`.
    private scanIdentifier(): Token | undefined {
        let character = this.peekNextChar()
        if (!isIdentifierStart(character)) {
            return undefined
        }
        let id = this.getNextChar()
        while (true) {
            character = this.peekNextChar()
            if (!isIdentifierPart(character)) {
                break
            }
            id += this.getNextChar()
        }
        return this.createToken(TokenType.Identifier, id)
    }
    // Scans a single- or double-quoted string literal, translating backslash escapes.
    private scanString(): Token | undefined {
        const quote = this.peekNextChar()
        if (quote !== "'" && quote !== '"') {
            return undefined
        }
        this.getNextChar()
        let terminated = false
        let string = ''
        while (this._index < this.length) {
            const character = this.getNextChar()
            if (character === quote) {
                terminated = true
                break
            }
            if (character === '\\') {
                string += backslashEscapeCodeString(this.getNextChar())
            } else {
                string += character
            }
        }
        if (!terminated) {
            throw new Error(`Unterminated string literal (at ${this.index})`)
        }
        return this.createToken(TokenType.String, string)
    }
    // Scans the start of a template: a backtick begins a template head; a `}` while
    // inside a substitution (curlyStack > 0) continues an existing template.
    private scanTemplate(): Token | undefined {
        const character = this.peekNextChar()
        if (!(character === '`' || (character === '}' && this.curlyStack > 0))) {
            return undefined
        }
        this.getNextChar()
        const head = character === '`'
        return this.doScanTemplate(head)
    }
    // Whether templates at the current nesting level are backtick-delimited.
    // Overridden by TemplateLexer, whose root template has no surrounding backticks.
    protected backtick(): boolean {
        return true
    }
    // Scans template characters until a closing backtick (tail) or a `${` (start of a
    // substitution), translating backslash escapes. `head` is true when this part
    // started the template (as opposed to continuing after a substitution).
    protected doScanTemplate(head: boolean): Token {
        let tail = false
        let terminated = false
        let hasSubstitution = false
        let string = ''
        while (this._index < this.length) {
            const character = this.getNextChar()
            if (character === '`' && this.backtick()) {
                tail = true
                terminated = true
                break
            }
            if (character === '\\') {
                string += backslashEscapeCodeString(this.getNextChar())
            } else {
                if (character === '$') {
                    const character2 = this.peekNextChar()
                    if (character2 === '{') {
                        this.curlyStack++
                        this.getNextChar()
                        terminated = true
                        hasSubstitution = true
                        break
                    }
                }
                string += character
            }
        }
        if (!head) {
            // This part was opened by the `}` that closed a substitution.
            this.curlyStack--
        }
        if (this.backtick()) {
            if (!terminated) {
                throw new Error(`Unterminated template literal (at ${this.index})`)
            }
        } else if (this._index === this.length) {
            // A backtick-less root template (TemplateLexer) ends at end of input.
            tail = true
        }
        let type: TokenType
        if (head && terminated && !hasSubstitution) {
            type = TokenType.NoSubstitutionTemplate
        } else if (head) {
            type = TokenType.TemplateHead
        } else if (tail) {
            type = TokenType.TemplateTail
        } else {
            type = TokenType.TemplateMiddle
        }
        return this.createToken(type, string)
    }
    // Scans a number literal: decimal digits, an optional fraction, and an optional
    // signed exponent. The value is kept as its unparsed string representation.
    private scanNumber(): Token | undefined {
        let character = this.peekNextChar()
        if (!isDecimalDigit(character) && character !== '.') {
            return undefined
        }
        let number = ''
        if (character !== '.') {
            number = this.getNextChar()
            while (true) {
                character = this.peekNextChar()
                if (!isDecimalDigit(character)) {
                    break
                }
                number += this.getNextChar()
            }
        }
        if (character === '.') {
            number += this.getNextChar()
            while (true) {
                character = this.peekNextChar()
                if (!isDecimalDigit(character)) {
                    break
                }
                number += this.getNextChar()
            }
        }
        if (character === 'e' || character === 'E') {
            number += this.getNextChar()
            character = this.peekNextChar()
            if (character === '+' || character === '-' || isDecimalDigit(character)) {
                number += this.getNextChar()
                while (true) {
                    character = this.peekNextChar()
                    if (!isDecimalDigit(character)) {
                        break
                    }
                    number += this.getNextChar()
                }
            } else {
                // `character` is reused here to build the error message text.
                character = `character ${JSON.stringify(character)}`
                if (this._index >= this.length) {
                    character = '<end>'
                }
                throw new SyntaxError(`Unexpected ${character} after the exponent sign (at ${this.index})`)
            }
        }
        if (number === '.') {
            throw new SyntaxError(`Expected decimal digits after the dot sign (at ${this.index})`)
        }
        return this.createToken(TokenType.Number, number)
    }
}
/** Scans a template. */
export class TemplateLexer extends Lexer {
    public next(): Token | undefined {
        // The very first call scans the template head directly: the root template
        // has no opening backtick to trigger the normal template scanner.
        return this._index === 0 ? this.doScanTemplate(true) : super.next()
    }
    protected backtick(): boolean {
        // Only nested templates (inside `${...}` substitutions) are
        // backtick-delimited; the root template is not.
        return this.curlyStack !== 0
    }
}
function backslashEscapeCodeString(character: string): string {
switch (character) {
case 'n':
return '\n'
case 'r':
return '\r'
case 't':
return '\t'
default:
return character
}
} | the_stack |
/// <reference path="libs/mulThms.d.ts" />
module NavierStokes {
    // Shared solver instance; null until setupNavierStokes() runs.
    /*@ solver :: {v:FluidField<Mutable> | offset(v,"width") = 128 && offset(v,"height") = 128} + null */
    let solver:FluidField<Mutable> = null;
    // Counts update() calls so the checksum can be verified on a fixed frame.
    /*@ nsFrameCounter :: number */
    let nsFrameCounter = 0;
    module TopLevelFuncs {
        // Advances the simulation by one frame; verifies the checksum on frame 15.
        export function runNavierStokes()
        {
            let solverRO /*@ readonly */ = solver;
            if (!solverRO) throw new Error("solver is null! did you forget to call setupNavierStokes first?");
            solverRO.update();
            nsFrameCounter++;
            if(nsFrameCounter===15)
                checkResult(solverRO.getDens());
        }
        // Sums a slice of the density field and compares it against the expected
        // benchmark checksum (74); logs on mismatch instead of throwing.
        /*@ checkResult :: ({v:IArray<number> | (len v) = (128+2) * (128+2)}) => void */
        function checkResult(dens) {
            // Lemma call for the refinement checker (130*130 bound); no runtime effect.
            let _lemma = mulThm130(130);
            let result = 0;
            for (let i=7000;i<7100;i++) {
                result = result + (dens[i]*10); //PORT TODO: result+=~~((dens[i]*10));
            }
            if (result!==74) {
                console.log("checksum failed: " + result);
            }
        }
        // Builds the 128x128 solver with 20 relaxation iterations and a no-op
        // display function, and publishes it to the module-level `solver`.
        export function setupNavierStokes()
        {
            let _lemma = mulThm128(128);
            let solverRO /*@ readonly */ = new FluidField(null, 128, 128);
            solverRO.setIterations(20);
            let dFunc: (f:Field<Immutable>) => void = function(f) {};
            solverRO.setDisplayFunction(dFunc);
            solverRO.setUICallback(prepareFrame);
            solverRO.reset();
            solver = solverRO;
        }
        // Releases the solver so it can be garbage-collected.
        export function tearDownNavierStokes()
        {
            solver = null;
        }
        // Seeds density and velocity along three diagonal lines of the field.
        /*@ addPoints :: ({v:Field<Mutable> | offset(v,"w") = 128 && offset(v,"h") = 128}) => void */
        function addPoints(field:Field<Mutable>) {
            let n = 64;
            for (let i = 1; i <= n; i++) {
                field.setVelocity(i, i, n, n);
                field.setDensity(i, i, 5);
                field.setVelocity(i, n - i, -n, -n);
                field.setDensity(i, n - i, 20);
                field.setVelocity(128 - i, n + i, -n, -n);
                field.setDensity(128 - i, n + i, 30);
            }
        }
        // Per-frame UI callback invoked by the solver before each update.
        // NOTE(review): both counters are locals re-initialized on every call, so the
        // condition is always true, addPoints runs every frame, and the else branch is
        // dead. In the original benchmark these were persistent globals — confirm intent.
        /*@ prepareFrame :: ({v:Field<Mutable> | offset(v,"w") = 128 && offset(v,"h") = 128}) => void */
        function prepareFrame(field:Field<Mutable>)
        {
            let framesTillAddingPoints = 0;
            let framesBetweenAddingPoints = 5;
            if (framesTillAddingPoints === 0) {
                addPoints(field);
                framesTillAddingPoints = framesBetweenAddingPoints;
                framesBetweenAddingPoints++;
            } else {
                framesTillAddingPoints--;
            }
        }
    }
// Code from Oliver Hunt (http://nerget.com/fluidSim/pressure.js) starts here.
export class FluidField<M extends ReadOnly> {
        // Interior grid dimensions; the stored arrays are padded with a one-cell
        // boundary on each side, hence rowSize = width + 2 and
        // size = (height + 2) * (width + 2).
        /*@ (Immutable) width : pos */
        private width;
        /*@ (Immutable) height : pos */
        private height;
        /*@ (Immutable) rowSize : {v:number | v = this.width + 2} */
        private rowSize;
        /*@ (Immutable) size : {v:number | v = (this.height+2) * (this.width+2)} */
        private size;
        // Density field and its previous-step buffer.
        /*@ (Immutable) dens : {v:IArray<number> | (len v) = this.size} */
        private dens;
        /*@ (Immutable) dens_prev : {v:IArray<number> | (len v) = this.size} */
        private dens_prev;
        // Horizontal (u) and vertical (v) velocity components, with previous-step buffers.
        /*@ (Immutable) u : {v:IArray<number> | (len v) = this.size} */
        private u;
        /*@ (Immutable) u_prev : {v:IArray<number> | (len v) = this.size} */
        private u_prev;
        /*@ (Immutable) v : {v:IArray<number> | (len v) = this.size} */
        private v;
        /*@ (Immutable) v_prev : {v:IArray<number> | (len v) = this.size} */
        private v_prev;
        // Relaxation iteration count, viscosity, and time step.
        private iters:number;
        private visc:number;
        private dt:number;
        // Called after each update to render the field.
        private displayFunc: (f:Field<Immutable>) => void;
        // Called before each update so the UI can inject density/velocity.
        /*@ uiCallback : ({v:Field<Mutable> | offset(v,"w") = this.width && offset(v,"h") = this.height}) => void */
        private uiCallback;
        // Allocates the six padded simulation arrays and zero-fills them. The
        // refinement bound hRes * wRes < 1000000 caps the total array size.
        /*@ new (canvas:top,
                 hRes:pos,
                 wRes:{v:pos | hRes * v < 1000000}) : {v:FluidField<M> | offset(v,"width") = wRes && offset(v,"height") = hRes} */
        constructor(canvas, hRes, wRes) {
            let width = wRes;
            let height = hRes;
            let size = (height+2) * (width+2);
            this.width = width;
            this.height = height;
            this.rowSize = width + 2;
            this.size = size;
            let dens      :IArray<number> = new Array<number>(size);
            let dens_prev :IArray<number> = new Array<number>(size);
            let u         :IArray<number> = new Array<number>(size);
            let u_prev    :IArray<number> = new Array<number>(size);
            let v         :IArray<number> = new Array<number>(size);
            let v_prev    :IArray<number> = new Array<number>(size);
            for (let i = 0; i < size; i++) {
                dens_prev[i] = 0; u_prev[i] = 0; v_prev[i] = 0; dens[i] = 0; u[i] = 0; v[i] = 0;
            }
            this.dens = dens;
            this.dens_prev = dens_prev;
            this.u = u;
            this.u_prev = u_prev;
            this.v = v;
            this.v_prev = v_prev;
            this.iters = 10;
            this.visc = 1/2;//.
            this.dt = 1/10;//.
            // Default no-op callbacks so update() can run before any UI is attached.
            this.displayFunc = <(f:Field<Immutable>)=>void> function(f) {}; //ORIG: null
            this.uiCallback = <(f:Field<Mutable>)=>void> function(field) {};
        }
        // Accumulates the source field s into x, scaled by the time step dt.
        /*@ addFields (x:{v:IArray<number> | (len v) = this.size},
                       s:{v:IArray<number> | (len v) = this.size},
                       dt:number) : void */
        addFields(x:number[], s:number[], dt:number)
        {
            for (let i=0; i<this.size; i++) x[i] = x[i] + dt*s[i]; //ORIG: +=
        }
        // Applies boundary conditions to the one-cell padding around field x.
        // b === 1: left/right edge cells are negated (reflection), top/bottom copied.
        // b === 2: top/bottom edge cells are negated, left/right copied.
        // otherwise: every edge cell is a straight copy of its interior neighbor.
        // Finally the four corners are set to the average of their two edge neighbors.
        /*@ set_bnd (b:number, x:{v:IArray<number> | (len v) = this.size}) : void */
        set_bnd(b:number, x:number[])
        {
            let width = this.width;
            let height = this.height;
            let rowSize = this.rowSize;
            // Lemma calls below establish index bounds for the refinement checker;
            // they have no runtime effect.
            let _lemma0 = mulThm1(rowSize, height+2);
            let _lemma1 = mulThm2(rowSize, height, height+2);
            let _lemma2 = mulThm2(rowSize, height+1, height+2);
            if (b===1) {
                for (let i = 1; i <= width; i++) {
                    x[i] =  x[i + rowSize];
                    x[i + (height+1) *rowSize] = x[i + height * rowSize];
                }
                for (let j = 1; j <= height; j++) {
                    let _lemmaJ = mulThm2(rowSize, j, height+2);
                    x[j * rowSize] = -x[1 + j * rowSize];
                    x[(width + 1) + j * rowSize] = -x[width + j * rowSize];
                }
            } else if (b === 2) {
                for (let i = 1; i <= width; i++) {
                    x[i] = -x[i + rowSize];
                    x[i + (height + 1) * rowSize] = -x[i + height * rowSize];
                }
                for (let j = 1; j <= height; j++) {
                    let _lemmaJ = mulThm2(rowSize, j, height+2);
                    x[j * rowSize] =  x[1 + j * rowSize];
                    x[(width + 1) + j * rowSize] =  x[width + j * rowSize];
                }
            } else {
                for (let i = 1; i <= width; i++) {
                    x[i] =  x[i + rowSize];
                    x[i + (height + 1) * rowSize] = x[i + height * rowSize];
                }
                for (let j = 1; j <= height; j++) {
                    let _lemmaJ = mulThm2(rowSize, j, height+2);
                    x[j * rowSize] =  x[1 + j * rowSize];
                    x[(width + 1) + j * rowSize] =  x[width + j * rowSize];
                }
            }
            let maxEdge = (height + 1) * rowSize;
            x[0]                 = 1/2 * (x[1] + x[rowSize]);//.
            x[maxEdge]           = 1/2 * (x[1 + maxEdge] + x[height * rowSize]);//.
            x[(width+1)]         = 1/2 * (x[width] + x[(width + 1) + rowSize]);//.
            x[(width+1)+maxEdge] = 1/2 * (x[width + maxEdge] + x[(width + 1) + height * rowSize]);//.
        }
        // Iterative relaxation solver for x = (x0 + a * neighbor-sum) / c, run
        // this.iters times with boundary conditions reapplied after each sweep.
        // Updates are in place, so later cells in a row read already-updated values
        // (lastX tracks the just-written left neighbor).
        // Fast path: a === 0 && c === 1 degenerates to copying x0 into x.
        /*@ lin_solve (b :number,
                       x :{v:IArray<number> | (len v) = this.size},
                       x0:{v:IArray<number> | (len v) = this.size},
                       a :number,
                       c :{v:number | v != 0}) : void */
        lin_solve(b:number, x:number[], x0:number[], a:number, c:number)
        {
            let width = this.width;
            let height = this.height;
            let rowSize = this.rowSize;
            if (a === 0 && c === 1) {
                for (let j=1 ; j<=height; j++) {
                    let currentRow = j * rowSize;
                    let _lemma = mulThm2(rowSize, j, height+2);
                    ++currentRow;
                    for (let i = 0; i < width; i++) {
                        //needs Add2
                        x[currentRow] = x0[currentRow];
                        ++currentRow;
                    }
                }
                this.set_bnd(b, x);
            } else {
                let invC = 1 / c;
                for (let k=0 ; k<this.iters; k++) {
                    for (let j=1 ; j<=height; j++) {
                        let lastRow = (j - 1) * rowSize;
                        let currentRow = j * rowSize;
                        let nextRow = (j + 1) * rowSize;
                        let _lemma1 = mulThm2(rowSize, j-1, height+2);
                        let _lemma2 = mulThm2(rowSize, j, height+2);
                        let _lemma3 = mulThm2(rowSize, j+1, height+2);
                        let lastX = x[currentRow];
                        ++currentRow;
                        for (let i=1; i<=width; i++) {
                            // The write targets the pre-increment currentRow (the LHS index
                            // is evaluated before the RHS ++ side effects); the ++ reads
                            // fetch the right/above/below neighbors.
                            x[currentRow] = (x0[currentRow] + a*(lastX+x[++currentRow]+x[++lastRow]+x[++nextRow])) * invC;
                            lastX = x[currentRow];
                        }
                    }
                    this.set_bnd(b, x);
                }
            }
        }
        // Diffusion step. The coefficient a is fixed to 0, so lin_solve takes its
        // fast path and simply copies x0 into x (dt and this.visc are unused here).
        /*@ diffuse (b :number,
                     x :{v:IArray<number> | (len v) = this.size},
                     x0:{v:IArray<number> | (len v) = this.size},
                     dt:number) : void */
        diffuse(b:number, x:number[], x0:number[], dt:number)
        {
            let a = 0;
            this.lin_solve(b, x, x0, a, 1 + 4*a);
        }
        // Like lin_solve but relaxes two fields (x and y) in one fused sweep, applying
        // boundary condition 1 to x and 2 to y after each pass.
        // Note the asymmetry in the inner loop (kept from the upstream benchmark):
        // the x update reads x[currentRow]/x[lastRow]/x[nextRow] before the ++
        // increments that the y update then performs on the shared cursors.
        /*@ lin_solve2 (x :{v:IArray<number> | (len v) = this.size},
                        x0:{v:IArray<number> | (len v) = this.size},
                        y :{v:IArray<number> | (len v) = this.size},
                        y0:{v:IArray<number> | (len v) = this.size},
                        a :number,
                        c :{v:number | v != 0}) : void */
        lin_solve2(x:number[], x0:number[], y:number[], y0:number[], a:number, c:number)
        {
            let width = this.width;
            let height = this.height;
            let rowSize = this.rowSize;
            if (a === 0 && c === 1) {
                for (let j=1 ; j <= height; j++) {
                    let currentRow = j * rowSize;
                    let _lemma = mulThm2(rowSize, j, height+2);
                    ++currentRow;
                    for (let i = 0; i < width; i++) {
                        //needs Add2
                        x[currentRow] = x0[currentRow];
                        y[currentRow] = y0[currentRow];
                        ++currentRow;
                    }
                }
                this.set_bnd(1, x);
                this.set_bnd(2, y);
            } else {
                let invC = 1/c;
                for (let k=0 ; k<this.iters; k++) {
                    for (let j=1 ; j <= height; j++) {
                        let lastRow = (j - 1) * rowSize;
                        let currentRow = j * rowSize;
                        let nextRow = (j + 1) * rowSize;
                        let _lemma1 = mulThm2(rowSize, j-1, height+2);
                        let _lemma2 = mulThm2(rowSize, j, height+2);
                        let _lemma3 = mulThm2(rowSize, j+1, height+2);
                        let lastX = x[currentRow];
                        let lastY = y[currentRow];
                        ++currentRow;
                        for (let i = 1; i <= width; i++) {
                            x[currentRow] = (x0[currentRow] + a * (lastX + x[currentRow] + x[lastRow] + x[nextRow])) * invC;
                            lastX = x[currentRow];
                            y[currentRow] = (y0[currentRow] + a * (lastY + y[++currentRow] + y[++lastRow] + y[++nextRow])) * invC;
                            lastY = y[currentRow];
                        }
                    }
                    this.set_bnd(1, x);
                    this.set_bnd(2, y);
                }
            }
        }
        // Two-field diffusion step. As in diffuse(), a is fixed to 0, so lin_solve2
        // takes its fast path and copies x0/y0 into x/y (dt is unused here).
        /*@ diffuse2 (x :{v:IArray<number> | (len v) = this.size},
                      x0:{v:IArray<number> | (len v) = this.size},
                      y :{v:IArray<number> | (len v) = this.size},
                      y0:{v:IArray<number> | (len v) = this.size},
                      dt:number) : void */
        diffuse2(x:number[], x0:number[], y:number[], y0:number[], dt:number)
        {
            let a = 0;
            this.lin_solve2(x, x0, y, y0, a, 1 + 4 * a);
        }
        // Advection step: traces each cell back along the velocity field (u, v) by one
        // time step, clamps the backtraced position to [0.5, width/height + 0.5], and
        // bilinearly interpolates the source field d0 at that position into d.
        /*@ advect (b :number,
                    d :{v:IArray<number> | (len v) = this.size},
                    d0:{v:IArray<number> | (len v) = this.size},
                    u :{v:IArray<number> | (len v) = this.size},
                    v :{v:IArray<number> | (len v) = this.size},
                    dt:number) : void */
        advect(b:number, d:number[], d0:number[], u:number[], v:number[], dt:number)
        {
            let width = this.width;
            let height = this.height;
            let rowSize = this.rowSize;
            let Wdt0 = dt * width;
            let Hdt0 = dt * height;
            let Wp5 = width + 1/2;//.
            let Hp5 = height + 1/2;//.
            for (let j = 1; j<= height; j++) {
                let pos = j * rowSize;
                let _lemma = mulThm2(rowSize, j, height+2);
                for (let i = 1; i <= width; i++) {
                    // Backtrace this cell's center through the velocity field.
                    let x:any = i - Wdt0 * u[++pos];
                    let y:any = j - Hdt0 * v[pos];
                    if (x < 1/2)//.
                        x = 1/2;//.
                    else if (x > Wp5)
                        x = Wp5;
                    let i0 = Math.floor(x); //ORIG: x | 0;
                    let i1 = i0 + 1;
                    if (y < 1/2)//.
                        y = 1/2;//.
                    else if (y > Hp5)
                        y = Hp5;
                    let j0 = Math.floor(y); //ORIG: y | 0;
                    let j1 = j0 + 1;
                    // Bilinear interpolation weights for the four surrounding cells.
                    let s1 = x - i0;
                    let s0 = 1 - s1;
                    let t1 = y - j0;
                    let t0 = 1 - t1;
                    let row1 = j0 * rowSize;
                    let row2 = j1 * rowSize;
                    mulThm2(rowSize, j0, height+2);
                    mulThm2(rowSize, j1, height+2);
                    d[pos] = s0 * (t0 * d0[i0 + row1] + t1 * d0[i0 + row2]) + s1 * (t0 * d0[i1 + row1] + t1 * d0[i1 + row2]);
                }
            }
            this.set_bnd(b, d);
        }
/*@ project (u :{v:IArray<number> | (len v) = this.size},
v :{v:IArray<number> | (len v) = this.size},
p :{v:IArray<number> | (len v) = this.size},
divv:{v:IArray<number> | (len v) = this.size}) : void */
// Pressure projection: computes the divergence of (u, v) into divv, solves a
// Poisson problem for the pressure p via lin_solve, then subtracts the
// pressure gradient from the velocity so the field stays mass-conserving.
project(u:number[], v:number[], p:number[], divv:number[])
{
let width = this.width;
let height = this.height;
let rowSize = this.rowSize;
let h = -(1/2) / Math.sqrt(width * height);//.
for (let j = 1 ; j <= height; j++ ) {
let row = j * rowSize;
let previousRow = (j - 1) * rowSize;
let prevValue = row - 1;
let currentRow = row;
let nextValue = row + 1;
let nextRow = (j + 1) * rowSize;
// Row-offset bound lemmas for the refinement-type checker (no runtime effect).
let _lemma1 = mulThm2(rowSize, j-1, height+2);
let _lemma2 = mulThm2(rowSize, j, height+2);
let _lemma3 = mulThm2(rowSize, j+1, height+2);
for (let i = 1; i <= width; i++ ) {
// Central-difference divergence; the pre-increments walk all four
// neighbour cursors in lock-step with i.
divv[++currentRow] = h * (u[++nextValue] - u[++prevValue] + v[++nextRow] - v[++previousRow]);
p[currentRow] = 0;
}
}
this.set_bnd(0, divv);
this.set_bnd(0, p);
this.lin_solve(0, p, divv, 1, 4 );
let wScale = 1/2 * width;//.
let hScale = 1/2 * height;//.
for (let k = 1; k<= height; k++ ) {
let prevPos = k * rowSize - 1;
let currentPos = k * rowSize;
let nextPos = k * rowSize + 1;
let prevRow = (k - 1) * rowSize;
let currentRow = k * rowSize;
let nextRow = (k + 1) * rowSize;
let _lemma1 = mulThm2(rowSize, k-1, height+2);
let _lemma2 = mulThm2(rowSize, k, height+2);
let _lemma3 = mulThm2(rowSize, k+1, height+2);
for (let i = 1; i<= width; i++) {
++currentPos;
// Subtract the pressure gradient from each velocity component.
u[currentPos] = u[currentPos] - wScale * (p[++nextPos] - p[++prevPos]); //ORIG: -=
v[currentPos] = v[currentPos] - hScale * (p[++nextRow] - p[++prevRow]); //ORIG: -=
}
}
this.set_bnd(1, u);
this.set_bnd(2, v);
}
/*@ dens_step (x :{v:IArray<number> | (len v) = this.size},
x0:{v:IArray<number> | (len v) = this.size},
u :{v:IArray<number> | (len v) = this.size},
v :{v:IArray<number> | (len v) = this.size},
dt:number) : void */
// One density step: inject sources (x0) into x, diffuse, then advect along
// the velocity field (u, v).  The classic solver's array swaps are realised
// here by the argument order of the diffuse/advect calls, so the call order
// is behaviour — do not reorder.
dens_step(x:number[], x0:number[], u:number[], v:number[], dt:number)
{
this.addFields(x, x0, dt);
this.diffuse(0, x0, x, dt );
this.advect(0, x, x0, u, v, dt );
}
/*@ vel_step (u :{v:IArray<number> | (len v) = this.size},
v :{v:IArray<number> | (len v) = this.size},
u0:{v:IArray<number> | (len v) = this.size},
v0:{v:IArray<number> | (len v) = this.size},
dt:number) : void */
// One velocity step: add forces, diffuse, project (enforce incompressibility),
// self-advect, then project again.  The temp assignments swap the local
// references of the current/previous buffers between phases.
vel_step(u:number[], v:number[], u0:number[], v0:number[], dt:number)
{
this.addFields(u, u0, dt );
this.addFields(v, v0, dt );
let temp = u0; u0 = u; u = temp;
temp = v0; v0 = v; v = temp;
this.diffuse2(u,u0,v,v0, dt);
this.project(u, v, u0, v0);
temp = u0; u0 = u; u = temp;
temp = v0; v0 = v; v = temp;
this.advect(1, u, u0, u0, v0, dt);
this.advect(2, v, v0, u0, v0, dt);
this.project(u, v, u0, v0 );
}
/*@ queryUI (d:{v:IArray<number> | (len v) = this.size},
u:{v:IArray<number> | (len v) = this.size},
v:{v:IArray<number> | (len v) = this.size}) : void */
// Zero the source arrays, then let the UI callback write this frame's user
// input (added density / forces) into them through a Field view.
queryUI(d:number[], u:number[], v:number[])
{
for (let i = 0; i < this.size; i++) {
u[i] = 0; v[i] = 0; d[i] = 0;//.
}
this.uiCallback(new Field(this.rowSize, this.width, this.height, d, u, v));
}
// Advance the simulation one frame: gather UI input into the *_prev buffers,
// step velocity, step density, then hand the result to the display callback.
public update()
{
this.queryUI(this.dens_prev, this.u_prev, this.v_prev);
this.vel_step(this.u, this.v, this.u_prev, this.v_prev, this.dt);
this.dens_step(this.dens, this.dens_prev, this.u, this.v, this.dt);
this.displayFunc(new Field(this.rowSize, this.width, this.height, this.dens, this.u, this.v));
}
/*@ @Mutable setDisplayFunction ((Field<Immutable>)=>void) : void */
// Register the callback invoked at the end of update() with the freshly
// computed, read-only field.
public setDisplayFunction(f:(f:Field<Immutable>) => void) {
this.displayFunc = f;
}
// Current iteration count used by the linear solvers.
public iterations() { return this.iters; }
/*@ @Mutable setIterations (iters:number) : void */
// Set the solver iteration count; values outside (0, 100] are silently ignored.
public setIterations(iters:number)
{
if (iters > 0 && iters <= 100)
this.iters = iters;
}
/*@ @Mutable setUICallback (({v:Field<Mutable> | offset(v,"w") = this.width && offset(v,"h") = this.height})=>void) : void */
// Register the callback that queryUI invokes each frame with a writable field
// so the UI can inject density and forces.
public setUICallback(callback:(f:Field<Mutable>) => void) {
this.uiCallback = callback;
}
// Zero all six simulation arrays (current and previous density/velocity).
public reset()
{
for (let i = 0; i < this.size; i++) {
this.dens_prev[i] = 0; this.u_prev[i] = 0; this.v_prev[i] = 0; this.dens[i] = 0; this.u[i] = 0; this.v[i] = 0;
}
}
/*@ getDens () : {v:IArray<number> | (len v) = this.size} */
// Expose the current density array (raw, including boundary cells).
public getDens()
{
return this.dens;
}
}
// View over one simulation frame: density plus both velocity components,
// stored in (h + 2) x (w + 2) row-major arrays that include a one-cell
// boundary ring; the +1 offsets in the accessors skip that ring.  The
// mutability parameter M governs write access per the refinement annotations.
export class Field<M extends ReadOnly> {
/*@ (Immutable) rowSize : {v:number | v = this.w + 2} */
// Stride between consecutive grid rows (interior width + 2 border cells).
private rowSize;
/*@ (Immutable) w : pos */
// Interior grid width.
private w;
/*@ (Immutable) h : pos */
// Interior grid height.
private h;
/*@ (Immutable) dens : {v:IArray<number> | (len v) = (this.h + 2) * (this.w + 2)} */
// Density per cell.
private dens;
/*@ (Immutable) u : {v:IArray<number> | (len v) = (this.h + 2) * (this.w + 2)} */
// Horizontal velocity component per cell.
private u;
/*@ (Immutable) v : {v:IArray<number> | (len v) = (this.h + 2) * (this.w + 2)} */
// Vertical velocity component per cell.
private v;
/*@ new (rowSize: {v:number | v = w+2},
w: pos,
h: pos,
dens: {v:IArray<number> | (len v) = (h+2) * (w+2)},
u: {v:IArray<number> | (len v) = (h+2) * (w+2)},
v: {v:IArray<number> | (len v) = (h+2) * (w+2)}) : {v:Field<M> | offset(v,"w") = w && offset(v,"h") = h } */
constructor(rowSize:number, w:number, h:number, dens:number[], u:number[], v:number[]) {
this.rowSize = rowSize;
this.w = w;
this.h = h;
this.dens = dens;
this.u = u;
this.v = v;
}
/*@ setDensity (x:{v:nat | v <= this.w}, y:{v:nat | v <= this.h}, d:number) : void */
// Write density at interior cell (x, y).
public setDensity(x:number, y:number, d:number) {
// mulThm2 supplies the bounds lemma required by the refinement checker.
let _lemma = mulThm2(this.rowSize, y+1, this.h+2);
this.dens[(x + 1) + (y + 1) * this.rowSize] = d;
}
/*@ getDensity (x:{v:nat | v <= this.w}, y:{v:nat | v <= this.h}) : number */
// Read density at interior cell (x, y).
public getDensity(x:number, y:number) {
let _lemma = mulThm2(this.rowSize, y+1, this.h+2);
return this.dens[(x + 1) + (y + 1) * this.rowSize];
}
/*@ setVelocity (x:{v:nat | v <= this.w}, y:{v:nat | v <= this.h}, xv:number, yv:number) : void */
// Write both velocity components at interior cell (x, y).
public setVelocity(x:number, y:number, xv:number, yv:number) {
let _lemma = mulThm2(this.rowSize, y+1, this.h+2);
this.u[(x + 1) + (y + 1) * this.rowSize] = xv;
this.v[(x + 1) + (y + 1) * this.rowSize] = yv;
}
/*@ getXVelocity (x:{v:nat | v <= this.w}, y:{v:nat | v <= this.h}) : number */
// Read the horizontal velocity at interior cell (x, y).
public getXVelocity(x:number, y:number) {
let _lemma = mulThm2(this.rowSize, y+1, this.h+2);
return this.u[(x + 1) + (y + 1) * this.rowSize];
}
/*@ getYVelocity (x:{v:nat | v <= this.w}, y:{v:nat | v <= this.h}) : number */
// Read the vertical velocity at interior cell (x, y).
public getYVelocity(x:number, y:number) {
let _lemma = mulThm2(this.rowSize, y+1, this.h+2);
return this.v[(x + 1) + (y + 1) * this.rowSize];
}
/*@ width () : {v:number | v = this.w} */
public width():number { return this.w; }
/*@ height () : {v:number | v = this.h} */
public height():number { return this.h; }
}
} | the_stack |
import React from 'react'
import { getWindow } from 'ssr-window'
import { fireEvent } from '@testing-library/dom'
/* Internal dependencies */
import { TransitionDuration } from 'Foundation'
import { render } from 'Utils/testUtils'
import OverlayProps, { ContainerRectAttr, TargetRectAttr, OverlayPosition } from './Overlay.types'
import Overlay, { CONTAINER_TEST_ID, ESCAPE_KEY, OVERLAY_TEST_ID, WRAPPER_TEST_ID } from './Overlay'
import { getOverlayTranslation } from './utils'
// Test harness: wraps Overlay in a #main element so the component under test
// renders against a stable root container.
const RootOverlay: React.FC<OverlayProps> = ({ children, ...overlayProps }) => (
  <div id="main">
    <Overlay {...overlayProps}>
      { children }
    </Overlay>
  </div>
)
// Overlay test suite: snapshot rendering, the getOverlayTranslation()
// positioning utility, prop pass-through, and dismissal behaviour
// (outside click and the Escape key).
describe('Overlay test >', () => {
let props: OverlayProps
beforeEach(() => {
props = {
container: getWindow().document.body,
show: true,
}
})
const renderOverlay = (optionProps?: OverlayProps) => render(
<div>
<div />
<Overlay {...props} {...optionProps}>
<div>
test
</div>
</Overlay>
</div>,
)
it('Snapshot >', () => {
// const { getByTestId: getContainerTestId } = renderContainer()
// const renderedContainer = getContainerTestId('container')
const { getByTestId } = renderOverlay()
const rendered = getByTestId(OVERLAY_TEST_ID)
expect(rendered).toMatchSnapshot()
})
describe('PositionUtils >', () => {
// Minimal overlay stub: only getBoundingClientRect is consulted by the
// translation math, so a fixed 400x400 rect is sufficient.
const overlay = {
getBoundingClientRect: () => ({
width: 400,
height: 400,
}),
} as HTMLElement
const targetRect: TargetRectAttr = {
targetWidth: 100,
targetHeight: 100,
targetTop: 450,
targetLeft: 450,
clientTop: 0,
clientLeft: 0,
}
const containerRect: ContainerRectAttr = {
containerWidth: 1000,
containerHeight: 1000,
containerTop: 0,
containerLeft: 0,
scrollTop: 0,
scrollLeft: 0,
}
describe('getOverlayTranslation() > ', () => {
it('Without any option', () => {
const result = getOverlayTranslation({
overlay: null,
targetRect: null,
position: OverlayPosition.BottomCenter,
marginX: 0,
marginY: 0,
keepInContainer: true,
containerRect: {
containerWidth: 0,
containerHeight: 0,
containerTop: 0,
containerLeft: 0,
scrollTop: 0,
scrollLeft: 0,
},
})
expect(result).toEqual({
translateX: 0,
translateY: 0,
})
})
// Test name (Korean): "BottomLeft, normal case — shifted down by targetHeight."
it('BottomLeft 일반적인 경우, targetHeight 만큼 이동.', () => {
const result = getOverlayTranslation({
overlay,
targetRect,
position: OverlayPosition.BottomLeft,
marginX: 0,
marginY: 0,
keepInContainer: true,
containerRect,
})
expect(result).toEqual({
translateX: 0,
translateY: 100,
})
})
// Test name (Korean): "BottomLeft overflowing below — shifted by -overlay.height."
it('BottomLeft 아래가 넘어가는 경우, -overlay.height 만큼 이동', () => {
const overflowTarget: TargetRectAttr = {
...targetRect,
targetTop: 950,
}
const result = getOverlayTranslation({
overlay,
targetRect: overflowTarget,
position: OverlayPosition.BottomLeft,
marginX: 0,
marginY: 0,
keepInContainer: true,
containerRect,
})
expect(result).toEqual({
translateX: 0,
translateY: -400,
})
})
// Test name (Korean): "BottomLeft overflows below, but the space under the
// target is wider — same result as the normal case."
it('BottomLeft 아래가 넘어가지만 target 아래쪽 공간이 더 넓을 경우, 일반 상태와 같은 결과.', () => {
const overflowTarget: TargetRectAttr = {
...targetRect,
targetTop: 200,
}
const overflowContainer: ContainerRectAttr = {
...containerRect,
containerHeight: 600,
}
/*
(translated) With containerHeight 600, targetHeight 100 and
targetTop 200, the space below the target is 300.
Since 300 is greater than 200, the overlay should appear below.
*/
const result = getOverlayTranslation({
overlay,
targetRect: overflowTarget,
position: OverlayPosition.BottomLeft,
marginX: 0,
marginY: 0,
keepInContainer: true,
containerRect: overflowContainer,
})
expect(result).toEqual({
translateX: 0,
translateY: 100,
})
})
})
})
describe('Props and Event', () => {
const renderRootOverlay = (optionProps?: OverlayProps) => render(<RootOverlay {...props} {...optionProps} />)
beforeEach(() => {
props = {
show: true,
className: '',
containerClassName: '',
position: OverlayPosition.LeftCenter,
marginX: 0,
marginY: 0,
keepInContainer: false,
withTransition: false,
enableClickOutside: false,
children: 'Test Overlay',
}
})
describe('Props', () => {
describe('show', () => {
describe('is True', () => {
it('container style', () => {
const { getByTestId } = renderRootOverlay()
const overlay = getByTestId(CONTAINER_TEST_ID)
expect(overlay).toHaveStyle('position: fixed')
expect(overlay).toHaveStyle('top: 0')
expect(overlay).toHaveStyle('right: 0')
expect(overlay).toHaveStyle('bottom: 0')
expect(overlay).toHaveStyle('left: 0')
expect(overlay).toHaveStyle('width: 100%')
expect(overlay).toHaveStyle('height: 100%')
expect(overlay).toHaveStyle('pointer-events: all')
})
it('wrapper style', () => {
const { getByTestId } = renderRootOverlay()
const overlay = getByTestId(WRAPPER_TEST_ID)
expect(overlay).toHaveStyle('position: relative')
expect(overlay).toHaveStyle('width: 100%')
expect(overlay).toHaveStyle('height: 100%')
})
it('overlay style', () => {
const { getByTestId } = renderRootOverlay()
const overlay = getByTestId(OVERLAY_TEST_ID)
expect(overlay).toHaveStyle('position: absolute')
})
})
describe('is False', () => {
it('container style', () => {
// NOTE(review): `show` is still true here (set in beforeEach) — this
// presumably should call renderRootOverlay({ show: false }) to
// exercise the hidden branch; verify.
const { container } = renderRootOverlay()
// <main id="main" />
expect(container.children.length).toBe(1)
})
})
})
describe('className', () => {
it('is transferred', () => {
const CLASSNAME = 'Test__Overlay'
const { getByTestId } = renderRootOverlay({ className: CLASSNAME })
const overlay = getByTestId(OVERLAY_TEST_ID)
expect(overlay).toHaveClass(CLASSNAME)
})
})
describe('style', () => {
it('is transferred', () => {
const STYLE: React.CSSProperties = {
width: '100px',
}
const { getByTestId } = renderRootOverlay({ style: STYLE })
const overlay = getByTestId(OVERLAY_TEST_ID)
expect(overlay).toHaveStyle('width: 100px')
})
})
describe('containerClassName', () => {
it('is transferred', () => {
const CLASSNAME = 'Test__Container'
const { getByTestId } = renderRootOverlay({ containerClassName: CLASSNAME })
const overlay = getByTestId(CONTAINER_TEST_ID)
expect(overlay).toHaveClass(CLASSNAME)
})
})
describe('containerStyle', () => {
it('is transferred', () => {
const STYLE: React.CSSProperties = {
width: '100px',
}
const { getByTestId } = renderRootOverlay({ containerStyle: STYLE })
const overlay = getByTestId(CONTAINER_TEST_ID)
expect(overlay).toHaveStyle('width: 100px')
})
})
describe('enableClickOutside', () => {
// The document-level spy detects whether the click propagated past the
// overlay container.
document.onclick = jest.fn()
const onHide = jest.fn()
afterEach(jest.clearAllMocks)
it('is True', () => {
const { getByTestId } = renderRootOverlay({ enableClickOutside: true, onHide })
const overlay = getByTestId(CONTAINER_TEST_ID)
overlay.click()
expect(document.onclick).toHaveBeenCalledTimes(1)
expect(onHide).toHaveBeenCalledTimes(1)
overlay.click()
expect(document.onclick).toHaveBeenCalledTimes(2)
expect(onHide).toHaveBeenCalledTimes(2)
})
it('is False - click is stopPropagation ', () => {
const { getByTestId } = renderRootOverlay()
const overlay = getByTestId(CONTAINER_TEST_ID)
overlay.click()
expect(document.onclick).toHaveBeenCalledTimes(0)
expect(onHide).toHaveBeenCalledTimes(0)
overlay.click()
expect(document.onclick).toHaveBeenCalledTimes(0)
expect(onHide).toHaveBeenCalledTimes(0)
})
})
describe('withTransition', () => {
it('is True', () => {
const { getByTestId } = renderRootOverlay({ withTransition: true })
const overlay = getByTestId(OVERLAY_TEST_ID)
expect(overlay).toHaveStyle(`transition-property: ${['top', 'opacity'].join(',')}`)
expect(overlay).toHaveStyle(`transition-duration: ${TransitionDuration.S}ms`)
expect(overlay).toHaveStyle('transition-timing-function: cubic-bezier(.3,0,0,1)')
expect(overlay).toHaveStyle('transition-delay: 0ms')
})
})
})
describe('Event', () => {
describe('keydown', () => {
// Keydown events are expected to always reach the document; onHide must
// fire only for Escape.
document.onkeydown = jest.fn()
const onHide = jest.fn()
afterEach(jest.clearAllMocks)
it('is Triggered By Escape', () => {
const { getByTestId } = renderRootOverlay({ withTransition: true, onHide })
const overlay = getByTestId(OVERLAY_TEST_ID)
fireEvent.keyDown(overlay, { key: ESCAPE_KEY })
expect(document.onkeydown).toHaveBeenCalledTimes(1)
expect(onHide).toHaveBeenCalledTimes(1)
fireEvent.keyDown(overlay, { key: ESCAPE_KEY })
expect(document.onkeydown).toHaveBeenCalledTimes(2)
expect(onHide).toHaveBeenCalledTimes(2)
})
it('is not Triggered By All keys except Escape', () => {
const { getByTestId } = renderRootOverlay({ withTransition: true, onHide })
const overlay = getByTestId(OVERLAY_TEST_ID)
fireEvent.keyDown(overlay, { key: 'Enter' })
expect(document.onkeydown).toHaveBeenCalledTimes(1)
expect(onHide).toHaveBeenCalledTimes(0)
fireEvent.keyDown(overlay, { key: 'ArrowRight' })
expect(document.onkeydown).toHaveBeenCalledTimes(2)
expect(onHide).toHaveBeenCalledTimes(0)
fireEvent.keyDown(overlay, { key: 'Z' })
expect(document.onkeydown).toHaveBeenCalledTimes(3)
expect(onHide).toHaveBeenCalledTimes(0)
})
})
})
})
})
import {
HttpClient,
HttpEvent,
HttpHeaders,
HttpParams,
HttpResponse,
} from '@angular/common/http';
import { Injectable, Injector } from '@angular/core';
import {
ODataActionResource,
ODataBatchResource,
ODataEntityResource,
ODataEntitySetResource,
ODataFunctionResource,
ODataMetadataResource,
ODataNavigationPropertyResource,
ODataResource,
ODataResponse,
ODataSegment,
ODataSingletonResource,
} from './resources/index';
import { ODataCollection, ODataModel } from './models/index';
import { ODataApi } from './api';
import { ODataEntityService } from './services/entity';
import { ODataRequest } from './resources/index';
import { ODataSettings } from './settings';
import { Observable } from 'rxjs';
import { map } from 'rxjs/operators';
/**
 * Merge a request body into an HTTP options bag, producing the flat options
 * object shape that `ODataClient.request` expects.
 * @param options The HTTP options to copy (only the known keys are carried over).
 * @param body The request body (or null for body-less requests).
 * @returns A new options object with `body` plus the copied options.
 */
function addBody<T>(
  options: {
    etag?: string;
    apiName?: string;
    fetchPolicy?:
      | 'cache-first'
      | 'cache-and-network'
      | 'network-only'
      | 'no-cache'
      | 'cache-only';
    headers?: HttpHeaders | { [header: string]: string | string[] };
    observe?: 'body' | 'events' | 'response';
    params?: HttpParams | { [param: string]: string | string[] };
    reportProgress?: boolean;
    responseType?: 'arraybuffer' | 'blob' | 'json' | 'text';
    withCredentials?: boolean;
  },
  body: T | null
): any {
  // Pull out exactly the known option keys so unknown extras never leak through.
  const {
    etag,
    apiName,
    fetchPolicy,
    headers,
    observe,
    params,
    reportProgress,
    responseType,
    withCredentials,
  } = options;
  return {
    body,
    etag,
    apiName,
    fetchPolicy,
    headers,
    observe,
    params,
    reportProgress,
    responseType,
    withCredentials,
  };
}
@Injectable({
providedIn: 'root',
})
export class ODataClient {
// Wires the settings layer to Angular's HttpClient: every ODataRequest built
// by this client is executed by forwarding its pieces to HttpClient#request.
constructor(
private http: HttpClient,
private settings: ODataSettings,
private injector: Injector
) {
this.settings.configure({
requester: (req: ODataRequest<any>): Observable<any> =>
this.http.request(req.method, `${req.url}`, {
body: req.body,
headers: req.headers,
observe: req.observe,
params: req.params,
reportProgress: req.reportProgress,
responseType: req.responseType,
withCredentials: req.withCredentials,
}),
});
}
//#region Resolve Building Blocks
/**
 * Resolve the api for the given value.
 * A resource resolves through its types, a string is tried first as an api
 * name and then as a type; any failed lookup falls back to the default api.
 * @param value The value to resolve (resource, string, or undefined).
 * @returns The api for the value.
 */
apiFor(value?: ODataResource<any> | string): ODataApi {
  let resolved: ODataApi | undefined;
  if (value instanceof ODataResource) {
    resolved = this.settings.findApiForTypes(value.types());
  } else if (typeof value === 'string') {
    resolved =
      this.settings.findApiByName(value) ||
      this.settings.findApiForType(value);
  }
  return resolved || this.settings.defaultApi();
}
/**
 * Resolve the parser for the given string type.
 * Thin delegate to the configured ODataSettings.
 * @param type The string type of the parser.
 * @returns The parser for the given type.
 */
parserForType<T>(type: string) {
return this.settings.parserForType<T>(type);
}
/**
 * Resolve the enum type for the given string type.
 * Thin delegate to the configured ODataSettings.
 * @param type The string type of the enum type.
 * @returns The enum type for the given type.
 */
enumTypeForType<T>(type: string) {
return this.settings.enumTypeForType<T>(type);
}
/**
 * Resolve the structured type for the given string type.
 * Thin delegate to the configured ODataSettings.
 * @param type The string type of the structured type.
 * @returns The structured type for the given type.
 */
structuredTypeForType<T>(type: string) {
return this.settings.structuredTypeForType<T>(type);
}
/**
 * Resolve the callable for the given string type.
 * Thin delegate to the configured ODataSettings.
 * @param type The string type of the callable.
 * @returns The callable for the given type.
 */
callableForType<T>(type: string) {
return this.settings.callableForType<T>(type);
}
/**
 * Resolve the entity set for the given string type.
 * Thin delegate to the configured ODataSettings.
 * @param type The string type of the entity set.
 * @returns The entity set for the given type.
 */
entitySetForType(type: string) {
return this.settings.entitySetForType(type);
}
/**
 * Resolve the model for the given string type.
 * Thin delegate to the configured ODataSettings.
 * @param type The string type of the model.
 * @returns The model for the given type.
 */
modelForType(type: string): typeof ODataModel {
return this.settings.modelForType(type);
}
/**
 * Resolve the collection for the given string type.
 * Thin delegate to the configured ODataSettings.
 * @param type The string type of the collection.
 * @returns The collection for the given type.
 */
collectionForType(type: string): typeof ODataCollection {
return this.settings.collectionForType(type);
}
/**
 * Resolve the service for the given string type.
 * Looks up the service class in the settings, then instantiates it through
 * Angular's injector.
 * @param type The string type of the service.
 * @returns The service for the given type.
 */
serviceForType(type: string): ODataEntityService<any> {
return this.injector.get(this.settings.serviceForType(type));
}
/**
 * Resolve the service for the given string entity type.
 * Looks up the service class in the settings, then instantiates it through
 * Angular's injector.
 * @param type The string entity type binding to the service.
 * @returns The service for the given entity type.
 */
serviceForEntityType(type: string): ODataEntityService<any> {
return this.injector.get(this.settings.serviceForEntityType(type));
}
/** Resolve the enum type with the given name (delegates to ODataSettings). */
enumTypeByName<T>(name: string) {
return this.settings.enumTypeByName<T>(name);
}
/** Resolve the structured type with the given name (delegates to ODataSettings). */
structuredTypeByName<T>(name: string) {
return this.settings.structuredTypeByName<T>(name);
}
/** Resolve the callable with the given name (delegates to ODataSettings). */
callableByName<T>(name: string) {
return this.settings.callableByName<T>(name);
}
/** Resolve the entity set with the given name (delegates to ODataSettings). */
entitySetByName(name: string) {
return this.settings.entitySetByName(name);
}
/** Resolve the model with the given name (delegates to ODataSettings). */
modelByName(name: string): typeof ODataModel {
return this.settings.modelByName(name);
}
/** Resolve the collection with the given name (delegates to ODataSettings). */
collectionByName(name: string): typeof ODataCollection {
return this.settings.collectionByName(name);
}
/** Resolve the service registered under the given name via Angular's injector. */
serviceByName(name: string): ODataEntityService<any> {
return this.injector.get(this.settings.serviceByName(name));
}
//#endregion
//#region API Resource Proxy Methods
/**
 * Rebuild a resource from its serialized JSON form (path segments plus
 * query options), against the api resolved from `apiNameOrType`.
 * @param json Serialized resource state.
 * @param apiNameOrType The name of the API or a type it handles.
 * @returns The reconstructed resource.
 */
fromJSON<E>(
json: { segments: ODataSegment[]; options: { [name: string]: any } },
apiNameOrType?: string
):
| ODataEntityResource<E>
| ODataEntitySetResource<E>
| ODataNavigationPropertyResource<E>
| ODataSingletonResource<E>;
fromJSON(
json: { segments: ODataSegment[]; options: { [name: string]: any } },
apiNameOrType?: string
) {
return this.apiFor(apiNameOrType).fromJSON<any>(json);
}
// Requests
/**
 * Build a resource for the metadata.
 * Delegates to the api resolved from `apiName` (or the default api).
 * @param apiName The name of the API.
 * @returns The metadata resource.
 */
metadata(apiName?: string): ODataMetadataResource {
return this.apiFor(apiName).metadata();
}
/**
 * Build a resource for the batch.
 * Delegates to the api resolved from `apiName` (or the default api).
 * @param apiName The name of the API.
 * @returns The batch resource.
 */
batch(apiName?: string): ODataBatchResource {
return this.apiFor(apiName).batch();
}
/**
 * Build a resource for the singleton.
 * Delegates to the api resolved from `apiNameOrType` (or the default api).
 * @param path The full path to the singleton.
 * @param apiNameOrType The name of the API or the type of the singleton.
 * @returns The singleton resource.
 */
singleton<T>(path: string, apiNameOrType?: string) {
return this.apiFor(apiNameOrType).singleton<T>(path);
}
/**
 * Build a resource for the entity set.
 * Delegates to the api resolved from `apiNameOrType` (or the default api).
 * @param path The full path to the entity set.
 * @param apiNameOrType The name of the API or the type of the entity set.
 * @returns The entity set resource.
 */
entitySet<T>(
path: string,
apiNameOrType?: string
): ODataEntitySetResource<T> {
return this.apiFor(apiNameOrType).entitySet<T>(path);
}
/**
 * Build a resource for unbound action.
 * Delegates to the api resolved from `apiNameOrType` (or the default api).
 * @param path The full path to the action.
 * @param apiNameOrType The name of the API or the type of the entity.
 * @returns The unbound action resource.
 */
action<P, R>(
path: string,
apiNameOrType?: string
): ODataActionResource<P, R> {
return this.apiFor(apiNameOrType).action<P, R>(path);
}
/**
 * Build a resource for unbound function.
 * Delegates to the api resolved from `apiNameOrType` (or the default api).
 * @param path The full path to the function.
 * @param apiNameOrType The name of the API or the type of the callable.
 * @returns The unbound function resource.
 */
function<P, R>(
path: string,
apiNameOrType?: string
): ODataFunctionResource<P, R> {
return this.apiFor(apiNameOrType).function<P, R>(path);
}
//#endregion
// Request headers, get, post, put, patch... etc
request(
method: string,
resource: ODataResource<any>,
options: {
body: any | null;
etag?: string;
apiName?: string;
fetchPolicy?:
| 'cache-first'
| 'cache-and-network'
| 'network-only'
| 'no-cache'
| 'cache-only';
headers?: HttpHeaders | { [header: string]: string | string[] };
observe?: 'body';
params?: HttpParams | { [param: string]: string | string[] };
reportProgress?: boolean;
responseType: 'arraybuffer';
withCredentials?: boolean;
}
): Observable<ArrayBuffer>;
request(
method: string,
resource: ODataResource<any>,
options: {
body: any | null;
etag?: string;
apiName?: string;
fetchPolicy?:
| 'cache-first'
| 'cache-and-network'
| 'network-only'
| 'no-cache'
| 'cache-only';
headers?: HttpHeaders | { [header: string]: string | string[] };
observe?: 'body';
params?: HttpParams | { [param: string]: string | string[] };
reportProgress?: boolean;
responseType: 'blob';
withCredentials?: boolean;
}
): Observable<Blob>;
request(
method: string,
resource: ODataResource<any>,
options: {
body: any | null;
etag?: string;
apiName?: string;
fetchPolicy?:
| 'cache-first'
| 'cache-and-network'
| 'network-only'
| 'no-cache'
| 'cache-only';
headers?: HttpHeaders | { [header: string]: string | string[] };
observe?: 'body';
params?: HttpParams | { [param: string]: string | string[] };
reportProgress?: boolean;
responseType: 'text';
withCredentials?: boolean;
}
): Observable<string>;
request(
method: string,
resource: ODataResource<any>,
options: {
body: any | null;
etag?: string;
apiName?: string;
fetchPolicy?:
| 'cache-first'
| 'cache-and-network'
| 'network-only'
| 'no-cache'
| 'cache-only';
headers?: HttpHeaders | { [header: string]: string | string[] };
params?: HttpParams | { [param: string]: string | string[] };
observe: 'events';
reportProgress?: boolean;
responseType: 'arraybuffer';
withCredentials?: boolean;
}
): Observable<HttpEvent<ArrayBuffer>>;
request(
method: string,
resource: ODataResource<any>,
options: {
body: any | null;
etag?: string;
apiName?: string;
fetchPolicy?:
| 'cache-first'
| 'cache-and-network'
| 'network-only'
| 'no-cache'
| 'cache-only';
headers?: HttpHeaders | { [header: string]: string | string[] };
observe: 'events';
params?: HttpParams | { [param: string]: string | string[] };
reportProgress?: boolean;
responseType: 'blob';
withCredentials?: boolean;
}
): Observable<HttpEvent<Blob>>;
request(
method: string,
resource: ODataResource<any>,
options: {
body: any | null;
etag?: string;
apiName?: string;
fetchPolicy?:
| 'cache-first'
| 'cache-and-network'
| 'network-only'
| 'no-cache'
| 'cache-only';
headers?: HttpHeaders | { [header: string]: string | string[] };
observe: 'events';
params?: HttpParams | { [param: string]: string | string[] };
reportProgress?: boolean;
responseType: 'text';
withCredentials?: boolean;
}
): Observable<HttpEvent<string>>;
request(
method: string,
resource: ODataResource<any>,
options: {
body: any | null;
etag?: string;
apiName?: string;
fetchPolicy?:
| 'cache-first'
| 'cache-and-network'
| 'network-only'
| 'no-cache'
| 'cache-only';
headers?: HttpHeaders | { [header: string]: string | string[] };
reportProgress?: boolean;
observe: 'events';
params?: HttpParams | { [param: string]: string | string[] };
responseType?: 'json';
withCredentials?: boolean;
}
): Observable<HttpEvent<any>>;
request<R>(
method: string,
resource: ODataResource<any>,
options: {
body: any | null;
etag?: string;
apiName?: string;
fetchPolicy?:
| 'cache-first'
| 'cache-and-network'
| 'network-only'
| 'no-cache'
| 'cache-only';
headers?: HttpHeaders | { [header: string]: string | string[] };
reportProgress?: boolean;
observe: 'events';
params?: HttpParams | { [param: string]: string | string[] };
responseType?: 'json';
withCredentials?: boolean;
}
): Observable<HttpEvent<R>>;
request(
method: string,
resource: ODataResource<any>,
options: {
body: any | null;
etag?: string;
apiName?: string;
fetchPolicy?:
| 'cache-first'
| 'cache-and-network'
| 'network-only'
| 'no-cache'
| 'cache-only';
headers?: HttpHeaders | { [header: string]: string | string[] };
observe: 'response';
params?: HttpParams | { [param: string]: string | string[] };
reportProgress?: boolean;
responseType: 'arraybuffer';
withCredentials?: boolean;
}
): Observable<HttpResponse<ArrayBuffer>>;
request(
method: string,
resource: ODataResource<any>,
options: {
body: any | null;
etag?: string;
apiName?: string;
fetchPolicy?:
| 'cache-first'
| 'cache-and-network'
| 'network-only'
| 'no-cache'
| 'cache-only';
headers?: HttpHeaders | { [header: string]: string | string[] };
observe: 'response';
params?: HttpParams | { [param: string]: string | string[] };
reportProgress?: boolean;
responseType: 'blob';
withCredentials?: boolean;
}
): Observable<HttpResponse<Blob>>;
request(
method: string,
resource: ODataResource<any>,
options: {
body: any | null;
etag?: string;
apiName?: string;
fetchPolicy?:
| 'cache-first'
| 'cache-and-network'
| 'network-only'
| 'no-cache'
| 'cache-only';
headers?: HttpHeaders | { [header: string]: string | string[] };
observe: 'response';
params?: HttpParams | { [param: string]: string | string[] };
reportProgress?: boolean;
responseType: 'text';
withCredentials?: boolean;
}
): Observable<ODataResponse<string>>;
request(
method: string,
resource: ODataResource<any>,
options: {
body: any | null;
etag?: string;
apiName?: string;
fetchPolicy?:
| 'cache-first'
| 'cache-and-network'
| 'network-only'
| 'no-cache'
| 'cache-only';
headers?: HttpHeaders | { [header: string]: string | string[] };
reportProgress?: boolean;
observe: 'response';
params?: HttpParams | { [param: string]: string | string[] };
responseType?: 'json';
withCredentials?: boolean;
}
): Observable<ODataResponse<Object>>;
request<R>(
method: string,
resource: ODataResource<any>,
options: {
body: any | null;
etag?: string;
apiName?: string;
fetchPolicy?:
| 'cache-first'
| 'cache-and-network'
| 'network-only'
| 'no-cache'
| 'cache-only';
headers?: HttpHeaders | { [header: string]: string | string[] };
reportProgress?: boolean;
observe: 'response';
params?: HttpParams | { [param: string]: string | string[] };
responseType?: 'json';
withCredentials?: boolean;
}
): Observable<ODataResponse<R>>;
request(
method: string,
resource: ODataResource<any>,
options: {
body: any | null;
etag?: string;
apiName?: string;
fetchPolicy?:
| 'cache-first'
| 'cache-and-network'
| 'network-only'
| 'no-cache'
| 'cache-only';
headers?: HttpHeaders | { [header: string]: string | string[] };
observe?: 'body';
params?: HttpParams | { [param: string]: string | string[] };
responseType?: 'json';
reportProgress?: boolean;
withCredentials?: boolean;
}
): Observable<Object>;
request<R>(
method: string,
resource: ODataResource<any>,
options: {
body: any | null;
etag?: string;
apiName?: string;
fetchPolicy?:
| 'cache-first'
| 'cache-and-network'
| 'network-only'
| 'no-cache'
| 'cache-only';
headers?: HttpHeaders | { [header: string]: string | string[] };
observe?: 'body';
params?: HttpParams | { [param: string]: string | string[] };
responseType?: 'json';
reportProgress?: boolean;
withCredentials?: boolean;
}
): Observable<R>;
request(
method: string,
resource: ODataResource<any>,
options?: {
body: any | null;
etag?: string;
apiName?: string;
fetchPolicy?:
| 'cache-first'
| 'cache-and-network'
| 'network-only'
| 'no-cache'
| 'cache-only';
headers?: HttpHeaders | { [header: string]: string | string[] };
params?: HttpParams | { [param: string]: string | string[] };
observe?: 'body' | 'events' | 'response';
reportProgress?: boolean;
responseType?: 'arraybuffer' | 'blob' | 'json' | 'text';
withCredentials?: boolean;
}
): Observable<any>;
/**
 * Implementation behind every `request` overload: resolves the target api,
 * wraps the call in an ODataRequest and dispatches it, unwrapping the
 * response body unless the caller asked to observe events or the response.
 * @param method HTTP verb (GET, POST, PUT, PATCH, DELETE, ...).
 * @param resource The resource the request is addressed to.
 * @param options Request options (body, headers, params, observe mode, ...).
 * @returns Observable of the body, the full response, or the event stream,
 * depending on `options.observe`.
 * @throws Error when neither `options.apiName` nor the resource resolve to a
 * known api configuration.
 */
request(
method: string,
resource: ODataResource<any>,
options: {
body: any | null;
etag?: string;
apiName?: string;
fetchPolicy?:
| 'cache-first'
| 'cache-and-network'
| 'network-only'
| 'no-cache'
| 'cache-only';
headers?: HttpHeaders | { [header: string]: string | string[] };
observe?: 'body' | 'events' | 'response';
params?: HttpParams | { [param: string]: string | string[] };
reportProgress?: boolean;
responseType?: 'arraybuffer' | 'blob' | 'json' | 'text';
withCredentials?: boolean;
}
): Observable<any> {
// An explicit apiName takes precedence over the api attached to the resource.
const api = options.apiName
? this.settings.apiByName(options.apiName)
: resource.api;
if (!api)
throw new Error(
// Grammar fixed from the original ("does not belongs to").
`The types: '[${resource
.types()
.join(', ')}]' do not belong to any known configuration`
);
const request = new ODataRequest({
method,
api,
resource,
body: options.body,
// Only 'events' must reach the backend unchanged; both 'body' and
// 'response' need the full response so the body can be unwrapped below.
observe: options.observe === 'events' ? 'events' : 'response',
etag: options.etag,
headers: options.headers,
reportProgress: options.reportProgress,
params: options.params,
responseType: options.responseType,
fetchPolicy: options.fetchPolicy,
withCredentials: options.withCredentials,
});
return api
.request(request)
.pipe(
map((res: any) =>
// 'body' is the default observe mode: unwrap; otherwise pass through.
options.observe === undefined || options.observe === 'body'
? res.body
: res
)
);
}
  // ---------------------------------------------------------------------------
  // `delete` overloads. The pattern mirrors Angular's HttpClient: the
  // combination of `observe` ('body' | 'events' | 'response') and
  // `responseType` ('arraybuffer' | 'blob' | 'text' | 'json') selects the
  // return type. `etag` is forwarded for optimistic-concurrency headers.
  // ---------------------------------------------------------------------------
  // Overload: body as ArrayBuffer.
  delete(
    resource: ODataResource<any>,
    options?: {
      etag?: string;
      apiName?: string;
      fetchPolicy?:
        | 'cache-first'
        | 'cache-and-network'
        | 'network-only'
        | 'no-cache'
        | 'cache-only';
      headers?: HttpHeaders | { [header: string]: string | string[] };
      observe?: 'body';
      params?: HttpParams | { [param: string]: string | string[] };
      reportProgress?: boolean;
      responseType: 'arraybuffer';
      withCredentials?: boolean;
    }
  ): Observable<ArrayBuffer>;
  // Overload: body as Blob.
  delete(
    resource: ODataResource<any>,
    options?: {
      etag?: string;
      apiName?: string;
      fetchPolicy?:
        | 'cache-first'
        | 'cache-and-network'
        | 'network-only'
        | 'no-cache'
        | 'cache-only';
      headers?: HttpHeaders | { [header: string]: string | string[] };
      observe?: 'body';
      params?: HttpParams | { [param: string]: string | string[] };
      reportProgress?: boolean;
      responseType: 'blob';
      withCredentials?: boolean;
    }
  ): Observable<Blob>;
  // Overload: body as text.
  delete(
    resource: ODataResource<any>,
    options?: {
      etag?: string;
      apiName?: string;
      fetchPolicy?:
        | 'cache-first'
        | 'cache-and-network'
        | 'network-only'
        | 'no-cache'
        | 'cache-only';
      headers?: HttpHeaders | { [header: string]: string | string[] };
      observe?: 'body';
      params?: HttpParams | { [param: string]: string | string[] };
      reportProgress?: boolean;
      responseType: 'text';
      withCredentials?: boolean;
    }
  ): Observable<string>;
  // Overload: event stream, ArrayBuffer body.
  delete(
    resource: ODataResource<any>,
    options?: {
      etag?: string;
      apiName?: string;
      fetchPolicy?:
        | 'cache-first'
        | 'cache-and-network'
        | 'network-only'
        | 'no-cache'
        | 'cache-only';
      headers?: HttpHeaders | { [header: string]: string | string[] };
      observe: 'events';
      params?: HttpParams | { [param: string]: string | string[] };
      reportProgress?: boolean;
      responseType: 'arraybuffer';
      withCredentials?: boolean;
    }
  ): Observable<HttpEvent<ArrayBuffer>>;
  // Overload: event stream, Blob body.
  delete(
    resource: ODataResource<any>,
    options?: {
      etag?: string;
      apiName?: string;
      fetchPolicy?:
        | 'cache-first'
        | 'cache-and-network'
        | 'network-only'
        | 'no-cache'
        | 'cache-only';
      headers?: HttpHeaders | { [header: string]: string | string[] };
      observe: 'events';
      params?: HttpParams | { [param: string]: string | string[] };
      reportProgress?: boolean;
      responseType: 'blob';
      withCredentials?: boolean;
    }
  ): Observable<HttpEvent<Blob>>;
  // Overload: event stream, text body.
  delete(
    resource: ODataResource<any>,
    options?: {
      etag?: string;
      apiName?: string;
      fetchPolicy?:
        | 'cache-first'
        | 'cache-and-network'
        | 'network-only'
        | 'no-cache'
        | 'cache-only';
      headers?: HttpHeaders | { [header: string]: string | string[] };
      observe: 'events';
      params?: HttpParams | { [param: string]: string | string[] };
      reportProgress?: boolean;
      responseType: 'text';
      withCredentials?: boolean;
    }
  ): Observable<HttpEvent<string>>;
  // Overload: event stream, untyped JSON body.
  delete(
    resource: ODataResource<any>,
    options?: {
      etag?: string;
      apiName?: string;
      fetchPolicy?:
        | 'cache-first'
        | 'cache-and-network'
        | 'network-only'
        | 'no-cache'
        | 'cache-only';
      headers?: HttpHeaders | { [header: string]: string | string[] };
      observe: 'events';
      params?: HttpParams | { [param: string]: string | string[] };
      reportProgress?: boolean;
      responseType?: 'json';
      withCredentials?: boolean;
    }
  ): Observable<HttpEvent<Object>>;
  // Overload: event stream, JSON body cast to T.
  delete<T>(
    resource: ODataResource<any>,
    options?: {
      etag?: string;
      apiName?: string;
      fetchPolicy?:
        | 'cache-first'
        | 'cache-and-network'
        | 'network-only'
        | 'no-cache'
        | 'cache-only';
      headers?: HttpHeaders | { [header: string]: string | string[] };
      observe: 'events';
      params?: HttpParams | { [param: string]: string | string[] };
      reportProgress?: boolean;
      responseType?: 'json';
      withCredentials?: boolean;
    }
  ): Observable<HttpEvent<T>>;
  // Overload: full response, ArrayBuffer body.
  delete(
    resource: ODataResource<any>,
    options?: {
      etag?: string;
      apiName?: string;
      fetchPolicy?:
        | 'cache-first'
        | 'cache-and-network'
        | 'network-only'
        | 'no-cache'
        | 'cache-only';
      headers?: HttpHeaders | { [header: string]: string | string[] };
      observe: 'response';
      params?: HttpParams | { [param: string]: string | string[] };
      reportProgress?: boolean;
      responseType: 'arraybuffer';
      withCredentials?: boolean;
    }
  ): Observable<HttpResponse<ArrayBuffer>>;
  // Overload: full response, Blob body.
  delete(
    resource: ODataResource<any>,
    options?: {
      etag?: string;
      apiName?: string;
      fetchPolicy?:
        | 'cache-first'
        | 'cache-and-network'
        | 'network-only'
        | 'no-cache'
        | 'cache-only';
      headers?: HttpHeaders | { [header: string]: string | string[] };
      observe: 'response';
      params?: HttpParams | { [param: string]: string | string[] };
      reportProgress?: boolean;
      responseType: 'blob';
      withCredentials?: boolean;
    }
  ): Observable<HttpResponse<Blob>>;
  // Overload: full OData response, text body.
  // NOTE(review): text/json 'response' overloads return ODataResponse while
  // arraybuffer/blob return HttpResponse — looks intentional, verify upstream.
  delete(
    resource: ODataResource<any>,
    options?: {
      etag?: string;
      apiName?: string;
      fetchPolicy?:
        | 'cache-first'
        | 'cache-and-network'
        | 'network-only'
        | 'no-cache'
        | 'cache-only';
      headers?: HttpHeaders | { [header: string]: string | string[] };
      observe: 'response';
      params?: HttpParams | { [param: string]: string | string[] };
      reportProgress?: boolean;
      responseType: 'text';
      withCredentials?: boolean;
    }
  ): Observable<ODataResponse<string>>;
  // Overload: full OData response, untyped JSON body.
  delete(
    resource: ODataResource<any>,
    options?: {
      etag?: string;
      apiName?: string;
      fetchPolicy?:
        | 'cache-first'
        | 'cache-and-network'
        | 'network-only'
        | 'no-cache'
        | 'cache-only';
      headers?: HttpHeaders | { [header: string]: string | string[] };
      observe: 'response';
      params?: HttpParams | { [param: string]: string | string[] };
      reportProgress?: boolean;
      responseType?: 'json';
      withCredentials?: boolean;
    }
  ): Observable<ODataResponse<Object>>;
  // Overload: full OData response, JSON body cast to T.
  delete<T>(
    resource: ODataResource<any>,
    options?: {
      etag?: string;
      apiName?: string;
      fetchPolicy?:
        | 'cache-first'
        | 'cache-and-network'
        | 'network-only'
        | 'no-cache'
        | 'cache-only';
      headers?: HttpHeaders | { [header: string]: string | string[] };
      observe: 'response';
      params?: HttpParams | { [param: string]: string | string[] };
      reportProgress?: boolean;
      responseType?: 'json';
      withCredentials?: boolean;
    }
  ): Observable<ODataResponse<T>>;
  // Overload: untyped JSON body (default observe/responseType).
  delete(
    resource: ODataResource<any>,
    options?: {
      etag?: string;
      apiName?: string;
      fetchPolicy?:
        | 'cache-first'
        | 'cache-and-network'
        | 'network-only'
        | 'no-cache'
        | 'cache-only';
      headers?: HttpHeaders | { [header: string]: string | string[] };
      observe?: 'body';
      params?: HttpParams | { [param: string]: string | string[] };
      reportProgress?: boolean;
      responseType?: 'json';
      withCredentials?: boolean;
    }
  ): Observable<Object>;
  // Overload: JSON body cast to T (default observe/responseType).
  delete<T>(
    resource: ODataResource<any>,
    options?: {
      etag?: string;
      apiName?: string;
      fetchPolicy?:
        | 'cache-first'
        | 'cache-and-network'
        | 'network-only'
        | 'no-cache'
        | 'cache-only';
      headers?: HttpHeaders | { [header: string]: string | string[] };
      observe?: 'body';
      params?: HttpParams | { [param: string]: string | string[] };
      reportProgress?: boolean;
      responseType?: 'json';
      withCredentials?: boolean;
    }
  ): Observable<T>;
delete(
resource: ODataResource<any>,
options: {
etag?: string;
apiName?: string;
fetchPolicy?:
| 'cache-first'
| 'cache-and-network'
| 'network-only'
| 'no-cache'
| 'cache-only';
headers?: HttpHeaders | { [header: string]: string | string[] };
observe?: 'body' | 'events' | 'response';
params?: HttpParams | { [param: string]: string | string[] };
reportProgress?: boolean;
responseType?: 'arraybuffer' | 'blob' | 'json' | 'text';
withCredentials?: boolean;
} = {}
): Observable<any> {
return this.request<any>('DELETE', resource, addBody<any>(options, null));
}
  // ---------------------------------------------------------------------------
  // `get` overloads. As with Angular's HttpClient, `observe` and `responseType`
  // select the return type.
  // ---------------------------------------------------------------------------
  // Overload: body as ArrayBuffer.
  get(
    resource: ODataResource<any>,
    options: {
      apiName?: string;
      fetchPolicy?:
        | 'cache-first'
        | 'cache-and-network'
        | 'network-only'
        | 'no-cache'
        | 'cache-only';
      headers?: HttpHeaders | { [header: string]: string | string[] };
      observe?: 'body';
      params?: HttpParams | { [param: string]: string | string[] };
      reportProgress?: boolean;
      responseType: 'arraybuffer';
      withCredentials?: boolean;
    }
  ): Observable<ArrayBuffer>;
  // Overload: body as Blob.
  get(
    resource: ODataResource<any>,
    options: {
      apiName?: string;
      fetchPolicy?:
        | 'cache-first'
        | 'cache-and-network'
        | 'network-only'
        | 'no-cache'
        | 'cache-only';
      headers?: HttpHeaders | { [header: string]: string | string[] };
      observe?: 'body';
      params?: HttpParams | { [param: string]: string | string[] };
      reportProgress?: boolean;
      responseType: 'blob';
      withCredentials?: boolean;
    }
  ): Observable<Blob>;
  // Overload: body as text.
  get(
    resource: ODataResource<any>,
    options: {
      apiName?: string;
      fetchPolicy?:
        | 'cache-first'
        | 'cache-and-network'
        | 'network-only'
        | 'no-cache'
        | 'cache-only';
      headers?: HttpHeaders | { [header: string]: string | string[] };
      observe?: 'body';
      params?: HttpParams | { [param: string]: string | string[] };
      reportProgress?: boolean;
      responseType: 'text';
      withCredentials?: boolean;
    }
  ): Observable<string>;
  // Overload: event stream, ArrayBuffer body.
  get(
    resource: ODataResource<any>,
    options: {
      apiName?: string;
      fetchPolicy?:
        | 'cache-first'
        | 'cache-and-network'
        | 'network-only'
        | 'no-cache'
        | 'cache-only';
      headers?: HttpHeaders | { [header: string]: string | string[] };
      observe: 'events';
      params?: HttpParams | { [param: string]: string | string[] };
      reportProgress?: boolean;
      responseType: 'arraybuffer';
      withCredentials?: boolean;
    }
  ): Observable<HttpEvent<ArrayBuffer>>;
  // Overload: event stream, Blob body.
  get(
    resource: ODataResource<any>,
    options: {
      apiName?: string;
      fetchPolicy?:
        | 'cache-first'
        | 'cache-and-network'
        | 'network-only'
        | 'no-cache'
        | 'cache-only';
      headers?: HttpHeaders | { [header: string]: string | string[] };
      observe: 'events';
      params?: HttpParams | { [param: string]: string | string[] };
      reportProgress?: boolean;
      responseType: 'blob';
      withCredentials?: boolean;
    }
  ): Observable<HttpEvent<Blob>>;
  // Overload: event stream, text body.
  get(
    resource: ODataResource<any>,
    options: {
      apiName?: string;
      fetchPolicy?:
        | 'cache-first'
        | 'cache-and-network'
        | 'network-only'
        | 'no-cache'
        | 'cache-only';
      headers?: HttpHeaders | { [header: string]: string | string[] };
      observe: 'events';
      params?: HttpParams | { [param: string]: string | string[] };
      reportProgress?: boolean;
      responseType: 'text';
      withCredentials?: boolean;
    }
  ): Observable<HttpEvent<string>>;
  // Overload: event stream, untyped JSON body.
  get(
    resource: ODataResource<any>,
    options: {
      apiName?: string;
      fetchPolicy?:
        | 'cache-first'
        | 'cache-and-network'
        | 'network-only'
        | 'no-cache'
        | 'cache-only';
      headers?: HttpHeaders | { [header: string]: string | string[] };
      observe: 'events';
      params?: HttpParams | { [param: string]: string | string[] };
      reportProgress?: boolean;
      responseType?: 'json';
      withCredentials?: boolean;
    }
  ): Observable<HttpEvent<Object>>;
  // Overload: event stream, JSON body cast to T.
  get<T>(
    resource: ODataResource<any>,
    options: {
      apiName?: string;
      fetchPolicy?:
        | 'cache-first'
        | 'cache-and-network'
        | 'network-only'
        | 'no-cache'
        | 'cache-only';
      headers?: HttpHeaders | { [header: string]: string | string[] };
      observe: 'events';
      params?: HttpParams | { [param: string]: string | string[] };
      reportProgress?: boolean;
      responseType?: 'json';
      withCredentials?: boolean;
    }
  ): Observable<HttpEvent<T>>;
  // Overload: full response, ArrayBuffer body.
  get(
    resource: ODataResource<any>,
    options: {
      apiName?: string;
      fetchPolicy?:
        | 'cache-first'
        | 'cache-and-network'
        | 'network-only'
        | 'no-cache'
        | 'cache-only';
      headers?: HttpHeaders | { [header: string]: string | string[] };
      observe: 'response';
      params?: HttpParams | { [param: string]: string | string[] };
      reportProgress?: boolean;
      responseType: 'arraybuffer';
      withCredentials?: boolean;
    }
  ): Observable<HttpResponse<ArrayBuffer>>;
  // Overload: full response, Blob body.
  get(
    resource: ODataResource<any>,
    options: {
      apiName?: string;
      fetchPolicy?:
        | 'cache-first'
        | 'cache-and-network'
        | 'network-only'
        | 'no-cache'
        | 'cache-only';
      headers?: HttpHeaders | { [header: string]: string | string[] };
      observe: 'response';
      params?: HttpParams | { [param: string]: string | string[] };
      reportProgress?: boolean;
      responseType: 'blob';
      withCredentials?: boolean;
    }
  ): Observable<HttpResponse<Blob>>;
  // Overload: full OData response, text body.
  get(
    resource: ODataResource<any>,
    options: {
      apiName?: string;
      fetchPolicy?:
        | 'cache-first'
        | 'cache-and-network'
        | 'network-only'
        | 'no-cache'
        | 'cache-only';
      headers?: HttpHeaders | { [header: string]: string | string[] };
      observe: 'response';
      params?: HttpParams | { [param: string]: string | string[] };
      reportProgress?: boolean;
      responseType: 'text';
      withCredentials?: boolean;
    }
  ): Observable<ODataResponse<string>>;
  // Overload: full OData response, untyped JSON body.
  get(
    resource: ODataResource<any>,
    options: {
      apiName?: string;
      fetchPolicy?:
        | 'cache-first'
        | 'cache-and-network'
        | 'network-only'
        | 'no-cache'
        | 'cache-only';
      headers?: HttpHeaders | { [header: string]: string | string[] };
      observe: 'response';
      params?: HttpParams | { [param: string]: string | string[] };
      reportProgress?: boolean;
      responseType?: 'json';
      withCredentials?: boolean;
    }
  ): Observable<ODataResponse<Object>>;
  // Overload: full OData response, JSON body cast to T.
  get<T>(
    resource: ODataResource<any>,
    options: {
      apiName?: string;
      fetchPolicy?:
        | 'cache-first'
        | 'cache-and-network'
        | 'network-only'
        | 'no-cache'
        | 'cache-only';
      headers?: HttpHeaders | { [header: string]: string | string[] };
      observe: 'response';
      params?: HttpParams | { [param: string]: string | string[] };
      reportProgress?: boolean;
      responseType?: 'json';
      withCredentials?: boolean;
    }
  ): Observable<ODataResponse<T>>;
  // Overload: untyped JSON body (default observe/responseType).
  get(
    resource: ODataResource<any>,
    options?: {
      apiName?: string;
      fetchPolicy?:
        | 'cache-first'
        | 'cache-and-network'
        | 'network-only'
        | 'no-cache'
        | 'cache-only';
      headers?: HttpHeaders | { [header: string]: string | string[] };
      observe?: 'body';
      params?: HttpParams | { [param: string]: string | string[] };
      reportProgress?: boolean;
      responseType?: 'json';
      withCredentials?: boolean;
    }
  ): Observable<Object>;
  // Overload: JSON body cast to T (default observe/responseType).
  get<T>(
    resource: ODataResource<any>,
    options?: {
      apiName?: string;
      fetchPolicy?:
        | 'cache-first'
        | 'cache-and-network'
        | 'network-only'
        | 'no-cache'
        | 'cache-only';
      headers?: HttpHeaders | { [header: string]: string | string[] };
      observe?: 'body';
      params?: HttpParams | { [param: string]: string | string[] };
      reportProgress?: boolean;
      responseType?: 'json';
      withCredentials?: boolean;
    }
  ): Observable<T>;
get(
resource: ODataResource<any>,
options: {
apiName?: string;
fetchPolicy?:
| 'cache-first'
| 'cache-and-network'
| 'network-only'
| 'no-cache'
| 'cache-only';
headers?: HttpHeaders | { [header: string]: string | string[] };
observe?: 'body' | 'events' | 'response';
params?: HttpParams | { [param: string]: string | string[] };
reportProgress?: boolean;
responseType?: 'arraybuffer' | 'blob' | 'json' | 'text';
withCredentials?: boolean;
} = {}
): Observable<any> {
return this.request<any>('GET', resource, options as any);
}
  // ---------------------------------------------------------------------------
  // `head` overloads. Same observe/responseType matrix as `get`.
  // ---------------------------------------------------------------------------
  // Overload: body as ArrayBuffer.
  head(
    resource: ODataResource<any>,
    options: {
      apiName?: string;
      fetchPolicy?:
        | 'cache-first'
        | 'cache-and-network'
        | 'network-only'
        | 'no-cache'
        | 'cache-only';
      headers?: HttpHeaders | { [header: string]: string | string[] };
      observe?: 'body';
      params?: HttpParams | { [param: string]: string | string[] };
      reportProgress?: boolean;
      responseType: 'arraybuffer';
      withCredentials?: boolean;
    }
  ): Observable<ArrayBuffer>;
  // Overload: body as Blob.
  head(
    resource: ODataResource<any>,
    options: {
      apiName?: string;
      fetchPolicy?:
        | 'cache-first'
        | 'cache-and-network'
        | 'network-only'
        | 'no-cache'
        | 'cache-only';
      headers?: HttpHeaders | { [header: string]: string | string[] };
      observe?: 'body';
      params?: HttpParams | { [param: string]: string | string[] };
      reportProgress?: boolean;
      responseType: 'blob';
      withCredentials?: boolean;
    }
  ): Observable<Blob>;
  // Overload: body as text.
  head(
    resource: ODataResource<any>,
    options: {
      apiName?: string;
      fetchPolicy?:
        | 'cache-first'
        | 'cache-and-network'
        | 'network-only'
        | 'no-cache'
        | 'cache-only';
      headers?: HttpHeaders | { [header: string]: string | string[] };
      observe?: 'body';
      params?: HttpParams | { [param: string]: string | string[] };
      reportProgress?: boolean;
      responseType: 'text';
      withCredentials?: boolean;
    }
  ): Observable<string>;
  // Overload: event stream, ArrayBuffer body.
  head(
    resource: ODataResource<any>,
    options: {
      apiName?: string;
      fetchPolicy?:
        | 'cache-first'
        | 'cache-and-network'
        | 'network-only'
        | 'no-cache'
        | 'cache-only';
      headers?: HttpHeaders | { [header: string]: string | string[] };
      observe: 'events';
      params?: HttpParams | { [param: string]: string | string[] };
      reportProgress?: boolean;
      responseType: 'arraybuffer';
      withCredentials?: boolean;
    }
  ): Observable<HttpEvent<ArrayBuffer>>;
  // Overload: event stream, Blob body.
  head(
    resource: ODataResource<any>,
    options: {
      apiName?: string;
      fetchPolicy?:
        | 'cache-first'
        | 'cache-and-network'
        | 'network-only'
        | 'no-cache'
        | 'cache-only';
      headers?: HttpHeaders | { [header: string]: string | string[] };
      observe: 'events';
      params?: HttpParams | { [param: string]: string | string[] };
      reportProgress?: boolean;
      responseType: 'blob';
      withCredentials?: boolean;
    }
  ): Observable<HttpEvent<Blob>>;
  // Overload: event stream, text body.
  head(
    resource: ODataResource<any>,
    options: {
      apiName?: string;
      fetchPolicy?:
        | 'cache-first'
        | 'cache-and-network'
        | 'network-only'
        | 'no-cache'
        | 'cache-only';
      headers?: HttpHeaders | { [header: string]: string | string[] };
      observe: 'events';
      params?: HttpParams | { [param: string]: string | string[] };
      reportProgress?: boolean;
      responseType: 'text';
      withCredentials?: boolean;
    }
  ): Observable<HttpEvent<string>>;
  // Overload: event stream, untyped JSON body.
  head(
    resource: ODataResource<any>,
    options: {
      apiName?: string;
      fetchPolicy?:
        | 'cache-first'
        | 'cache-and-network'
        | 'network-only'
        | 'no-cache'
        | 'cache-only';
      headers?: HttpHeaders | { [header: string]: string | string[] };
      observe: 'events';
      params?: HttpParams | { [param: string]: string | string[] };
      reportProgress?: boolean;
      responseType?: 'json';
      withCredentials?: boolean;
    }
  ): Observable<HttpEvent<Object>>;
  // Overload: event stream, JSON body cast to T.
  head<T>(
    resource: ODataResource<any>,
    options: {
      apiName?: string;
      fetchPolicy?:
        | 'cache-first'
        | 'cache-and-network'
        | 'network-only'
        | 'no-cache'
        | 'cache-only';
      headers?: HttpHeaders | { [header: string]: string | string[] };
      observe: 'events';
      params?: HttpParams | { [param: string]: string | string[] };
      reportProgress?: boolean;
      responseType?: 'json';
      withCredentials?: boolean;
    }
  ): Observable<HttpEvent<T>>;
  // Overload: full response, ArrayBuffer body.
  head(
    resource: ODataResource<any>,
    options: {
      apiName?: string;
      fetchPolicy?:
        | 'cache-first'
        | 'cache-and-network'
        | 'network-only'
        | 'no-cache'
        | 'cache-only';
      headers?: HttpHeaders | { [header: string]: string | string[] };
      observe: 'response';
      params?: HttpParams | { [param: string]: string | string[] };
      reportProgress?: boolean;
      responseType: 'arraybuffer';
      withCredentials?: boolean;
    }
  ): Observable<HttpResponse<ArrayBuffer>>;
  // Overload: full response, Blob body.
  head(
    resource: ODataResource<any>,
    options: {
      apiName?: string;
      fetchPolicy?:
        | 'cache-first'
        | 'cache-and-network'
        | 'network-only'
        | 'no-cache'
        | 'cache-only';
      headers?: HttpHeaders | { [header: string]: string | string[] };
      observe: 'response';
      params?: HttpParams | { [param: string]: string | string[] };
      reportProgress?: boolean;
      responseType: 'blob';
      withCredentials?: boolean;
    }
  ): Observable<HttpResponse<Blob>>;
  // Overload: full OData response, text body.
  head(
    resource: ODataResource<any>,
    options: {
      apiName?: string;
      fetchPolicy?:
        | 'cache-first'
        | 'cache-and-network'
        | 'network-only'
        | 'no-cache'
        | 'cache-only';
      headers?: HttpHeaders | { [header: string]: string | string[] };
      observe: 'response';
      params?: HttpParams | { [param: string]: string | string[] };
      reportProgress?: boolean;
      responseType: 'text';
      withCredentials?: boolean;
    }
  ): Observable<ODataResponse<string>>;
  // Overload: full OData response, untyped JSON body.
  head(
    resource: ODataResource<any>,
    options: {
      apiName?: string;
      fetchPolicy?:
        | 'cache-first'
        | 'cache-and-network'
        | 'network-only'
        | 'no-cache'
        | 'cache-only';
      headers?: HttpHeaders | { [header: string]: string | string[] };
      observe: 'response';
      params?: HttpParams | { [param: string]: string | string[] };
      reportProgress?: boolean;
      responseType?: 'json';
      withCredentials?: boolean;
    }
  ): Observable<ODataResponse<Object>>;
  // Overload: full OData response, JSON body cast to T.
  head<T>(
    resource: ODataResource<any>,
    options: {
      apiName?: string;
      fetchPolicy?:
        | 'cache-first'
        | 'cache-and-network'
        | 'network-only'
        | 'no-cache'
        | 'cache-only';
      headers?: HttpHeaders | { [header: string]: string | string[] };
      observe: 'response';
      params?: HttpParams | { [param: string]: string | string[] };
      reportProgress?: boolean;
      responseType?: 'json';
      withCredentials?: boolean;
    }
  ): Observable<ODataResponse<T>>;
  // Overload: untyped JSON body (default observe/responseType).
  head(
    resource: ODataResource<any>,
    options?: {
      apiName?: string;
      fetchPolicy?:
        | 'cache-first'
        | 'cache-and-network'
        | 'network-only'
        | 'no-cache'
        | 'cache-only';
      headers?: HttpHeaders | { [header: string]: string | string[] };
      observe?: 'body';
      params?: HttpParams | { [param: string]: string | string[] };
      reportProgress?: boolean;
      responseType?: 'json';
      withCredentials?: boolean;
    }
  ): Observable<Object>;
  // Overload: JSON body cast to T (default observe/responseType).
  head<T>(
    resource: ODataResource<any>,
    options?: {
      apiName?: string;
      fetchPolicy?:
        | 'cache-first'
        | 'cache-and-network'
        | 'network-only'
        | 'no-cache'
        | 'cache-only';
      headers?: HttpHeaders | { [header: string]: string | string[] };
      observe?: 'body';
      params?: HttpParams | { [param: string]: string | string[] };
      reportProgress?: boolean;
      responseType?: 'json';
      withCredentials?: boolean;
    }
  ): Observable<T>;
head(
resource: ODataResource<any>,
options: {
apiName?: string;
fetchPolicy?:
| 'cache-first'
| 'cache-and-network'
| 'network-only'
| 'no-cache'
| 'cache-only';
headers?: HttpHeaders | { [header: string]: string | string[] };
observe?: 'body' | 'events' | 'response';
params?: HttpParams | { [param: string]: string | string[] };
reportProgress?: boolean;
responseType?: 'arraybuffer' | 'blob' | 'json' | 'text';
withCredentials?: boolean;
} = {}
): Observable<any> {
return this.request<any>('HEAD', resource, options as any);
}
jsonp(
resource: ODataResource<any>,
callbackParam: string
): Observable<Object>;
jsonp<T>(resource: ODataResource<any>, callbackParam: string): Observable<T>;
jsonp<T>(resource: ODataResource<any>, callbackParam: string): Observable<T> {
return this.request<any>('JSONP', resource, {
body: null,
params: new HttpParams().append(callbackParam, 'JSONP_CALLBACK'),
observe: 'body',
responseType: 'json',
});
}
options(
resource: ODataResource<any>,
options: {
apiName?: string;
fetchPolicy?:
| 'cache-first'
| 'cache-and-network'
| 'network-only'
| 'no-cache'
| 'cache-only';
headers?: HttpHeaders | { [header: string]: string | string[] };
observe?: 'body';
params?: HttpParams | { [param: string]: string | string[] };
reportProgress?: boolean;
responseType: 'arraybuffer';
withCredentials?: boolean;
}
): Observable<ArrayBuffer>;
options(
resource: ODataResource<any>,
options: {
apiName?: string;
fetchPolicy?:
| 'cache-first'
| 'cache-and-network'
| 'network-only'
| 'no-cache'
| 'cache-only';
headers?: HttpHeaders | { [header: string]: string | string[] };
observe?: 'body';
params?: HttpParams | { [param: string]: string | string[] };
reportProgress?: boolean;
responseType: 'blob';
withCredentials?: boolean;
}
): Observable<Blob>;
options(
resource: ODataResource<any>,
options: {
apiName?: string;
fetchPolicy?:
| 'cache-first'
| 'cache-and-network'
| 'network-only'
| 'no-cache'
| 'cache-only';
headers?: HttpHeaders | { [header: string]: string | string[] };
observe?: 'body';
params?: HttpParams | { [param: string]: string | string[] };
reportProgress?: boolean;
responseType: 'text';
withCredentials?: boolean;
}
): Observable<string>;
options(
resource: ODataResource<any>,
options: {
apiName?: string;
fetchPolicy?:
| 'cache-first'
| 'cache-and-network'
| 'network-only'
| 'no-cache'
| 'cache-only';
headers?: HttpHeaders | { [header: string]: string | string[] };
observe: 'events';
params?: HttpParams | { [param: string]: string | string[] };
reportProgress?: boolean;
responseType: 'arraybuffer';
withCredentials?: boolean;
}
): Observable<HttpEvent<ArrayBuffer>>;
options(
resource: ODataResource<any>,
options: {
apiName?: string;
fetchPolicy?:
| 'cache-first'
| 'cache-and-network'
| 'network-only'
| 'no-cache'
| 'cache-only';
headers?: HttpHeaders | { [header: string]: string | string[] };
observe: 'events';
params?: HttpParams | { [param: string]: string | string[] };
reportProgress?: boolean;
responseType: 'blob';
withCredentials?: boolean;
}
): Observable<HttpEvent<Blob>>;
options(
resource: ODataResource<any>,
options: {
apiName?: string;
fetchPolicy?:
| 'cache-first'
| 'cache-and-network'
| 'network-only'
| 'no-cache'
| 'cache-only';
headers?: HttpHeaders | { [header: string]: string | string[] };
observe: 'events';
params?: HttpParams | { [param: string]: string | string[] };
reportProgress?: boolean;
responseType: 'text';
withCredentials?: boolean;
}
): Observable<HttpEvent<string>>;
options(
resource: ODataResource<any>,
options: {
apiName?: string;
fetchPolicy?:
| 'cache-first'
| 'cache-and-network'
| 'network-only'
| 'no-cache'
| 'cache-only';
headers?: HttpHeaders | { [header: string]: string | string[] };
observe: 'events';
params?: HttpParams | { [param: string]: string | string[] };
reportProgress?: boolean;
responseType?: 'json';
withCredentials?: boolean;
}
): Observable<HttpEvent<Object>>;
options(
resource: ODataResource<any>,
options: {
apiName?: string;
fetchPolicy?:
| 'cache-first'
| 'cache-and-network'
| 'network-only'
| 'no-cache'
| 'cache-only';
headers?: HttpHeaders | { [header: string]: string | string[] };
observe: 'events';
params?: HttpParams | { [param: string]: string | string[] };
reportProgress?: boolean;
responseType?: 'json';
withCredentials?: boolean;
}
): Observable<HttpEvent<Object>>;
options<T>(
resource: ODataResource<any>,
options: {
apiName?: string;
fetchPolicy?:
| 'cache-first'
| 'cache-and-network'
| 'network-only'
| 'no-cache'
| 'cache-only';
headers?: HttpHeaders | { [header: string]: string | string[] };
observe: 'events';
params?: HttpParams | { [param: string]: string | string[] };
reportProgress?: boolean;
responseType?: 'json';
withCredentials?: boolean;
}
): Observable<HttpEvent<T>>;
options(
resource: ODataResource<any>,
options: {
apiName?: string;
fetchPolicy?:
| 'cache-first'
| 'cache-and-network'
| 'network-only'
| 'no-cache'
| 'cache-only';
headers?: HttpHeaders | { [header: string]: string | string[] };
observe: 'response';
params?: HttpParams | { [param: string]: string | string[] };
reportProgress?: boolean;
responseType: 'arraybuffer';
withCredentials?: boolean;
}
): Observable<HttpResponse<ArrayBuffer>>;
options(
resource: ODataResource<any>,
options: {
apiName?: string;
fetchPolicy?:
| 'cache-first'
| 'cache-and-network'
| 'network-only'
| 'no-cache'
| 'cache-only';
headers?: HttpHeaders | { [header: string]: string | string[] };
observe: 'response';
params?: HttpParams | { [param: string]: string | string[] };
reportProgress?: boolean;
responseType: 'blob';
withCredentials?: boolean;
}
): Observable<HttpResponse<Blob>>;
options(
resource: ODataResource<any>,
options: {
apiName?: string;
fetchPolicy?:
| 'cache-first'
| 'cache-and-network'
| 'network-only'
| 'no-cache'
| 'cache-only';
headers?: HttpHeaders | { [header: string]: string | string[] };
observe: 'response';
params?: HttpParams | { [param: string]: string | string[] };
reportProgress?: boolean;
responseType: 'text';
withCredentials?: boolean;
}
): Observable<ODataResponse<string>>;
options(
resource: ODataResource<any>,
options: {
apiName?: string;
fetchPolicy?:
| 'cache-first'
| 'cache-and-network'
| 'network-only'
| 'no-cache'
| 'cache-only';
headers?: HttpHeaders | { [header: string]: string | string[] };
observe: 'response';
params?: HttpParams | { [param: string]: string | string[] };
reportProgress?: boolean;
responseType?: 'json';
withCredentials?: boolean;
}
): Observable<ODataResponse<Object>>;
options<T>(
resource: ODataResource<any>,
options: {
apiName?: string;
fetchPolicy?:
| 'cache-first'
| 'cache-and-network'
| 'network-only'
| 'no-cache'
| 'cache-only';
headers?: HttpHeaders | { [header: string]: string | string[] };
observe: 'response';
params?: HttpParams | { [param: string]: string | string[] };
reportProgress?: boolean;
responseType?: 'json';
withCredentials?: boolean;
}
): Observable<ODataResponse<T>>;
options(
resource: ODataResource<any>,
options?: {
apiName?: string;
fetchPolicy?:
| 'cache-first'
| 'cache-and-network'
| 'network-only'
| 'no-cache'
| 'cache-only';
headers?: HttpHeaders | { [header: string]: string | string[] };
observe?: 'body';
params?: HttpParams | { [param: string]: string | string[] };
reportProgress?: boolean;
responseType?: 'json';
withCredentials?: boolean;
}
): Observable<Object>;
options<T>(
resource: ODataResource<any>,
options?: {
apiName?: string;
fetchPolicy?:
| 'cache-first'
| 'cache-and-network'
| 'network-only'
| 'no-cache'
| 'cache-only';
headers?: HttpHeaders | { [header: string]: string | string[] };
observe?: 'body';
params?: HttpParams | { [param: string]: string | string[] };
reportProgress?: boolean;
responseType?: 'json';
withCredentials?: boolean;
}
): Observable<T>;
options(
resource: ODataResource<any>,
options: {
apiName?: string;
fetchPolicy?:
| 'cache-first'
| 'cache-and-network'
| 'network-only'
| 'no-cache'
| 'cache-only';
headers?: HttpHeaders | { [header: string]: string | string[] };
observe?: 'body' | 'events' | 'response';
params?: HttpParams | { [param: string]: string | string[] };
reportProgress?: boolean;
responseType?: 'arraybuffer' | 'blob' | 'json' | 'text';
withCredentials?: boolean;
} = {}
): Observable<any> {
return this.request<any>('OPTIONS', resource, options as any);
}
  // ---------------------------------------------------------------------------
  // `patch` overloads. Same observe/responseType matrix as the other verbs,
  // with a request `body` parameter and an optional `etag`.
  // ---------------------------------------------------------------------------
  // Overload: body as ArrayBuffer.
  patch(
    resource: ODataResource<any>,
    body: any | null,
    options?: {
      etag?: string;
      apiName?: string;
      fetchPolicy?:
        | 'cache-first'
        | 'cache-and-network'
        | 'network-only'
        | 'no-cache'
        | 'cache-only';
      headers?: HttpHeaders | { [header: string]: string | string[] };
      observe?: 'body';
      params?: HttpParams | { [param: string]: string | string[] };
      reportProgress?: boolean;
      responseType: 'arraybuffer';
      withCredentials?: boolean;
    }
  ): Observable<ArrayBuffer>;
  // Overload: body as Blob.
  patch(
    resource: ODataResource<any>,
    body: any | null,
    options?: {
      etag?: string;
      apiName?: string;
      fetchPolicy?:
        | 'cache-first'
        | 'cache-and-network'
        | 'network-only'
        | 'no-cache'
        | 'cache-only';
      headers?: HttpHeaders | { [header: string]: string | string[] };
      observe?: 'body';
      params?: HttpParams | { [param: string]: string | string[] };
      reportProgress?: boolean;
      responseType: 'blob';
      withCredentials?: boolean;
    }
  ): Observable<Blob>;
  // Overload: body as text.
  patch(
    resource: ODataResource<any>,
    body: any | null,
    options?: {
      etag?: string;
      apiName?: string;
      fetchPolicy?:
        | 'cache-first'
        | 'cache-and-network'
        | 'network-only'
        | 'no-cache'
        | 'cache-only';
      headers?: HttpHeaders | { [header: string]: string | string[] };
      observe?: 'body';
      params?: HttpParams | { [param: string]: string | string[] };
      reportProgress?: boolean;
      responseType: 'text';
      withCredentials?: boolean;
    }
  ): Observable<string>;
  // Overload: event stream, ArrayBuffer body.
  patch(
    resource: ODataResource<any>,
    body: any | null,
    options?: {
      etag?: string;
      apiName?: string;
      fetchPolicy?:
        | 'cache-first'
        | 'cache-and-network'
        | 'network-only'
        | 'no-cache'
        | 'cache-only';
      headers?: HttpHeaders | { [header: string]: string | string[] };
      observe: 'events';
      params?: HttpParams | { [param: string]: string | string[] };
      reportProgress?: boolean;
      responseType: 'arraybuffer';
      withCredentials?: boolean;
    }
  ): Observable<HttpEvent<ArrayBuffer>>;
  // Overload: event stream, Blob body.
  patch(
    resource: ODataResource<any>,
    body: any | null,
    options?: {
      etag?: string;
      apiName?: string;
      fetchPolicy?:
        | 'cache-first'
        | 'cache-and-network'
        | 'network-only'
        | 'no-cache'
        | 'cache-only';
      headers?: HttpHeaders | { [header: string]: string | string[] };
      observe: 'events';
      params?: HttpParams | { [param: string]: string | string[] };
      reportProgress?: boolean;
      responseType: 'blob';
      withCredentials?: boolean;
    }
  ): Observable<HttpEvent<Blob>>;
  // Overload: event stream, text body.
  patch(
    resource: ODataResource<any>,
    body: any | null,
    options?: {
      etag?: string;
      apiName?: string;
      fetchPolicy?:
        | 'cache-first'
        | 'cache-and-network'
        | 'network-only'
        | 'no-cache'
        | 'cache-only';
      headers?: HttpHeaders | { [header: string]: string | string[] };
      observe: 'events';
      params?: HttpParams | { [param: string]: string | string[] };
      reportProgress?: boolean;
      responseType: 'text';
      withCredentials?: boolean;
    }
  ): Observable<HttpEvent<string>>;
  // Overload: event stream, untyped JSON body.
  patch(
    resource: ODataResource<any>,
    body: any | null,
    options?: {
      etag?: string;
      apiName?: string;
      fetchPolicy?:
        | 'cache-first'
        | 'cache-and-network'
        | 'network-only'
        | 'no-cache'
        | 'cache-only';
      headers?: HttpHeaders | { [header: string]: string | string[] };
      observe: 'events';
      params?: HttpParams | { [param: string]: string | string[] };
      reportProgress?: boolean;
      responseType?: 'json';
      withCredentials?: boolean;
    }
  ): Observable<HttpEvent<Object>>;
  // Overload: event stream, JSON body cast to T.
  patch<T>(
    resource: ODataResource<any>,
    body: any | null,
    options?: {
      etag?: string;
      apiName?: string;
      fetchPolicy?:
        | 'cache-first'
        | 'cache-and-network'
        | 'network-only'
        | 'no-cache'
        | 'cache-only';
      headers?: HttpHeaders | { [header: string]: string | string[] };
      observe: 'events';
      params?: HttpParams | { [param: string]: string | string[] };
      reportProgress?: boolean;
      responseType?: 'json';
      withCredentials?: boolean;
    }
  ): Observable<HttpEvent<T>>;
  // Overload: full response, ArrayBuffer body.
  patch(
    resource: ODataResource<any>,
    body: any | null,
    options?: {
      etag?: string;
      apiName?: string;
      fetchPolicy?:
        | 'cache-first'
        | 'cache-and-network'
        | 'network-only'
        | 'no-cache'
        | 'cache-only';
      headers?: HttpHeaders | { [header: string]: string | string[] };
      observe: 'response';
      params?: HttpParams | { [param: string]: string | string[] };
      reportProgress?: boolean;
      responseType: 'arraybuffer';
      withCredentials?: boolean;
    }
  ): Observable<HttpResponse<ArrayBuffer>>;
  // Overload: full response, Blob body.
  patch(
    resource: ODataResource<any>,
    body: any | null,
    options?: {
      etag?: string;
      apiName?: string;
      fetchPolicy?:
        | 'cache-first'
        | 'cache-and-network'
        | 'network-only'
        | 'no-cache'
        | 'cache-only';
      headers?: HttpHeaders | { [header: string]: string | string[] };
      observe: 'response';
      params?: HttpParams | { [param: string]: string | string[] };
      reportProgress?: boolean;
      responseType: 'blob';
      withCredentials?: boolean;
    }
  ): Observable<HttpResponse<Blob>>;
  // Overload: full OData response, text body.
  patch(
    resource: ODataResource<any>,
    body: any | null,
    options?: {
      etag?: string;
      apiName?: string;
      fetchPolicy?:
        | 'cache-first'
        | 'cache-and-network'
        | 'network-only'
        | 'no-cache'
        | 'cache-only';
      headers?: HttpHeaders | { [header: string]: string | string[] };
      observe: 'response';
      params?: HttpParams | { [param: string]: string | string[] };
      reportProgress?: boolean;
      responseType: 'text';
      withCredentials?: boolean;
    }
  ): Observable<ODataResponse<string>>;
patch(
resource: ODataResource<any>,
body: any | null,
options?: {
etag?: string;
apiName?: string;
fetchPolicy?:
| 'cache-first'
| 'cache-and-network'
| 'network-only'
| 'no-cache'
| 'cache-only';
headers?: HttpHeaders | { [header: string]: string | string[] };
observe: 'response';
params?: HttpParams | { [param: string]: string | string[] };
reportProgress?: boolean;
responseType?: 'json';
withCredentials?: boolean;
}
): Observable<ODataResponse<Object>>;
patch<T>(
resource: ODataResource<any>,
body: any | null,
options?: {
etag?: string;
apiName?: string;
fetchPolicy?:
| 'cache-first'
| 'cache-and-network'
| 'network-only'
| 'no-cache'
| 'cache-only';
headers?: HttpHeaders | { [header: string]: string | string[] };
observe: 'response';
params?: HttpParams | { [param: string]: string | string[] };
reportProgress?: boolean;
responseType?: 'json';
withCredentials?: boolean;
}
): Observable<ODataResponse<T>>;
patch(
resource: ODataResource<any>,
body: any | null,
options?: {
etag?: string;
apiName?: string;
fetchPolicy?:
| 'cache-first'
| 'cache-and-network'
| 'network-only'
| 'no-cache'
| 'cache-only';
headers?: HttpHeaders | { [header: string]: string | string[] };
observe?: 'body';
params?: HttpParams | { [param: string]: string | string[] };
reportProgress?: boolean;
responseType?: 'json';
withCredentials?: boolean;
}
): Observable<Object>;
patch<T>(
resource: ODataResource<any>,
body: any | null,
options?: {
etag?: string;
apiName?: string;
fetchPolicy?:
| 'cache-first'
| 'cache-and-network'
| 'network-only'
| 'no-cache'
| 'cache-only';
headers?: HttpHeaders | { [header: string]: string | string[] };
observe?: 'body';
params?: HttpParams | { [param: string]: string | string[] };
reportProgress?: boolean;
responseType?: 'json';
withCredentials?: boolean;
}
): Observable<T>;
patch(
resource: ODataResource<any>,
body: any | null,
options: {
etag?: string;
apiName?: string;
fetchPolicy?:
| 'cache-first'
| 'cache-and-network'
| 'network-only'
| 'no-cache'
| 'cache-only';
headers?: HttpHeaders | { [header: string]: string | string[] };
observe?: 'body' | 'events' | 'response';
params?: HttpParams | { [param: string]: string | string[] };
reportProgress?: boolean;
responseType?: 'arraybuffer' | 'blob' | 'json' | 'text';
withCredentials?: boolean;
} = {}
): Observable<any> {
return this.request<any>('PATCH', resource, addBody(options, body));
}
/**
 * Sends an HTTP POST request to the OData resource (e.g. creating an entity
 * or invoking an action).
 *
 * The overloads narrow the returned Observable's element type from the
 * combination of `observe` ('body' | 'events' | 'response') and
 * `responseType` ('arraybuffer' | 'blob' | 'text' | 'json'). Unlike `patch`
 * and `put`, POST takes no `etag` option (there is no existing entity to
 * guard against concurrent modification).
 *
 * NOTE(review): the `observe: 'response'` overloads return
 * `HttpResponse<...>` for arraybuffer/blob but `ODataResponse<...>` for
 * text/json — confirm this asymmetry is intentional.
 */
// observe 'body' + responseType 'arraybuffer': raw binary body.
post(
  resource: ODataResource<any>,
  body: any | null,
  options: {
    apiName?: string;
    fetchPolicy?:
      | 'cache-first'
      | 'cache-and-network'
      | 'network-only'
      | 'no-cache'
      | 'cache-only';
    headers?: HttpHeaders | { [header: string]: string | string[] };
    observe?: 'body';
    params?: HttpParams | { [param: string]: string | string[] };
    reportProgress?: boolean;
    responseType: 'arraybuffer';
    withCredentials?: boolean;
  }
): Observable<ArrayBuffer>;
// observe 'body' + responseType 'blob': Blob body.
post(
  resource: ODataResource<any>,
  body: any | null,
  options: {
    apiName?: string;
    fetchPolicy?:
      | 'cache-first'
      | 'cache-and-network'
      | 'network-only'
      | 'no-cache'
      | 'cache-only';
    headers?: HttpHeaders | { [header: string]: string | string[] };
    observe?: 'body';
    params?: HttpParams | { [param: string]: string | string[] };
    reportProgress?: boolean;
    responseType: 'blob';
    withCredentials?: boolean;
  }
): Observable<Blob>;
// observe 'body' + responseType 'text': string body.
post(
  resource: ODataResource<any>,
  body: any | null,
  options: {
    apiName?: string;
    fetchPolicy?:
      | 'cache-first'
      | 'cache-and-network'
      | 'network-only'
      | 'no-cache'
      | 'cache-only';
    headers?: HttpHeaders | { [header: string]: string | string[] };
    observe?: 'body';
    params?: HttpParams | { [param: string]: string | string[] };
    reportProgress?: boolean;
    responseType: 'text';
    withCredentials?: boolean;
  }
): Observable<string>;
// observe 'events' + responseType 'arraybuffer': event stream of binary responses.
post(
  resource: ODataResource<any>,
  body: any | null,
  options: {
    apiName?: string;
    fetchPolicy?:
      | 'cache-first'
      | 'cache-and-network'
      | 'network-only'
      | 'no-cache'
      | 'cache-only';
    headers?: HttpHeaders | { [header: string]: string | string[] };
    observe: 'events';
    params?: HttpParams | { [param: string]: string | string[] };
    reportProgress?: boolean;
    responseType: 'arraybuffer';
    withCredentials?: boolean;
  }
): Observable<HttpEvent<ArrayBuffer>>;
// observe 'events' + responseType 'blob': event stream of Blob responses.
post(
  resource: ODataResource<any>,
  body: any | null,
  options: {
    apiName?: string;
    fetchPolicy?:
      | 'cache-first'
      | 'cache-and-network'
      | 'network-only'
      | 'no-cache'
      | 'cache-only';
    headers?: HttpHeaders | { [header: string]: string | string[] };
    observe: 'events';
    params?: HttpParams | { [param: string]: string | string[] };
    reportProgress?: boolean;
    responseType: 'blob';
    withCredentials?: boolean;
  }
): Observable<HttpEvent<Blob>>;
// observe 'events' + responseType 'text': event stream of string responses.
post(
  resource: ODataResource<any>,
  body: any | null,
  options: {
    apiName?: string;
    fetchPolicy?:
      | 'cache-first'
      | 'cache-and-network'
      | 'network-only'
      | 'no-cache'
      | 'cache-only';
    headers?: HttpHeaders | { [header: string]: string | string[] };
    observe: 'events';
    params?: HttpParams | { [param: string]: string | string[] };
    reportProgress?: boolean;
    responseType: 'text';
    withCredentials?: boolean;
  }
): Observable<HttpEvent<string>>;
// observe 'events' + JSON body (default responseType): untyped event stream.
post(
  resource: ODataResource<any>,
  body: any | null,
  options: {
    apiName?: string;
    fetchPolicy?:
      | 'cache-first'
      | 'cache-and-network'
      | 'network-only'
      | 'no-cache'
      | 'cache-only';
    headers?: HttpHeaders | { [header: string]: string | string[] };
    observe: 'events';
    params?: HttpParams | { [param: string]: string | string[] };
    reportProgress?: boolean;
    responseType?: 'json';
    withCredentials?: boolean;
  }
): Observable<HttpEvent<Object>>;
// observe 'events' + JSON body, with caller-supplied payload type T.
post<T>(
  resource: ODataResource<any>,
  body: any | null,
  options: {
    apiName?: string;
    fetchPolicy?:
      | 'cache-first'
      | 'cache-and-network'
      | 'network-only'
      | 'no-cache'
      | 'cache-only';
    headers?: HttpHeaders | { [header: string]: string | string[] };
    observe: 'events';
    params?: HttpParams | { [param: string]: string | string[] };
    reportProgress?: boolean;
    responseType?: 'json';
    withCredentials?: boolean;
  }
): Observable<HttpEvent<T>>;
// observe 'response' + responseType 'arraybuffer': full HTTP response, binary body.
post(
  resource: ODataResource<any>,
  body: any | null,
  options: {
    apiName?: string;
    fetchPolicy?:
      | 'cache-first'
      | 'cache-and-network'
      | 'network-only'
      | 'no-cache'
      | 'cache-only';
    headers?: HttpHeaders | { [header: string]: string | string[] };
    observe: 'response';
    params?: HttpParams | { [param: string]: string | string[] };
    reportProgress?: boolean;
    responseType: 'arraybuffer';
    withCredentials?: boolean;
  }
): Observable<HttpResponse<ArrayBuffer>>;
// observe 'response' + responseType 'blob': full HTTP response, Blob body.
post(
  resource: ODataResource<any>,
  body: any | null,
  options: {
    apiName?: string;
    fetchPolicy?:
      | 'cache-first'
      | 'cache-and-network'
      | 'network-only'
      | 'no-cache'
      | 'cache-only';
    headers?: HttpHeaders | { [header: string]: string | string[] };
    observe: 'response';
    params?: HttpParams | { [param: string]: string | string[] };
    reportProgress?: boolean;
    responseType: 'blob';
    withCredentials?: boolean;
  }
): Observable<HttpResponse<Blob>>;
// observe 'response' + responseType 'text': OData-wrapped response, string body.
post(
  resource: ODataResource<any>,
  body: any | null,
  options: {
    apiName?: string;
    fetchPolicy?:
      | 'cache-first'
      | 'cache-and-network'
      | 'network-only'
      | 'no-cache'
      | 'cache-only';
    headers?: HttpHeaders | { [header: string]: string | string[] };
    observe: 'response';
    params?: HttpParams | { [param: string]: string | string[] };
    reportProgress?: boolean;
    responseType: 'text';
    withCredentials?: boolean;
  }
): Observable<ODataResponse<string>>;
// observe 'response' + JSON body: OData-wrapped response, untyped body.
post(
  resource: ODataResource<any>,
  body: any | null,
  options: {
    apiName?: string;
    fetchPolicy?:
      | 'cache-first'
      | 'cache-and-network'
      | 'network-only'
      | 'no-cache'
      | 'cache-only';
    headers?: HttpHeaders | { [header: string]: string | string[] };
    observe: 'response';
    params?: HttpParams | { [param: string]: string | string[] };
    reportProgress?: boolean;
    responseType?: 'json';
    withCredentials?: boolean;
  }
): Observable<ODataResponse<Object>>;
// observe 'response' + JSON body, with caller-supplied payload type T.
post<T>(
  resource: ODataResource<any>,
  body: any | null,
  options: {
    apiName?: string;
    fetchPolicy?:
      | 'cache-first'
      | 'cache-and-network'
      | 'network-only'
      | 'no-cache'
      | 'cache-only';
    headers?: HttpHeaders | { [header: string]: string | string[] };
    observe: 'response';
    params?: HttpParams | { [param: string]: string | string[] };
    reportProgress?: boolean;
    responseType?: 'json';
    withCredentials?: boolean;
  }
): Observable<ODataResponse<T>>;
// Default shape: observe 'body' + JSON, untyped body (options may be omitted).
post(
  resource: ODataResource<any>,
  body: any | null,
  options?: {
    apiName?: string;
    fetchPolicy?:
      | 'cache-first'
      | 'cache-and-network'
      | 'network-only'
      | 'no-cache'
      | 'cache-only';
    headers?: HttpHeaders | { [header: string]: string | string[] };
    observe?: 'body';
    params?: HttpParams | { [param: string]: string | string[] };
    reportProgress?: boolean;
    responseType?: 'json';
    withCredentials?: boolean;
  }
): Observable<Object>;
// Default shape: observe 'body' + JSON, with caller-supplied payload type T.
post<T>(
  resource: ODataResource<any>,
  body: any | null,
  options?: {
    apiName?: string;
    fetchPolicy?:
      | 'cache-first'
      | 'cache-and-network'
      | 'network-only'
      | 'no-cache'
      | 'cache-only';
    headers?: HttpHeaders | { [header: string]: string | string[] };
    observe?: 'body';
    params?: HttpParams | { [param: string]: string | string[] };
    reportProgress?: boolean;
    responseType?: 'json';
    withCredentials?: boolean;
  }
): Observable<T>;
// Implementation signature (not visible to callers): accepts the union of all
// option shapes above and delegates to the shared request dispatcher.
post(
  resource: ODataResource<any>,
  body: any | null,
  options: {
    apiName?: string;
    fetchPolicy?:
      | 'cache-first'
      | 'cache-and-network'
      | 'network-only'
      | 'no-cache'
      | 'cache-only';
    headers?: HttpHeaders | { [header: string]: string | string[] };
    observe?: 'body' | 'events' | 'response';
    params?: HttpParams | { [param: string]: string | string[] };
    reportProgress?: boolean;
    responseType?: 'arraybuffer' | 'blob' | 'json' | 'text';
    withCredentials?: boolean;
  } = {}
): Observable<any> {
  // `addBody` merges the request body into the options object expected by
  // the generic `request` method.
  return this.request<any>('POST', resource, addBody(options, body));
}
/**
 * Sends an HTTP PUT request (full replacement) to the OData resource.
 *
 * The overloads narrow the returned Observable's element type from the
 * combination of `observe` ('body' | 'events' | 'response') and
 * `responseType` ('arraybuffer' | 'blob' | 'text' | 'json').
 *
 * Common options:
 * - `etag`: entity tag for optimistic concurrency on the replacement
 *   (presumably sent as an If-Match header — confirm in `request`).
 * - `apiName` / `fetchPolicy`: OData-client-specific endpoint selection and
 *   cache behaviour.
 *
 * NOTE(review): the `observe: 'response'` overloads return
 * `HttpResponse<...>` for arraybuffer/blob but `ODataResponse<...>` for
 * text/json — confirm this asymmetry is intentional.
 */
// observe 'body' + responseType 'arraybuffer': raw binary body.
put(
  resource: ODataResource<any>,
  body: any | null,
  options?: {
    etag?: string;
    apiName?: string;
    fetchPolicy?:
      | 'cache-first'
      | 'cache-and-network'
      | 'network-only'
      | 'no-cache'
      | 'cache-only';
    headers?: HttpHeaders | { [header: string]: string | string[] };
    observe?: 'body';
    params?: HttpParams | { [param: string]: string | string[] };
    reportProgress?: boolean;
    responseType: 'arraybuffer';
    withCredentials?: boolean;
  }
): Observable<ArrayBuffer>;
// observe 'body' + responseType 'blob': Blob body.
put(
  resource: ODataResource<any>,
  body: any | null,
  options?: {
    etag?: string;
    apiName?: string;
    fetchPolicy?:
      | 'cache-first'
      | 'cache-and-network'
      | 'network-only'
      | 'no-cache'
      | 'cache-only';
    headers?: HttpHeaders | { [header: string]: string | string[] };
    observe?: 'body';
    params?: HttpParams | { [param: string]: string | string[] };
    reportProgress?: boolean;
    responseType: 'blob';
    withCredentials?: boolean;
  }
): Observable<Blob>;
// observe 'body' + responseType 'text': string body.
put(
  resource: ODataResource<any>,
  body: any | null,
  options?: {
    etag?: string;
    apiName?: string;
    fetchPolicy?:
      | 'cache-first'
      | 'cache-and-network'
      | 'network-only'
      | 'no-cache'
      | 'cache-only';
    headers?: HttpHeaders | { [header: string]: string | string[] };
    observe?: 'body';
    params?: HttpParams | { [param: string]: string | string[] };
    reportProgress?: boolean;
    responseType: 'text';
    withCredentials?: boolean;
  }
): Observable<string>;
// observe 'events' + responseType 'arraybuffer': event stream of binary responses.
put(
  resource: ODataResource<any>,
  body: any | null,
  options?: {
    etag?: string;
    apiName?: string;
    fetchPolicy?:
      | 'cache-first'
      | 'cache-and-network'
      | 'network-only'
      | 'no-cache'
      | 'cache-only';
    headers?: HttpHeaders | { [header: string]: string | string[] };
    observe: 'events';
    params?: HttpParams | { [param: string]: string | string[] };
    reportProgress?: boolean;
    responseType: 'arraybuffer';
    withCredentials?: boolean;
  }
): Observable<HttpEvent<ArrayBuffer>>;
// observe 'events' + responseType 'blob': event stream of Blob responses.
put(
  resource: ODataResource<any>,
  body: any | null,
  options?: {
    etag?: string;
    apiName?: string;
    fetchPolicy?:
      | 'cache-first'
      | 'cache-and-network'
      | 'network-only'
      | 'no-cache'
      | 'cache-only';
    headers?: HttpHeaders | { [header: string]: string | string[] };
    observe: 'events';
    params?: HttpParams | { [param: string]: string | string[] };
    reportProgress?: boolean;
    responseType: 'blob';
    withCredentials?: boolean;
  }
): Observable<HttpEvent<Blob>>;
// observe 'events' + responseType 'text': event stream of string responses.
put(
  resource: ODataResource<any>,
  body: any | null,
  options?: {
    etag?: string;
    apiName?: string;
    fetchPolicy?:
      | 'cache-first'
      | 'cache-and-network'
      | 'network-only'
      | 'no-cache'
      | 'cache-only';
    headers?: HttpHeaders | { [header: string]: string | string[] };
    observe: 'events';
    params?: HttpParams | { [param: string]: string | string[] };
    reportProgress?: boolean;
    responseType: 'text';
    withCredentials?: boolean;
  }
): Observable<HttpEvent<string>>;
// observe 'events' + JSON body (default responseType): untyped event stream.
put(
  resource: ODataResource<any>,
  body: any | null,
  options?: {
    etag?: string;
    apiName?: string;
    fetchPolicy?:
      | 'cache-first'
      | 'cache-and-network'
      | 'network-only'
      | 'no-cache'
      | 'cache-only';
    headers?: HttpHeaders | { [header: string]: string | string[] };
    observe: 'events';
    params?: HttpParams | { [param: string]: string | string[] };
    reportProgress?: boolean;
    responseType?: 'json';
    withCredentials?: boolean;
  }
): Observable<HttpEvent<Object>>;
// observe 'events' + JSON body, with caller-supplied payload type T.
put<T>(
  resource: ODataResource<any>,
  body: any | null,
  options?: {
    etag?: string;
    apiName?: string;
    fetchPolicy?:
      | 'cache-first'
      | 'cache-and-network'
      | 'network-only'
      | 'no-cache'
      | 'cache-only';
    headers?: HttpHeaders | { [header: string]: string | string[] };
    observe: 'events';
    params?: HttpParams | { [param: string]: string | string[] };
    reportProgress?: boolean;
    responseType?: 'json';
    withCredentials?: boolean;
  }
): Observable<HttpEvent<T>>;
// observe 'response' + responseType 'arraybuffer': full HTTP response, binary body.
put(
  resource: ODataResource<any>,
  body: any | null,
  options?: {
    etag?: string;
    apiName?: string;
    fetchPolicy?:
      | 'cache-first'
      | 'cache-and-network'
      | 'network-only'
      | 'no-cache'
      | 'cache-only';
    headers?: HttpHeaders | { [header: string]: string | string[] };
    observe: 'response';
    params?: HttpParams | { [param: string]: string | string[] };
    reportProgress?: boolean;
    responseType: 'arraybuffer';
    withCredentials?: boolean;
  }
): Observable<HttpResponse<ArrayBuffer>>;
// observe 'response' + responseType 'blob': full HTTP response, Blob body.
put(
  resource: ODataResource<any>,
  body: any | null,
  options?: {
    etag?: string;
    apiName?: string;
    fetchPolicy?:
      | 'cache-first'
      | 'cache-and-network'
      | 'network-only'
      | 'no-cache'
      | 'cache-only';
    headers?: HttpHeaders | { [header: string]: string | string[] };
    observe: 'response';
    params?: HttpParams | { [param: string]: string | string[] };
    reportProgress?: boolean;
    responseType: 'blob';
    withCredentials?: boolean;
  }
): Observable<HttpResponse<Blob>>;
// observe 'response' + responseType 'text': OData-wrapped response, string body.
put(
  resource: ODataResource<any>,
  body: any | null,
  options?: {
    etag?: string;
    apiName?: string;
    fetchPolicy?:
      | 'cache-first'
      | 'cache-and-network'
      | 'network-only'
      | 'no-cache'
      | 'cache-only';
    headers?: HttpHeaders | { [header: string]: string | string[] };
    observe: 'response';
    params?: HttpParams | { [param: string]: string | string[] };
    reportProgress?: boolean;
    responseType: 'text';
    withCredentials?: boolean;
  }
): Observable<ODataResponse<string>>;
// observe 'response' + JSON body: OData-wrapped response, untyped body.
put(
  resource: ODataResource<any>,
  body: any | null,
  options?: {
    etag?: string;
    apiName?: string;
    fetchPolicy?:
      | 'cache-first'
      | 'cache-and-network'
      | 'network-only'
      | 'no-cache'
      | 'cache-only';
    headers?: HttpHeaders | { [header: string]: string | string[] };
    observe: 'response';
    params?: HttpParams | { [param: string]: string | string[] };
    reportProgress?: boolean;
    responseType?: 'json';
    withCredentials?: boolean;
  }
): Observable<ODataResponse<Object>>;
// observe 'response' + JSON body, with caller-supplied payload type T.
put<T>(
  resource: ODataResource<any>,
  body: any | null,
  options?: {
    etag?: string;
    apiName?: string;
    fetchPolicy?:
      | 'cache-first'
      | 'cache-and-network'
      | 'network-only'
      | 'no-cache'
      | 'cache-only';
    headers?: HttpHeaders | { [header: string]: string | string[] };
    observe: 'response';
    params?: HttpParams | { [param: string]: string | string[] };
    reportProgress?: boolean;
    responseType?: 'json';
    withCredentials?: boolean;
  }
): Observable<ODataResponse<T>>;
// Default shape: observe 'body' + JSON, untyped body.
put(
  resource: ODataResource<any>,
  body: any | null,
  options?: {
    etag?: string;
    apiName?: string;
    fetchPolicy?:
      | 'cache-first'
      | 'cache-and-network'
      | 'network-only'
      | 'no-cache'
      | 'cache-only';
    headers?: HttpHeaders | { [header: string]: string | string[] };
    observe?: 'body';
    params?: HttpParams | { [param: string]: string | string[] };
    reportProgress?: boolean;
    responseType?: 'json';
    withCredentials?: boolean;
  }
): Observable<Object>;
// Default shape: observe 'body' + JSON, with caller-supplied payload type T.
put<T>(
  resource: ODataResource<any>,
  body: any | null,
  options?: {
    etag?: string;
    apiName?: string;
    fetchPolicy?:
      | 'cache-first'
      | 'cache-and-network'
      | 'network-only'
      | 'no-cache'
      | 'cache-only';
    headers?: HttpHeaders | { [header: string]: string | string[] };
    observe?: 'body';
    params?: HttpParams | { [param: string]: string | string[] };
    reportProgress?: boolean;
    responseType?: 'json';
    withCredentials?: boolean;
  }
): Observable<T>;
// Implementation signature (not visible to callers): accepts the union of all
// option shapes above and delegates to the shared request dispatcher.
put(
  resource: ODataResource<any>,
  body: any | null,
  options: {
    etag?: string;
    apiName?: string;
    fetchPolicy?:
      | 'cache-first'
      | 'cache-and-network'
      | 'network-only'
      | 'no-cache'
      | 'cache-only';
    headers?: HttpHeaders | { [header: string]: string | string[] };
    observe?: 'body' | 'events' | 'response';
    params?: HttpParams | { [param: string]: string | string[] };
    reportProgress?: boolean;
    responseType?: 'arraybuffer' | 'blob' | 'json' | 'text';
    withCredentials?: boolean;
  } = {}
): Observable<any> {
  // `addBody` merges the request body into the options object expected by
  // the generic `request` method.
  return this.request<any>('PUT', resource, addBody(options, body));
}
} | the_stack |
import createDebugger = require('debug');
import jwt = require('jsonwebtoken');
import { Pool, PoolClient, QueryConfig, QueryResult } from 'pg';
import { ExecutionResult, OperationDefinitionNode, Kind } from 'graphql';
import * as sql from 'pg-sql2';
import { $$pgClient } from '../postgres/inventory/pgClientFromContext';
import { pluginHookFromOptions } from './pluginHook';
import { mixed, WithPostGraphileContextOptions } from '../interfaces';
import { formatSQLForDebugging } from 'postgraphile-core';
/**
 * Normalizes an "audience" option: returns the value unchanged unless it is
 * falsy (undefined, null, '') or an empty array, in which case `undefined`
 * is returned so downstream code can treat it as "not provided".
 */
const undefinedIfEmpty = (
  o?: Array<string | RegExp> | string | RegExp,
): undefined | Array<string | RegExp> | string | RegExp => {
  if (!o) {
    return undefined;
  }
  if (Array.isArray(o) && o.length === 0) {
    return undefined;
  }
  return o;
};
/**
 * The GraphQL context object built for each request. The Postgres client is
 * stored under the shared `$$pgClient` symbol; any other keys (e.g. `pgRole`,
 * `jwtClaims`) are added alongside it.
 */
interface PostGraphileContext {
  // The (possibly transaction-wrapped) Postgres client for this request.
  [$$pgClient]: PoolClient;
  [key: string]: PoolClient | mixed;
}

// Shape shared by `withPostGraphileContext` and its default implementation:
// build a context from the options, hand it to `callback`, and resolve with
// the callback's result.
export type WithPostGraphileContextFn<TResult = ExecutionResult> = (
  options: WithPostGraphileContextOptions,
  callback: (context: PostGraphileContext) => Promise<TResult>,
) => Promise<TResult>;

// Namespaced `debug` loggers; each can be enabled independently via the
// DEBUG environment variable.
const debugPg = createDebugger('postgraphile:postgres');
const debugPgError = createDebugger('postgraphile:postgres:error');
const debugPgNotice = createDebugger('postgraphile:postgres:notice');
/**
 * Formats an error/notice object emitted by `pg` into a single readable line
 * and writes it through the supplied `debug` logger.
 */
function debugPgErrorObject(debugFn: createDebugger.IDebugger, object: PgNotice) {
  const severity = object.severity || 'ERROR';
  const codePart = object.code ? `[${object.code}]` : '';
  const message = object.message || object;
  const wherePart = object.where ? ` | WHERE: ${object.where}` : '';
  const hintPart = object.hint ? ` | HINT: ${object.hint}` : '';
  debugFn('%s%s: %s%s%s', severity, codePart, message, wherePart, hintPart);
}
// A helper that checks a Postgres client out of the pool (already configured
// with the request's role/settings where applicable), runs `cb` with it, and
// releases the client afterwards — even if `cb` throws.
type WithAuthenticatedPgClientFunction = <T>(
  cb: (pgClient: PoolClient) => Promise<T>,
) => Promise<T>;
/**
 * Shared no-op 'error' listener. Attaching it while a client is checked out
 * prevents stray 'error' events from becoming unhandled.
 */
function swallowErrors() {
  // intentionally empty
}

// One memoized, transaction-free client helper per pool.
const simpleWithPgClientCache = new WeakMap<Pool, WithAuthenticatedPgClientFunction>();

/**
 * Returns a `WithAuthenticatedPgClientFunction` for `pgPool` that checks a
 * client out, runs the callback, and always releases the client. The helper
 * is cached per pool so repeated calls reuse the same function instance.
 */
function simpleWithPgClient(pgPool: Pool) {
  const memoized = simpleWithPgClientCache.get(pgPool);
  if (memoized) {
    return memoized;
  }
  const withClient: WithAuthenticatedPgClientFunction = async callbackFn => {
    const client = await pgPool.connect();
    client.on('error', swallowErrors);
    try {
      return await callbackFn(client);
    } finally {
      client.removeListener('error', swallowErrors);
      client.release();
    }
  };
  simpleWithPgClientCache.set(pgPool, withClient);
  return withClient;
}
/**
 * Default implementation behind `withPostGraphileContext`.
 *
 * Builds the per-request GraphQL context: resolves the JWT-derived role and
 * transaction-local settings, decides whether a transaction is required,
 * checks a Postgres client out of the pool (or, for `singleStatement` mode,
 * fabricates a thin client that acquires one per query), and invokes
 * `callback` with the assembled context.
 */
const withDefaultPostGraphileContext: WithPostGraphileContextFn = async (
  options: WithPostGraphileContextOptions,
  callback: (context: PostGraphileContext) => Promise<ExecutionResult>,
): Promise<ExecutionResult> => {
  const {
    pgPool,
    jwtToken,
    jwtSecret,
    jwtPublicKey,
    jwtAudiences,
    jwtRole = ['role'],
    jwtVerifyOptions,
    pgDefaultRole,
    pgSettings,
    explain,
    queryDocumentAst,
    operationName,
    pgForceTransaction,
    singleStatement,
  } = options;
  // Locate the operation that will execute so we can tell queries and
  // subscriptions (which may run without a transaction) apart from mutations.
  let operation: OperationDefinitionNode | void;
  if (!pgForceTransaction && queryDocumentAst) {
    // tslint:disable-next-line
    for (let i = 0, l = queryDocumentAst.definitions.length; i < l; i++) {
      const definition = queryDocumentAst.definitions[i];
      if (definition.kind === Kind.OPERATION_DEFINITION) {
        if (!operationName && operation) {
          // Ambiguous document: more than one operation, no name to pick by.
          throw new Error(
            'Multiple operations present in GraphQL query, you must specify an `operationName` so we know which one to execute.',
          );
        } else if (!operationName || (definition.name && definition.name.value === operationName)) {
          operation = definition;
        }
      }
    }
  }
  // Warning: this is only set if pgForceTransaction is falsy
  const operationType = operation != null ? operation.operation : null;
  // Verify the JWT (if any) and compute the role plus the set of
  // transaction-scoped settings to apply before running user code.
  const { role: pgRole, localSettings, jwtClaims } = await getSettingsForPgClientTransaction({
    jwtToken,
    jwtSecret,
    jwtPublicKey,
    jwtAudiences,
    jwtRole,
    jwtVerifyOptions,
    pgDefaultRole,
    pgSettings,
  });
  const sqlSettings: Array<sql.SQLQuery> = [];
  if (localSettings.length > 0) {
    // Later settings should win, so we're going to loop backwards and not
    // add settings for keys we've already seen.
    const seenKeys: Array<string> = [];
    // TODO:perf: looping backwards is slow
    for (let i = localSettings.length - 1; i >= 0; i--) {
      const [key, value] = localSettings[i];
      if (!seenKeys.includes(key)) {
        seenKeys.push(key);
        // Make sure that the third config is always `true` so that we are only
        // ever setting variables on the transaction.
        // Also, we're using `unshift` to undo the reverse-looping we're doing
        sqlSettings.unshift(sql.fragment`set_config(${sql.value(key)}, ${sql.value(value)}, true)`);
      }
    }
  }
  // A single SELECT applying every set_config() call, or null if there are
  // no settings to apply.
  const sqlSettingsQuery =
    sqlSettings.length > 0 ? sql.compile(sql.query`select ${sql.join(sqlSettings, ', ')}`) : null;
  // If we can avoid transactions, we get greater performance.
  const needTransaction =
    pgForceTransaction ||
    !!sqlSettingsQuery ||
    (operationType !== 'query' && operationType !== 'subscription');
  // Now we've caught as many errors as we can at this stage, let's create a DB connection.
  const withAuthenticatedPgClient: WithAuthenticatedPgClientFunction = !needTransaction
    ? simpleWithPgClient(pgPool)
    : async cb => {
        // Connect a new Postgres client
        const pgClient = await pgPool.connect();
        pgClient.on('error', swallowErrors);
        try {
          // Begin our transaction
          await pgClient.query('begin');
          try {
            // If there is at least one local setting, load it into the database.
            if (sqlSettingsQuery) {
              await pgClient.query(sqlSettingsQuery);
            }
            // Use the client, wait for it to be finished with, then go to 'finally'
            return await cb(pgClient);
          } finally {
            // Cleanup our Postgres client by ending the transaction and releasing
            // the client back to the pool. Always do this even if the query fails.
            // (In Postgres, COMMIT on an aborted transaction behaves as a
            // rollback, so issuing 'commit' unconditionally is safe here.)
            await pgClient.query('commit');
          }
        } finally {
          pgClient.removeListener('error', swallowErrors);
          pgClient.release();
        }
      };
  if (singleStatement) {
    // TODO:v5: remove this workaround
    /*
     * This is a workaround for subscriptions; the GraphQL context is allocated
     * for the entire duration of the subscription, however hogging a pgClient
     * for more than a few milliseconds (let alone hours!) is a no-no. So we
     * fake a PG client that will set up the transaction each time `query` is
     * called. It's a very thin/dumb wrapper, so it supports nothing but
     * `query`.
     */
    const fakePgClient: PoolClient = {
      // Validates it was called in promise style with a statement, then
      // acquires a real (authenticated) client just for this one query.
      query(
        textOrQueryOptions?: string | QueryConfig,
        values?: Array<any>, // tslint:disable-line no-any
        cb?: void,
      ): Promise<QueryResult> {
        if (!textOrQueryOptions) {
          throw new Error('Incompatible call to singleStatement - no statement passed?');
        } else if (typeof textOrQueryOptions === 'object') {
          if (values || cb) {
            throw new Error('Incompatible call to singleStatement - expected no callback');
          }
        } else if (typeof textOrQueryOptions !== 'string') {
          throw new Error('Incompatible call to singleStatement - bad query');
        } else if (values && !Array.isArray(values)) {
          throw new Error('Incompatible call to singleStatement - bad values');
        } else if (cb) {
          throw new Error('Incompatible call to singleStatement - expected to return promise');
        }
        // Generate an authenticated client on the fly
        return withAuthenticatedPgClient(pgClient => pgClient.query(textOrQueryOptions, values));
      },
    } as any; // tslint:disable-line no-any
    return callback({
      [$$pgClient]: fakePgClient,
      pgRole,
      jwtClaims,
    });
  } else {
    return withAuthenticatedPgClient(async pgClient => {
      // Lazily-started promise for collected EXPLAIN output; shared between
      // the context's getExplainResults() and the finally block below.
      let results: Promise<Array<ExplainResult>> | null = null;
      if (explain) {
        pgClient.startExplain();
      }
      try {
        return await callback({
          [$$pgClient]: pgClient,
          pgRole,
          jwtClaims,
          ...(explain
            ? {
                getExplainResults: (): Promise<Array<ExplainResult>> => {
                  results = results || pgClient.stopExplain();
                  return results;
                },
              }
            : null),
        });
      } finally {
        // Ensure explain-capture is always stopped, even if the callback
        // never asked for the results (or threw).
        if (explain) {
          results = results || pgClient.stopExplain();
        }
      }
    });
  }
};
/**
* Creates a PostGraphile context object which should be passed into a GraphQL
* execution. This function will also connect a client from a Postgres pool and
* setup a transaction in that client.
*
* This function is intended to wrap a call to GraphQL-js execution like so:
*
* ```js
* const result = await withPostGraphileContext({
* pgPool,
* jwtToken,
* jwtSecret,
* pgDefaultRole,
* }, async context => {
* return await graphql(
* schema,
* query,
* null,
* { ...context },
* variables,
* operationName,
* );
* });
* ```
*/
const withPostGraphileContext: WithPostGraphileContextFn = async (
  options: WithPostGraphileContextOptions,
  callback: (context: PostGraphileContext) => Promise<ExecutionResult>,
): Promise<ExecutionResult> => {
  // Allow plugins to wrap (or replace) the default context factory.
  const pluginHook = pluginHookFromOptions(options);
  const hookedWithContext = pluginHook(
    'withPostGraphileContext',
    withDefaultPostGraphileContext,
    { options },
  );
  return hookedWithContext(options, callback);
};

export default withPostGraphileContext;
/**
* Sets up the Postgres client transaction by decoding the JSON web token and
* doing some other cool things.
*/
// THIS METHOD SHOULD NEVER RETURN EARLY. If this method returns early then it
// may skip the super important step of setting the role on the Postgres
// client. If this happens it’s a huge security vulnerability. Never using the
// keyword `return` in this function is a good first step. You can still throw
// errors, however, as this will stop the request execution.
/**
 * Resolves the Postgres role, the list of transaction-local settings, and the
 * decoded JWT claims for a request.
 *
 * - Verifies `jwtToken` (when present) against `jwtPublicKey`/`jwtSecret`,
 *   honouring `jwtAudiences`/`jwtVerifyOptions`; verification failures are
 *   rethrown with an HTTP status code (401 for expiry, 403 otherwise).
 * - The role is taken from `pgSettings.role` or the claim at the `jwtRole`
 *   path, falling back to `pgDefaultRole`.
 * - Returns `localSettings` as `[key, value]` pairs to be applied via
 *   `set_config(..., true)`; JWT claims are namespaced under `jwt.claims.*`
 *   (objects/arrays are JSON-stringified before being passed to Postgres).
 */
async function getSettingsForPgClientTransaction({
  jwtToken,
  jwtSecret,
  jwtPublicKey,
  jwtAudiences,
  jwtRole,
  jwtVerifyOptions,
  pgDefaultRole,
  pgSettings,
}: {
  jwtToken?: string;
  jwtSecret?: jwt.Secret;
  jwtPublicKey?: jwt.Secret | jwt.GetPublicKeyOrSecret;
  jwtAudiences?: Array<string>;
  jwtRole: Array<string>;
  jwtVerifyOptions?: jwt.VerifyOptions;
  pgDefaultRole?: string;
  pgSettings?: { [key: string]: mixed };
}): Promise<{
  role: string | undefined;
  localSettings: Array<[string, string]>;
  jwtClaims: { [claimName: string]: mixed } | null;
}> {
  // Setup our default role. Once we decode our token, the role may change.
  let role = pgDefaultRole;
  let jwtClaims: { [claimName: string]: mixed } = {};
  // If we were provided a JWT token, let us try to verify it. If verification
  // fails we want to throw an error.
  if (jwtToken) {
    // Try to run `jwt.verify`. If it fails, capture the error and re-throw it
    // as a 403 error because the token is not trustworthy.
    try {
      // Asymmetric verification key takes precedence over the shared secret.
      const jwtVerificationSecret = jwtPublicKey || jwtSecret;
      // If a JWT token was defined, but a secret was not provided to the server or
      // secret had unsupported type, throw a 403 error.
      if (
        !Buffer.isBuffer(jwtVerificationSecret) &&
        typeof jwtVerificationSecret !== 'string' &&
        typeof jwtVerificationSecret !== 'function'
      ) {
        // tslint:disable-next-line no-console
        console.error(
          `ERROR: '${
            jwtPublicKey ? 'jwtPublicKey' : 'jwtSecret'
          }' was not set to a string or buffer - rejecting JWT-authenticated request.`,
        );
        throw new Error('Not allowed to provide a JWT token.');
      }
      if (jwtAudiences != null && jwtVerifyOptions && 'audience' in jwtVerifyOptions)
        throw new Error(
          `Provide either 'jwtAudiences' or 'jwtVerifyOptions.audience' but not both`,
        );
      // Promisified jwt.verify; `audience` defaults to ['postgraphile'] if
      // neither jwtAudiences nor jwtVerifyOptions.audience was supplied.
      const claims = await new Promise((resolve, reject) => {
        jwt.verify(
          jwtToken,
          jwtVerificationSecret,
          {
            ...jwtVerifyOptions,
            audience:
              jwtAudiences ||
              (jwtVerifyOptions && 'audience' in (jwtVerifyOptions as Record<string, any>)
                ? undefinedIfEmpty(jwtVerifyOptions.audience)
                : ['postgraphile']),
          },
          (err, decoded) => {
            if (err) reject(err);
            else resolve(decoded);
          },
        );
      });
      if (typeof claims === 'string') {
        throw new Error('Invalid JWT payload');
      }
      // jwt.verify returns `object | string`; but the `object` part is really a map
      jwtClaims = claims as typeof jwtClaims;
      const roleClaim = getPath(jwtClaims, jwtRole);
      // If there is a `role` property in the claims, use that instead of our
      // default role.
      if (typeof roleClaim !== 'undefined') {
        if (typeof roleClaim !== 'string')
          // Report the type of the value actually found at the `jwtRole`
          // path (previously this incorrectly reported `jwtClaims['role']`,
          // which is wrong whenever a custom `jwtRole` path is configured).
          throw new Error(
            `JWT \`role\` claim must be a string. Instead found '${typeof roleClaim}'.`,
          );
        role = roleClaim;
      }
    } catch (error) {
      // In case this error is thrown in an HTTP context, we want to add status code
      // Note. jwt.verify will add a name key to its errors. (https://github.com/auth0/node-jsonwebtoken#errors--codes)
      error.statusCode =
        'name' in error && error.name === 'TokenExpiredError'
          ? // The correct status code for an expired ( but otherwise acceptable token is 401 )
            401
          : // All other authentication errors should get a 403 status code.
            403;
      throw error;
    }
  }
  // Instantiate a map of local settings. This map will be transformed into a
  // Sql query.
  const localSettings: Array<[string, string]> = [];
  // Set the custom provided settings before jwt claims and role are set
  // this prevents an accidentional overwriting
  if (pgSettings && typeof pgSettings === 'object') {
    for (const key in pgSettings) {
      if (
        Object.prototype.hasOwnProperty.call(pgSettings, key) &&
        isPgSettingValid(pgSettings[key])
      ) {
        if (key === 'role') {
          // 'role' is special-cased: it overrides the JWT/default role
          // rather than being applied as a plain setting.
          role = String(pgSettings[key]);
        } else {
          localSettings.push([key, String(pgSettings[key])]);
        }
      }
    }
  }
  // If there is a rule, we want to set the root `role` setting locally
  // to be our role. The role may only be null if we have no default role.
  if (typeof role === 'string') {
    localSettings.push(['role', role]);
  }
  // If we have some JWT claims, we want to set those claims as local
  // settings with the namespace `jwt.claims`.
  for (const key in jwtClaims) {
    if (Object.prototype.hasOwnProperty.call(jwtClaims, key)) {
      const rawValue = jwtClaims[key];
      // Unsafe to pass raw object/array to pg.query -> set_config; instead JSONify
      const value: mixed =
        rawValue != null && typeof rawValue === 'object' ? JSON.stringify(rawValue) : rawValue;
      if (isPgSettingValid(value)) {
        localSettings.push([`jwt.claims.${key}`, String(value)]);
      }
    }
  }
  return {
    localSettings,
    role,
    jwtClaims: jwtToken ? jwtClaims : null,
  };
}
// Key under which `debugPgClient` stashes the original (un-patched) `query`
// method on a pg client; its presence also marks the client as already patched.
const $$pgClientOrigQuery = Symbol();

// One entry per EXPLAINed query: the SQL text plus the (still pending)
// result of the corresponding `EXPLAIN` statement.
interface RawExplainResult {
  query: string;
  result: any;
}

// Final shape returned by `stopExplain`: the query text with the resolved
// EXPLAIN rows joined into a single plan string.
type ExplainResult = Omit<RawExplainResult, 'result'> & {
  plan: string;
};

// Augment pg's ClientBase with the explain-capture API that `debugPgClient`
// monkey-patches onto clients below.
declare module 'pg' {
  interface ClientBase {
    _explainResults: Array<RawExplainResult> | null;
    startExplain: () => void;
    stopExplain: () => Promise<Array<ExplainResult>>;
  }
}
/**
 * Monkey-patches the `query` method of a pg Client to add debugging
 * functionality. Use with care.
 *
 * When one of the pg debug namespaces is enabled (or `allowExplain` is set)
 * the client's `query` method is wrapped so that query text can be logged,
 * errors reported, and EXPLAIN output collected between `startExplain` and
 * `stopExplain` calls. Patching happens at most once per client: the
 * original method is stashed under the `$$pgClientOrigQuery` symbol.
 */
export function debugPgClient(pgClient: PoolClient, allowExplain = false): PoolClient {
  // If Postgres debugging is enabled, enhance our query function by adding
  // a debug statement.
  if (!pgClient[$$pgClientOrigQuery]) {
    // Set the original query method to a key on our client. If that key is
    // already set, use that.
    pgClient[$$pgClientOrigQuery] = pgClient.query;

    // Begin capturing EXPLAIN results for subsequent queries.
    pgClient.startExplain = () => {
      pgClient._explainResults = [];
    };

    // Stop capturing and resolve every pending EXPLAIN into a readable plan
    // string; entries whose EXPLAIN failed (result `null`) are filtered out.
    pgClient.stopExplain = async () => {
      const results = pgClient._explainResults;
      pgClient._explainResults = null;
      if (!results) {
        return Promise.resolve([]);
      }
      return (
        await Promise.all(
          results.map(async r => {
            const { result: resultPromise, ...rest } = r;
            const result = await resultPromise;
            // EXPLAIN rows carry a single column whose name varies with the
            // output format, so grab whatever the first key is.
            const firstKey = result && result[0] && Object.keys(result[0])[0];
            if (!firstKey) {
              return null;
            }
            const plan = result.map((r: any) => r[firstKey]).join('\n');
            return {
              ...rest,
              plan,
            };
          }),
        )
      ).filter((entry: unknown): entry is ExplainResult => !!entry);
    };
    if (debugPgNotice.enabled) {
      pgClient.on('notice', (msg: PgNotice) => {
        debugPgErrorObject(debugPgNotice, msg);
      });
    }
    // Route errors to the structured debugger when they look like Postgres
    // notice/error objects (have both `name` and `severity`); otherwise dump
    // the raw error object.
    const logError = (error: PgNotice | Error) => {
      if (error.name && error['severity']) {
        debugPgErrorObject(debugPgError, error as PgNotice);
      } else {
        debugPgError('%O', error);
      }
    };
    if (debugPg.enabled || debugPgNotice.enabled || allowExplain) {
      // tslint:disable-next-line only-arrow-functions
      pgClient.query = function (...args: Array<any>): any {
        const [a, b, c] = args;
        // If we understand it (and it uses the promises API)
        if (
          (typeof a === 'string' && !c && (!b || Array.isArray(b))) ||
          (typeof a === 'object' && !b && !c)
        ) {
          if (debugPg.enabled) {
            // Debug just the query text. We don’t want to debug variables because
            // there may be passwords in there.
            debugPg('%s', formatSQLForDebugging(a && a.text ? a.text : a));
          }
          if (pgClient._explainResults) {
            const query = a && a.text ? a.text : a;
            const values = a && a.text ? a.values : b;
            // Only EXPLAIN single-statement DML/SELECT text; a ';' could mean
            // multiple statements, which EXPLAIN cannot handle.
            if (query.match(/^\s*(select|insert|update|delete|with)\s/i) && !query.includes(';')) {
              // Explain it
              const explain = `explain ${query}`;
              pgClient._explainResults.push({
                query,
                result: pgClient[$$pgClientOrigQuery]
                  .call(this, explain, values)
                  .then((data: any) => data.rows)
                  // swallow errors during explain
                  .catch(() => null),
              });
            }
          }
          const promiseResult = pgClient[$$pgClientOrigQuery].apply(this, args);
          if (debugPgError.enabled) {
            // Report the error with our Postgres debugger.
            promiseResult.catch(logError);
          }
          return promiseResult;
        } else {
          // We don't understand it (e.g. `pgPool.query`), just let it happen.
          return pgClient[$$pgClientOrigQuery].apply(this, args);
        }
      };
    }
  }
  return pgClient;
}
/**
 * Safely gets the value at `path` (array of keys) of `inObject`.
 *
 * Walks one key at a time, bailing out as soon as a falsy intermediate
 * value is reached (it cannot be indexed into). Only a fully-consumed,
 * non-empty path yields a value; everything else yields `undefined`.
 *
 * @private
 */
function getPath(inObject: mixed, path: Array<string>): any {
  let cursor = inObject;
  let steps = 0;
  for (const key of path) {
    if (!cursor) {
      break;
    }
    cursor = cursor[key];
    steps += 1;
  }
  return steps > 0 && steps === path.length ? cursor : undefined;
}
/**
 * Check if a pgSetting is a string, a number or a boolean.
 * Null and Undefined settings are not valid and will be ignored.
 * pgSettings of other types throw an error.
 *
 * @private
 */
function isPgSettingValid(pgSetting: mixed): boolean {
  // Null-ish settings are simply skipped by callers, not an error.
  if (pgSetting === undefined || pgSetting === null) {
    return false;
  }
  const typeOfPgSetting = typeof pgSetting;
  if (
    typeOfPgSetting === 'string' ||
    typeOfPgSetting === 'number' ||
    typeOfPgSetting === 'boolean'
  ) {
    return true;
  }
  // Objects, arrays, functions, symbols, etc. cannot be safely coerced into
  // a Postgres setting, so treat them as a programming error.
  throw new Error(
    `Error converting pgSetting: ${typeOfPgSetting} needs to be of type string, number or boolean.`,
  );
}
// tslint:enable no-any
// Shape of the `notice`/error objects emitted by the pg driver. The fields
// mirror the PostgreSQL error/notice protocol message fields (severity,
// code, detail, hint, source location, etc.).
interface PgNotice {
  readonly name: 'notice';
  readonly message: string | undefined;
  length: number;
  severity: string | undefined;
  code: string | undefined;
  detail: string | undefined;
  hint: string | undefined;
  where: string | undefined;
  schema: string | undefined;
  table: string | undefined;
  column: string | undefined;
  constraint: string | undefined;
  file: string | undefined;
  line: string | undefined;
  routine: string | undefined;
  /*
  Not sure what these are:
    position: any;
    internalPosition: any;
    internalQuery: any;
    dataType: any;
  */
}
import React from "react";
import {GUI} from "dat.gui";
import {
AxesHelper,
Camera,
Clock,
Color,
GridHelper,
Material,
OrthographicCamera,
PerspectiveCamera,
Scene,
WebGLRenderer
} from "three";
import {OrbitControls} from "three/examples/jsm/controls/OrbitControls";
import {PAGES} from "../Constants";
import GUISession from "../utils/GUISession";
import {createCubeEnv} from "../utils/cubeEnv";
import ECS from "ecs-lib";
import SceneObjectSystem from "../system/SceneObjectSystem";
import LogSystem from "../system/LogSystem";
// Environment cube-map textures selectable from the "Environment" GUI folder.
const ENVMAPS = [
    {
        texture: 'lake',
        title: 'Lake'
    },
    {
        texture: 'bridge',
        title: 'Bridge'
    },
    {
        texture: 'miramar',
        title: 'Miramar'
    }
];

type Props = {};

// Shared state handed down to the active example page component.
type State = {
    world?: ECS;
    scene?: Scene;
    camera?: Camera;
    renderer?: WebGLRenderer;
    gui: GUI,
    page?: typeof React.Component,
    // The session is the experiment currently active
    session?: GUISession
};
/**
 * Root page of the examples app: owns the three.js renderer, the two
 * cameras, the dat.GUI panel and the currently selected example page
 * (experiment), wiring them together. GUI selections are persisted in
 * localStorage so they survive reloads.
 */
export class IndexPage extends React.PureComponent<Props, State> {

    state: State = {
        gui: new GUI()
    };

    // Ref to the active example page; used to invoke its optional
    // render3D/animate3D hooks from the render loop below.
    private pageRef = React.createRef<any>();

    componentDidMount(): void {
        const gui = this.state.gui;
        gui.width = 300;

        // Prefix for all localStorage keys used by the GUI controls.
        const APPKEY = 'ecs-lib-examples-';

        // Small helper that persists a single GUI value in localStorage,
        // parsing it back as number/boolean/string on read.
        class StorageProxy {
            constructor(private key: string, private type: 'number' | 'bool' | 'string' = 'number') {
            }

            get(def?: any): any {
                switch (this.type) {
                    case 'number':
                        return Number.parseInt(window.localStorage.getItem(APPKEY + this.key) || (def ? '' + def : undefined) || '0');
                        break;
                    case 'bool':
                        let value = window.localStorage.getItem(APPKEY + this.key);
                        if (value === 'true') {
                            return true;
                        }
                        if (value === 'false') {
                            return false;
                        }
                        // No stored value and no default: booleans default to true.
                        if (def === undefined) {
                            return true;
                        }
                        return def;
                        break;
                    case 'string':
                        return window.localStorage.getItem(APPKEY + this.key) || def;
                        break;
                }
            }

            set(value: any) {
                window.localStorage.setItem(APPKEY + this.key, value);
            }
        }

        // Functions to be executed whenever the page changes
        const onPageChange: Array<Function> = [];

        var scene: Scene,
            camera: Camera,
            // NOTE(review): `controls` is declared but never assigned or used here.
            controls: OrbitControls,
            HEIGHT = window.innerHeight,
            WIDTH = window.innerWidth,
            windowHalfX = WIDTH / 2,
            windowHalfY = HEIGHT / 2,
            cubeEnv: any;

        const renderer = new WebGLRenderer({
            canvas: document.getElementById('canvas') as HTMLCanvasElement,
            // alpha: true,
            antialias: true
        });
        renderer.setSize(WIDTH, HEIGHT);
        renderer.setPixelRatio(window.devicePixelRatio);

        let aspect = WIDTH / HEIGHT;
        var frustumSize = 120;

        // Both camera types are kept alive; the GUI switches which one renders.
        const perspectiveCamera = new PerspectiveCamera(60, aspect, 1, 2000);
        const orthographicCamera = new OrthographicCamera(
            frustumSize * aspect / -2,
            frustumSize * aspect / 2,
            frustumSize / 2,
            frustumSize / -2,
            -120,
            2000
        );

        perspectiveCamera.position.y = 60;
        perspectiveCamera.position.x = 60;
        perspectiveCamera.position.z = 60;

        orthographicCamera.position.y = 60;
        orthographicCamera.position.x = 60;
        orthographicCamera.position.z = 60;

        const perspectiveControls = new OrbitControls(perspectiveCamera, renderer.domElement);
        const orthographicControls = new OrbitControls(orthographicCamera, renderer.domElement);
        perspectiveControls.enableKeys = false;
        orthographicControls.enableKeys = false;

        // ----------------------------------------------------------------
        // RENDER CONTROL
        // ----------------------------------------------------------------
        (() => {
            window.addEventListener('resize', function () {
                HEIGHT = window.innerHeight;
                WIDTH = window.innerWidth;
                windowHalfX = WIDTH / 2;
                windowHalfY = HEIGHT / 2;
                if (renderer) {
                    renderer.setSize(WIDTH, HEIGHT);
                }

                let aspect = WIDTH / HEIGHT;

                perspectiveCamera.aspect = aspect;
                perspectiveCamera.updateProjectionMatrix();

                orthographicCamera.left = frustumSize * aspect / -2;
                orthographicCamera.right = frustumSize * aspect / 2;
                orthographicCamera.top = frustumSize / 2;
                orthographicCamera.bottom = frustumSize / -2;
                orthographicCamera.updateProjectionMatrix();

                if (cubeEnv) {
                    cubeEnv.onResize(aspect);
                }
            }, false);

            const render = () => {
                if (scene && camera) {
                    // Render page
                    if (this.pageRef.current && this.pageRef.current.render3D) {
                        this.pageRef.current.render3D();
                    }
                    renderer.render(scene, camera);
                }
            };

            let clock = new Clock();
            let delta = 0;
            // 60 fps
            let interval = 1 / 60;

            const animate = () => {
                requestAnimationFrame(animate);

                // Update ECS
                if (this.state.world) {
                    this.state.world.update();
                }

                perspectiveControls.update();
                orthographicControls.update();

                delta += clock.getDelta();

                // Animate page
                if (this.pageRef.current && this.pageRef.current.animate3D) {
                    this.pageRef.current.animate3D();
                }

                // Throttle actual rendering to ~60 fps.
                if (delta > interval) {
                    // The draw or time dependent code are here
                    render();
                    delta = delta % interval;
                }
            };
            animate();
        })();

        // ----------------------------------------------------------------
        // PAGE SELECTION (Experiment/Tool)
        // ----------------------------------------------------------------
        (() => {
            const pages: {
                [key: string]: any
            } = {};
            PAGES.map((page, i) => {
                pages[page.title] = i;
            });

            const pageStorage = new StorageProxy('page');
            const pageParams = {
                page: pageStorage.get()
            };

            const pageController = gui.add(pageParams, 'page', pages)
                .onChange((index) => {
                    pageStorage.set(pageParams.page);

                    const page = PAGES[pageParams.page];
                    const oldSession = this.state.session;

                    // Clean up the previous page
                    this.setState({
                        page: undefined,
                        session: new GUISession(this.state.gui, page.title)
                    }, () => {
                        // Remove the controls created by the previous experiment
                        if (oldSession) {
                            oldSession.destroy();
                        }

                        if (scene) {
                            // Remove the elements added to the current page, including the scene itself
                            scene.dispose();
                        }

                        scene = new Scene();
                        scene.background = new Color(0x888888);
                        // scene.fog = new Fog(scene.background, 10, 20);
                        // scene.add(new Mesh(new BoxBufferGeometry(20, 20, 20), new MeshLambertMaterial({color: Math.random() * 0xffffff})));

                        onPageChange.forEach(fn => {
                            fn();
                        });

                        if (this.state.world) {
                            this.state.world.destroy();
                        }

                        let world = new ECS([
                            new SceneObjectSystem(scene),
                            new LogSystem()
                        ]);

                        // Render the new page
                        this.setState({
                            scene: scene,
                            camera: camera,
                            renderer: renderer,
                            page: page,
                            world: world
                        });
                    });
                });

            // Trigger the initial page load after the controller is wired up.
            setTimeout(function () {
                pageController.setValue(pageParams.page)
            }, 10)
        })();

        // ----------------------------------------------------------------
        // ENVIRONMENT
        // ----------------------------------------------------------------
        var environment = gui.addFolder('Environment');

        setTimeout(() => {
            // ----------------------------------------------------------------
            // CAMERA
            // ----------------------------------------------------------------
            const cameras = {
                Perspective: 'P',
                Orthographic: 'O',
            };

            const cameraStorage = new StorageProxy('camera', 'string');
            const cameraParams = {
                camera: cameraStorage.get('P'),
            };

            let cameraUpdate = () => {
                cameraStorage.set(cameraParams.camera);
                if (cameraParams.camera === 'P') {
                    // Perspective
                    camera = perspectiveCamera;
                } else {
                    // Orthographic
                    camera = orthographicCamera;
                }
                this.setState({
                    camera: camera,
                });
            };
            onPageChange.push(cameraUpdate);

            environment.add(cameraParams, 'camera', cameras)
                .onChange(cameraUpdate)
                .setValue(cameraParams.camera);

            // ----------------------------------------------------------------
            // ENVMAP
            // ----------------------------------------------------------------
            const envmaps: {
                [key: string]: any
            } = {};
            ENVMAPS.forEach((item, i) => {
                envmaps[item.title] = i;
            });

            const envmapStorage = new StorageProxy('envmap');
            const envmapParams = {
                envmap: envmapStorage.get(),
            };

            let envmapUpdate = () => {
                envmapStorage.set(envmapParams.envmap);
                const envmap = ENVMAPS[envmapParams.envmap].texture;
                if (cubeEnv) {
                    cubeEnv.destroy();
                }
                if (renderer) {
                    cubeEnv = createCubeEnv(envmap, WIDTH / HEIGHT, renderer);
                }
            };
            onPageChange.push(envmapUpdate);

            environment.add(envmapParams, 'envmap', envmaps)
                .onChange(envmapUpdate)
                .setValue(envmapParams.envmap);

            // ----------------------------------------------------------------
            // GRID
            // ----------------------------------------------------------------
            const grid = environment.addFolder('Grid');

            const gridStorageShow = new StorageProxy('grid-show', 'bool');
            const gridStorageSize = new StorageProxy('grid-size');
            const gridStorageDivisions = new StorageProxy('grid-divisions');
            const gridStorageColor1 = new StorageProxy('grid-color1', 'string');
            const gridStorageColor2 = new StorageProxy('grid-color2', 'string');

            const gridParams = {
                show: gridStorageShow.get(),
                size: gridStorageSize.get(200),
                divisions: gridStorageDivisions.get(20),
                color1: gridStorageColor1.get('#9923D2'),
                color2: gridStorageColor2.get('#F5D0FE'),
            };

            var gridHelper: GridHelper;
            // Persist current params, then rebuild the grid helper from scratch.
            let gridUpdate = function () {
                gridStorageShow.set(gridParams.show);
                gridStorageSize.set(gridParams.size);
                gridStorageDivisions.set(gridParams.divisions);
                gridStorageColor1.set(gridParams.color1);
                gridStorageColor2.set(gridParams.color2);

                if (!scene) {
                    return;
                }
                if (gridHelper) {
                    scene.remove(gridHelper);
                    gridHelper = undefined;
                }

                if (gridParams.show) {
                    gridHelper = new GridHelper(gridParams.size, gridParams.divisions, gridParams.color1, gridParams.color2);
                    scene.add(gridHelper);
                }
            };
            onPageChange.push(gridUpdate);

            grid.add(gridParams, 'show').onChange(gridUpdate).setValue(gridParams.show);
            grid.add(gridParams, 'size', 10, 500, 5).onChange(gridUpdate).setValue(gridParams.size);
            grid.add(gridParams, 'divisions', 5, 50, 5).onChange(gridUpdate).setValue(gridParams.divisions);
            grid.addColor(gridParams, 'color1').onChange(gridUpdate).setValue(gridParams.color1);
            grid.addColor(gridParams, 'color2').onChange(gridUpdate).setValue(gridParams.color2);

            // ----------------------------------------------------------------
            // AXIS
            // ----------------------------------------------------------------
            const axes = environment.addFolder('Axes');

            const axesStorageShow = new StorageProxy('axes-show', 'bool');
            const axesStorageSize = new StorageProxy('axes-size');
            const axesStorageDepthTest = new StorageProxy('axes-depthTest', 'bool');

            const axesParams = {
                show: axesStorageShow.get(),
                size: axesStorageSize.get(100),
                depthTest: axesStorageDepthTest.get(false),
            };

            var axesHelper: AxesHelper;
            // Persist current params, then rebuild the axes helper from scratch.
            let axesUpdate = function () {
                axesStorageShow.set(axesParams.show);
                axesStorageDepthTest.set(axesParams.depthTest);
                axesStorageSize.set(axesParams.size);

                if (!scene) {
                    return;
                }
                if (axesHelper) {
                    scene.remove(axesHelper);
                    axesHelper = undefined;
                }

                if (axesParams.show) {
                    axesHelper = new AxesHelper(axesParams.size);
                    if (!axesParams.depthTest) {
                        // Draw the axes on top of everything else.
                        (axesHelper.material as Material).depthTest = false;
                        axesHelper.renderOrder = 1;
                    }
                    scene.add(axesHelper);
                }
            };
            onPageChange.push(axesUpdate);

            axes.add(axesParams, 'show').onChange(axesUpdate).setValue(axesParams.show);
            axes.add(axesParams, 'size', 10, 260, 5).onChange(axesUpdate).setValue(axesParams.size);
            axes.add(axesParams, 'depthTest').onChange(axesUpdate).setValue(axesParams.depthTest);
        }, 100);
    }

    render() {
        const PageComponent = this.state.page;
        return (
            <div>
                {
                    PageComponent
                        ? (
                            <div>
                                <div id={'page-title'}>
                                    <h1>ECS (Entity Component System) library for game programming</h1>
                                    <a href="https://github.com/nidorx/ecs-lib/tree/master/example">https://github.com/nidorx/ecs-lib/tree/master/example</a>
                                    <h2>{(PageComponent as any).title}</h2>
                                    {(PageComponent as any).help}
                                </div>
                                <PageComponent
                                    ref={this.pageRef}
                                    gui={this.state.session}
                                    world={this.state.world}
                                    scene={this.state.scene}
                                    camera={this.state.camera}
                                    renderer={this.state.renderer}
                                />
                            </div>
                        )
                        : null
                }
            </div>
        );
    }
}
import { SiteBodyUpdate } from '../../../../api/content-rest-api/model/siteBodyUpdate';
import { SiteBodyCreate } from '../../../../api/content-rest-api/model/siteBodyCreate';
import { SitesApi as NewSitesApi } from '../../../../api/content-rest-api/api/sites.api';
import { AlfrescoApi } from '../../../../alfrescoApi';
import { SiteMemberEntry } from '../../../../api/content-rest-api/model/siteMemberEntry';
import { SiteMembershipBodyCreate } from '../../../../api/content-rest-api/model/siteMembershipBodyCreate';
import { SiteEntry } from '../../../../api/content-rest-api/model/siteEntry';
import { SiteContainerEntry } from '../../../../api/content-rest-api/model/siteContainerEntry';
import { SiteContainerPaging } from '../../../../api/content-rest-api/model/siteContainerPaging';
import { SiteMemberPaging } from '../../../../api/content-rest-api/model/siteMemberPaging';
import { SitePaging } from '../../../../api/content-rest-api/model/sitePaging';
import { SiteMembershipBodyUpdate } from '../../../../api/content-rest-api/model/siteMembershipBodyUpdate';
/**
* @deprecated 3.0.0
*/
export class SitesApi {
private sitesApi: NewSitesApi;
public init(alfrescoApi?: AlfrescoApi) {
this.sitesApi = new NewSitesApi(alfrescoApi);
}
/**
* Add a person
* Adds person **personId** as a member of site **siteId**.\n\nYou can set the **role** to one of four types:\n\n* SiteConsumer\n* SiteCollaborator\n* SiteContributor\n* SiteManager\n
* @param {String} siteId The identifier of a site.
* @param {module:model/SiteMemberBody} siteMemberBody The person to add and their role
* data is of type: {module:model/SiteMemberEntry}
*/
addSiteMember(siteId: string, siteMemberBody: SiteMembershipBodyCreate): Promise<SiteMemberEntry> {
return this.sitesApi.createSiteMembership(siteId, siteMemberBody);
}
/**
* Create a site
* Creates a default site with the given details. Unless explicitly specified, the site id will be generated from the site title. The site id must be unique and only contain alphanumeric and/or dash\ncharacters.\n\nFor example, to create a public site called \"Marketing\" the following body could be used:\n```JSON\n{\n \"title\": \"Marketing\",\n \"visibility\": \"PUBLIC\"\n}\n```\n\nThe creation of the (surf) configuration files required by Share can be skipped via the **skipConfiguration** query parameter.\n\n**Please note: if skipped then such a site will *not* work within Share.**\n\nThe addition of the site to the user's site favorites can be skipped via the **skipAddToFavorites** query parameter.\n\nThe creator will be added as a member with Site Manager role.\n
* @param {module:model/SiteBody} siteBody The site details
* @param {Object} opts Optional parameters
* @param {Boolean} opts.skipConfiguration Flag to indicate whether the Share-specific (surf) configuration files for the site should not be created. (default to false)
* @param {Boolean} opts.skipAddToFavorites Flag to indicate whether the site should not be added to the user's site favorites. (default to false)
* data is of type: {module:model/SiteEntry}
*/
createSite(siteBody: SiteBodyCreate, opts?: any): Promise<SiteEntry> {
return this.sitesApi.createSite(siteBody, opts);
}
/**
* Update a site
* Update existing site
* @param {String} siteId The identifier of a site.
* @param {module:model/SiteBody} siteBody The site details
* @param {Object} opts Optional parameters
* @param {string[]} opts.fields A list of field names.\n\nYou can use this parameter to restrict the fields\nreturned within a response if, for example, you want to save on overall bandwidth.\n\nThe list applies to a returned individual\nentity or entries within a collection.\n\nIf the API method also supports the **include**\nparameter, then the fields specified in the **include**\nparameter are returned in addition to those specified in the **fields** parameter.\n
* data is of type: {module:model/SiteEntry}
*/
updateSite(siteId: string, siteBody: SiteBodyUpdate, opts?: any): Promise<SiteEntry> {
return this.sitesApi.updateSite(siteId, siteBody, opts);
}
/**
* Delete a site
* Deletes the site with **siteId**.
* @param {String} siteId The identifier of a site.
* @param {Object} opts Optional parameters
* @param {Boolean} opts.permanent Flag to indicate whether the site should be permanently deleted i.e. bypass the trashcan. (default to false)
*/
deleteSite(siteId: string, opts?: any): Promise<any> {
return this.sitesApi.deleteSite(siteId, opts);
}
/**
* Get a site
* Returns information for site **siteId**.\n\nYou can use the **relations** parameter to include one or more related\nentities in a single response and so reduce network traffic.\n\nThe entity types in Alfresco are organized in a tree structure.\nThe **sites** entity has two children, **containers** and **members**.\nThe following relations parameter returns all the container and member\nobjects related to the site **siteId**:\n\n```\ncontainers,members\n```\n
* @param {String} siteId The identifier of a site.
* @param {Object} opts Optional parameters
* @param {string[]} opts.relations Use the relations parameter to include one or more related entities in a single response.
* @param {string[]} opts.fields A list of field names.\n\nYou can use this parameter to restrict the fields\nreturned within a response if, for example, you want to save on overall bandwidth.\n\nThe list applies to a returned individual\nentity or entries within a collection.\n\nIf the API method also supports the **include**\nparameter, then the fields specified in the **include**\nparameter are returned in addition to those specified in the **fields** parameter.\n
* data is of type: {module:model/SiteEntry}
*/
getSite(siteId: string, opts?: any): Promise<SiteEntry> {
return this.sitesApi.getSite(siteId, opts);
}
/**
* Get a container
* Returns information on the container **containerId** in site **siteId**.
* @param {String} siteId The identifier of a site.
* @param {String} containerId The unique identifier of a site container.
* @param {Object} opts Optional parameters
* @param {string[]} opts.fields A list of field names.\n\nYou can use this parameter to restrict the fields\nreturned within a response if, for example, you want to save on overall bandwidth.\n\nThe list applies to a returned individual\nentity or entries within a collection.\n\nIf the API method also supports the **include**\nparameter, then the fields specified in the **include**\nparameter are returned in addition to those specified in the **fields** parameter.\n
* data is of type: {module:model/SiteContainerEntry}
*/
getSiteContainer(siteId: string, containerId: string, opts?: any): Promise<SiteContainerEntry> {
return this.sitesApi.getSiteContainer(siteId, containerId, opts);
}
/**
* Get containers
* Returns a list of containers information for site identified by **siteId**.
* @param {String} siteId The identifier of a site.
* @param {Object} opts Optional parameters
* @param {Integer} opts.skipCount The number of entities that exist in the collection before those included in this list.
* @param {Integer} opts.maxItems The maximum number of items to return in the list.
* @param {string[]} opts.fields A list of field names.\n\nYou can use this parameter to restrict the fields\nreturned within a response if, for example, you want to save on overall bandwidth.\n\nThe list applies to a returned individual\nentity or entries within a collection.\n\nIf the API method also supports the **include**\nparameter, then the fields specified in the **include**\nparameter are returned in addition to those specified in the **fields** parameter.\n
* data is of type: {module:model/SiteContainerPaging}
*/
getSiteContainers(siteId: string, opts?: any): Promise<SiteContainerPaging> {
return this.sitesApi.listSiteContainers(siteId, opts);
}
/**
* Get a site member
* Returns site membership information for person **personId** on site **siteId**.
* @param {String} siteId The identifier of a site.
* @param {String} personId The identifier of a person.
* @param {Object} opts Optional parameters
* @param {string[]} opts.fields A list of field names.\n\nYou can use this parameter to restrict the fields\nreturned within a response if, for example, you want to save on overall bandwidth.\n\nThe list applies to a returned individual\nentity or entries within a collection.\n\nIf the API method also supports the **include**\nparameter, then the fields specified in the **include**\nparameter are returned in addition to those specified in the **fields** parameter.\n
* data is of type: {module:model/SiteMemberEntry}
*/
getSiteMember(siteId: string, personId: string, opts?: any): Promise<SiteMemberEntry> {
return this.sitesApi.getSiteMembership(siteId, personId, opts);
}
/**
* Get members
* Returns a list of site memberships for site **siteId**.
* @param {String} siteId The identifier of a site.
* @param {Object} opts Optional parameters
* @param {Integer} opts.skipCount The number of entities that exist in the collection before those included in this list.
* @param {Integer} opts.maxItems The maximum number of items to return in the list.
* @param {string[]} opts.fields A list of field names.\n\nYou can use this parameter to restrict the fields\nreturned within a response if, for example, you want to save on overall bandwidth.\n\nThe list applies to a returned individual\nentity or entries within a collection.\n\nIf the API method also supports the **include**\nparameter, then the fields specified in the **include**\nparameter are returned in addition to those specified in the **fields** parameter.\n
* data is of type: {module:model/SiteMemberPaging}
*/
getSiteMembers(siteId: string, opts?: any): Promise<SiteMemberPaging> {
return this.sitesApi.listSiteMemberships(siteId, opts);
}
/**
* Get sites
* Returns a list of sites in this repository. You can sort the list if sites using the **orderBy** parameter.\n**orderBy** specifies the name of one or more\ncomma separated properties.\nFor each property you can optionally specify the order direction.\nBoth of the these **orderBy** examples retrieve sites ordered by ascending name:\n\n```\nname\nname ASC\n```\n\nYou can use the **relations** parameter to include one or more related\nentities in a single response and so reduce network traffic.\n\nThe entity types in Alfresco are organized in a tree structure.\nThe **sites** entity has two children, **containers** and **members**.\nThe following relations parameter returns all the container and member\nobjects related to each site:\n\n```\ncontainers,members\n```\n
* @param {Object} opts Optional parameters
* @param {Integer} opts.skipCount The number of entities that exist in the collection before those included in this list.
* @param {Integer} opts.maxItems The maximum number of items to return in the list.
* @param {String} opts.orderBy A string to control the order of the entities returned.
* @param {string[]} opts.relations Use the relations parameter to include one or more related entities in a single response.
* @param {string[]} opts.fields A list of field names.\n\nYou can use this parameter to restrict the fields\nreturned within a response if, for example, you want to save on overall bandwidth.\n\nThe list applies to a returned individual\nentity or entries within a collection.\n\nIf the API method also supports the **include**\nparameter, then the fields specified in the **include**\nparameter are returned in addition to those specified in the **fields** parameter.\n
* data is of type: {module:model/SitePaging}
*/
getSites(opts?: any): Promise<SitePaging> {
return this.sitesApi.listSites(opts);
}
/**
* Delete a site member
* Removes person **personId** as a member of site **siteId**.
* @param {String} siteId The identifier of a site.
* @param {String} personId The identifier of a person.
*/
removeSiteMember(siteId: string, personId: string): Promise<any> {
return this.sitesApi.deleteSiteMembership(siteId, personId);
}
/**
* Update a site member
* Update the membership of person **personId** in site **siteId**.\n\nYou can set the **role** to one of four types:\n\n* SiteConsumer\n* SiteCollaborator\n* SiteContributor\n* SiteManager\n
* @param {String} siteId The identifier of a site.
* @param {String} personId The identifier of a person.
* @param {module:model/SiteMemberRoleBody} siteMemberRoleBody The persons new role
* data is of type: {module:model/SiteMemberEntry}
*/
updateSiteMember(siteId: string, personId: string, siteMemberRoleBody: SiteMembershipBodyUpdate): Promise<SiteMemberEntry> {
return this.sitesApi.updateSiteMembership(siteId, personId, siteMemberRoleBody);
}
} | the_stack |
import {
Component,
Input,
Output,
EventEmitter,
ChangeDetectorRef,
OnChanges,
OnInit,
OnDestroy,
LOCALE_ID,
Inject,
TemplateRef,
ViewEncapsulation,
HostListener
} from '@angular/core';
import { MediaMatcher } from '@angular/cdk/layout';
import { Subject, Subscription } from 'rxjs';
import {
WeekViewHour
} from 'calendar-utils';
import {
isBefore
} from 'date-fns';
import { ResizeEvent } from 'angular-resizable-element';
import { CalendarDragHelper } from 'angular-calendar/esm2015/modules/common/calendar-drag-helper.provider';
import { CalendarResizeHelper } from 'angular-calendar/esm2015/modules/common/calendar-resize-helper.provider';
import { SchedulerConfig } from './scheduler-config';
import { CalendarEventTimesChangedEventType, DateAdapter } from 'angular-calendar';
import { DragMoveEvent, DragEndEvent, DropEvent } from 'angular-draggable-droppable';
import {
CalendarSchedulerEvent,
SchedulerViewDay,
SchedulerViewHour,
SchedulerViewHourSegment,
SchedulerEventTimesChangedEvent,
SchedulerViewEvent,
SchedulerView
} from './models';
import {
shouldFireDroppedEvent,
isDraggedWithinPeriod,
roundToNearest,
getMinutesMoved,
trackByHourColumn,
trackByDayOrEvent,
trackByHour,
trackByHourSegment,
getMinimumEventHeightInMinutes,
getDefaultEventEnd
} from '../common/utils';
import {
DEFAULT_HOUR_SEGMENTS,
DEFAULT_HOUR_SEGMENT_HEIGHT_PX,
MINUTES_IN_HOUR,
Time,
DAYS_IN_WEEK
} from './utils/calendar-scheduler-utils';
import { CalendarSchedulerUtils } from './utils/calendar-scheduler-utils.provider';
/**
* [ngClass]="getPositioningClasses(event)"
*
* [style.top.px]="event.top"
* [style.height.px]="event.height"
* [style.left.%]="event.left"
* [style.width.%]="event.width"
*
* DRAG & DROP & RESIZE -> https://github.com/mattlewis92/angular-calendar/blob/master/projects/angular-calendar/src/modules/week/calendar-week-view.component.ts
* FLEXBOX -> https://css-tricks.com/snippets/css/a-guide-to-flexbox/
*/
@Component({
selector: 'calendar-scheduler-view',
template: `
<div class="cal-scheduler-view">
<calendar-scheduler-header
[days]="days"
[locale]="locale"
[customTemplate]="headerTemplate"
(dayHeaderClicked)="dayHeaderClicked.emit($event)">
</calendar-scheduler-header>
<div class="cal-scheduler" #calendarContainer>
<div class="cal-scheduler-hour-rows aside">
<div class="cal-scheduler-hour align-center horizontal" *ngFor="let hour of hours; trackBy:trackByHour">
<div class="cal-scheduler-time">
<div class="cal-scheduler-time-segment" *ngFor="let segment of hour.segments"
[style.height.px]="hourSegmentHeight">
{{ segment.date | calendarDate:'dayViewHour':locale }}
</div>
</div>
</div>
</div>
<div class="cal-scheduler-cols aside" #dayColumns
[class.cal-resize-active]="resizes.size > 0"
mwlDroppable
(dragEnter)="eventDragEnter = eventDragEnter + 1"
(dragLeave)="eventDragEnter = eventDragEnter - 1">
<div class="cal-scheduler-col"
*ngFor="let day of view.days; trackBy:trackByHourColumn"
[ngClass]="day?.cssClass"
[style.backgroundColor]="day.backgroundColor">
<div #eventContainer
class="cal-scheduler-event-container"
*ngFor="let event of day.events; trackBy:trackByDayOrEvent"
[ngClass]="event.event?.cssClass"
[hidden]="event.height === 0 && event.width === 0"
[style.top.px]="event.top"
[style.height.px]="event.height"
[style.left.%]="event.left"
[style.width.%]="event.width"
[class.zoom-on-hover]="zoomEventOnHover"
mwlResizable
[resizeSnapGrid]="{left: dayColumnWidth, right: dayColumnWidth, top: eventSnapSize || hourSegmentHeight, bottom: eventSnapSize || hourSegmentHeight}"
[validateResize]="validateResize"
[allowNegativeResizes]="true"
(resizeStart)="resizeStarted(dayColumns, event, $event)"
(resizing)="resizing(event, $event)"
(resizeEnd)="resizeEnded(event)"
mwlDraggable
dragActiveClass="cal-drag-active"
[dropData]="{event: event.event, calendarId: calendarId}"
[dragAxis]="{
x: event.event.draggable && resizes.size === 0,
y: event.event.draggable && resizes.size === 0
}"
[dragSnapGrid]="snapDraggedEvents ? {x: dayColumnWidth, y: eventSnapSize || hourSegmentHeight} : {}"
[ghostDragEnabled]="!snapDraggedEvents"
[validateDrag]="validateDrag"
(dragPointerDown)="dragStarted(dayColumns, eventContainer, event)"
(dragging)="dragMove(event, $event)"
(dragEnd)="dragEnded(event, $event, dayColumnWidth, true)">
<div *ngIf="event.event?.resizable?.beforeStart && !event.startsBeforeDay"
class="cal-resize-handle cal-resize-handle-before-start"
mwlResizeHandle
[resizeEdges]="{
left: true,
top: true
}">
</div>
<calendar-scheduler-event
[day]="day"
[event]="event"
[container]="eventContainer"
[showContent]="showEventContent && event.height >= 75"
[showActions]="showEventActions"
[showStatus]="showEventStatus"
[customTemplate]="eventTemplate"
[eventTitleTemplate]="eventTitleTemplate"
(eventClicked)="eventClicked.emit($event)">
</calendar-scheduler-event>
<div *ngIf="event.event?.resizable?.afterEnd && !event.endsAfterDay"
class="cal-resize-handle cal-resize-handle-after-end"
mwlResizeHandle
[resizeEdges]="{
right: true,
bottom: true
}">
</div>
</div>
<div class="cal-scheduler-hour"
*ngFor="let hour of day.hours; let i = index; trackBy:trackByHour"
[class.cal-even]="i % 2 === 0"
[class.cal-odd]="i % 2 === 1"
[ngClass]="hour.cssClass"
[style.backgroundColor]="hour.backgroundColor"
(mwlClick)="hourClicked.emit({hour: hour})"
[class.cal-past]="day.isPast"
[class.cal-today]="day.isToday"
[class.cal-future]="day.isFuture"
[class.cal-weekend]="day.isWeekend"
[class.cal-in-month]="day.inMonth"
[class.cal-out-month]="!day.inMonth">
<div class="cal-scheduler-hour-segments">
<calendar-scheduler-hour-segment
*ngFor="let segment of hour.segments; trackBy:trackByHourSegment"
[day]="day"
[segment]="segment"
[locale]="locale"
[customTemplate]="cellTemplate"
[hourSegmentHeight]="hourSegmentHeight"
[showHour]="showSegmentHour"
(segmentClicked)="segmentClicked.emit($event)"
mwlDroppable
[dragOverClass]="!dragActive || !snapDraggedEvents ? 'cal-drag-over' : 'null'"
dragActiveClass="cal-drag-active"
(drop)="eventDropped($event, segment.date)">
</calendar-scheduler-hour-segment>
</div>
</div>
</div>
</div>
</div>
</div>
`,
styleUrls: ['./calendar-scheduler-view.component.scss'],
encapsulation: ViewEncapsulation.None
})
export class CalendarSchedulerViewComponent implements OnInit, OnChanges, OnDestroy {
/**
 * Number of days shown. Values above DAYS_IN_WEEK (7) are clamped down to 7.
 */
_viewDays: number = DAYS_IN_WEEK;
get viewDays(): number {
    return this._viewDays;
}
@Input() set viewDays(value: number) {
    // Only the upper bound is normalized; values below 1 are not guarded here.
    this._viewDays = Math.min(value, DAYS_IN_WEEK);
}
/**
* The current view date
*/
@Input() viewDate: Date;
/**
* Specify if the calendar must be responsive on window resize, automatically changing the number of days shown
*/
@Input() responsive: boolean = false;
/**
* An array of events to display on view
*/
@Input() events: CalendarSchedulerEvent[] = [];
/**
* The number of segments in an hour. Must be one of 1, 2, 4, 6
*/
@Input() hourSegments: 1 | 2 | 4 | 6 = DEFAULT_HOUR_SEGMENTS;
/**
* The height in pixels of each hour segment
*/
@Input() hourSegmentHeight: number = DEFAULT_HOUR_SEGMENT_HEIGHT_PX;
/**
* An array of day indexes (0 = sunday, 1 = monday etc) that will be hidden on the view
*/
@Input() excludeDays: number[] = [];
/**
* Specify if the first day of current scheduler view has to be today or the first day of the week
*/
@Input() startsWithToday: boolean = false;
/**
* Specify if content must be shown or not
*/
@Input() showEventContent: boolean = true;
/**
* Specify if actions must be shown or not
*/
@Input() showEventActions: boolean = true;
/**
* Specify if status must be shown or not
*/
@Input() showEventStatus: boolean = true;
/**
* Specify if hour must be shown on segment or not
*/
@Input() showSegmentHour: boolean = false;
/**
* Specify if event must zoom on mouse hover or not
*/
@Input() zoomEventOnHover: boolean = false;
/**
* A function that will be called before each cell is rendered. The first argument will contain the calendar (day, hour or segment) cell.
* If you add the `cssClass` property to the cell it will add that class to the cell in the template
*/
@Input() dayModifier: Function;
@Input() hourModifier: Function;
@Input() segmentModifier: Function;
@Input() eventModifier: Function;
/**
* An observable that when emitted on will re-render the current view
*/
@Input() refresh: Subject<any>;
/**
* The locale used to format dates
*/
@Input() locale: string;
/**
 * The grid size to snap resizing and dragging of events to.
 * NOTE(review): this default is captured when field initializers run, so a custom
 * [hourSegmentHeight] bound afterwards by Angular does not update it — confirm intended.
 */
@Input() eventSnapSize: number = this.hourSegmentHeight;
/**
* Whether to snap events to a grid when dragging
*/
@Input() snapDraggedEvents: boolean = true;
/**
* The start number of the week
*/
@Input() weekStartsOn: number;
/**
* A custom template to use to replace the header
*/
@Input() headerTemplate: TemplateRef<any>;
/**
* A custom template to use to replace the day cell
*/
@Input() cellTemplate: TemplateRef<any>;
/**
* A custom template to use for week view events
*/
@Input() eventTemplate: TemplateRef<any>;
/**
* A custom template to use for event titles
*/
@Input() eventTitleTemplate: TemplateRef<any>;
/**
* A custom template to use for all day events
*/
@Input() allDayEventTemplate: TemplateRef<any>;
/**
* An array of day indexes (0 = sunday, 1 = monday etc) that indicate which days are weekends
*/
@Input() weekendDays: number[];
/**
* The day start hours in 24 hour time. Must be 0-23
*/
@Input() dayStartHour: number = 0;
/**
* The day start minutes. Must be 0-59
*/
@Input() dayStartMinute: number = 0;
/**
* The day end hours in 24 hour time. Must be 0-23
*/
@Input() dayEndHour: number = 23;
/**
* The day end minutes. Must be 0-59
*/
@Input() dayEndMinute: number = 59;
/**
* Called when view days value changes
*/
@Output() viewDaysChanged: EventEmitter<number> = new EventEmitter<number>();
/**
* Called when a header week day is clicked
*/
@Output() dayHeaderClicked: EventEmitter<{ day: SchedulerViewDay }> = new EventEmitter<{ day: SchedulerViewDay }>();
/**
* Called when the hour is clicked
*/
@Output() hourClicked: EventEmitter<{ hour: SchedulerViewHour }> = new EventEmitter<{ hour: SchedulerViewHour }>();
/**
* Called when the segment is clicked
*/
@Output() segmentClicked: EventEmitter<{ segment: SchedulerViewHourSegment }> = new EventEmitter<{ segment: SchedulerViewHourSegment }>();
/**
* Called when the event is clicked
*/
@Output() eventClicked: EventEmitter<{ event: CalendarSchedulerEvent }> = new EventEmitter<{ event: CalendarSchedulerEvent }>();
/**
* Called when an event is resized or dragged and dropped
*/
@Output() eventTimesChanged: EventEmitter<SchedulerEventTimesChangedEvent> = new EventEmitter<SchedulerEventTimesChangedEvent>();
/**
* @hidden
*/
view: SchedulerView;
/**
* @hidden
*/
refreshSubscription: Subscription;
/**
* @hidden
*/
days: SchedulerViewDay[];
/**
* @hidden
*/
hours: WeekViewHour[] = [];
/**
* @hidden
*/
// resizes: Map<CalendarSchedulerEvent, SchedulerResizeEvent> = new Map();
resizes: Map<CalendarSchedulerEvent, ResizeEvent> = new Map();
/**
* @hidden
*/
eventDragEnter: number = 0;
/**
* @hidden
*/
dragActive: boolean = false;
/**
* @hidden
*/
dragAlreadyMoved = false;
/**
* @hidden
*/
validateResize: (args: any) => boolean;
/**
* @hidden
*/
validateDrag: (args: any) => boolean;
/**
* @hidden
*/
dayColumnWidth: number;
/**
* @hidden
*/
calendarId: symbol = Symbol('angular calendar scheduler view id');
/**
* @hidden
*/
trackByHourColumn = trackByHourColumn;
/**
* @hidden
*/
trackByDayOrEvent = trackByDayOrEvent;
/**
* @hidden
*/
trackByHour = trackByHour;
/**
* @hidden
*/
trackByHourSegment = trackByHourSegment;
mobileQueryXs: MediaQueryList;
mobileQuerySm: MediaQueryList;
mobileQueryListener: (this: MediaQueryList, ev: MediaQueryListEvent) => any;
// Re-evaluate the responsive day count whenever the browser window is resized.
@HostListener('window:resize', ['$event'])
onResize(event: any) {
    this.adjustViewDays();
}
/**
 * @hidden
 * Resolves the locale (config value wins over the injected LOCALE_ID) and registers
 * media-query listeners used by the responsive mode to pick the visible day count.
 */
constructor(private cdr: ChangeDetectorRef, @Inject(LOCALE_ID) locale: string, private config: SchedulerConfig,
    private utils: CalendarSchedulerUtils, private dateAdapter: DateAdapter, private media: MediaMatcher,
    private changeDetectorRef: ChangeDetectorRef) {
    this.locale = this.config.locale || locale;
    // See 'Responsive breakpoints' at https://getbootstrap.com/docs/4.1/layout/overview/
    this.mobileQueryXs = this.media.matchMedia('(max-width: 576px)'); // Extra small devices (portrait phones, less than 576px)
    this.mobileQuerySm = this.media.matchMedia('(max-width: 768px)'); // Small devices (landscape phones, less than 768px)
    this.mobileQueryListener = () => this.changeDetectorRef.detectChanges();
    // addListener kept (instead of the commented addEventListener) — presumably for older Safari support.
    // this.mobileQueryXs.addEventListener('change', this.mobileQueryListener);
    this.mobileQueryXs.addListener(this.mobileQueryListener);
    // this.mobileQuerySm.addEventListener('change', this.mobileQueryListener);
    this.mobileQuerySm.addListener(this.mobileQueryListener);
}
/**
 * @hidden
 * Builds the initial view and, when a `refresh` subject is provided,
 * re-renders the whole view every time it emits.
 */
ngOnInit(): void {
    this.adjustViewDays();
    if (this.refresh) {
        this.refreshSubscription = this.refresh
            // tslint:disable-next-line: deprecation
            .subscribe({
                next: () => {
                    this.refreshAll();
                    this.cdr.markForCheck();
                },
                error: () => {},
                complete: () => {}
            });
    }
}
/**
 * @hidden
 * Rebuilds only the parts of the view affected by the changed inputs:
 * the header (days row) and/or the hour grid plus the event body.
 */
ngOnChanges(changes: any): void {
    if (changes.viewDays || changes.viewDate || changes.excludeDays || changes.weekendDays) {
        this.refreshHeader();
    }
    // NOTE(review): `changes.eventWidth` is checked below but no `eventWidth` input exists on
    // this component (event width is hard-coded to 1 in getSchedulerView) — confirm removable.
    if (changes.viewDays ||
        changes.viewDate ||
        changes.events ||
        changes.dayStartHour ||
        changes.dayEndHour ||
        changes.dayStartMinute ||
        changes.dayEndMinute ||
        changes.excludeDays ||
        changes.eventWidth
    ) {
        this.refreshHourGrid();
        this.refreshBody();
    }
}
/**
 * @hidden
 * Tears down the refresh subscription and the media-query listeners.
 */
ngOnDestroy(): void {
    if (this.refreshSubscription) {
        this.refreshSubscription.unsubscribe();
    }
    // this.mobileQueryXs.removeEventListener('change', this.mobileQueryListener);
    this.mobileQueryXs.removeListener(this.mobileQueryListener);
    // this.mobileQuerySm.removeEventListener('change', this.mobileQueryListener);
    this.mobileQuerySm.removeListener(this.mobileQueryListener);
}
/**
 * Programmatically changes the number of visible days; emits `viewDaysChanged`
 * and re-renders only when the (clamped) value actually changed.
 */
setViewDays(viewDays: number) {
    const previousViewDays: number = this._viewDays;
    // Delegate to the setter so the value is clamped to DAYS_IN_WEEK.
    this.viewDays = viewDays;
    if (previousViewDays !== this._viewDays) {
        this.viewDaysChanged.emit(this._viewDays);
        this.refreshAll();
    }
}
/**
 * In responsive mode, picks the day count matching the current viewport width
 * (narrowest matching breakpoint wins) and re-renders when it changed.
 */
protected adjustViewDays(): void {
    const previousViewDays: number = this._viewDays;
    if (this.responsive) {
        // https://www.digitalocean.com/community/tutorials/angular-breakpoints-angular-cdk
        // With a Component: https://www.digitalocean.com/community/tutorials/detect-responsive-screen-sizes-in-angular
        let targetDays: number = DAYS_IN_WEEK;
        if (this.mobileQueryXs.matches) {
            targetDays = 1;
        } else if (this.mobileQuerySm.matches) {
            targetDays = 3;
        }
        this.viewDays = targetDays;
    }
    if (previousViewDays !== this._viewDays) {
        this.viewDaysChanged.emit(this._viewDays);
        this.refreshAll();
    }
}
/**
 * Builds the space-separated positioning class list (day / time / length) for an event.
 */
protected getPositioningClasses(day: SchedulerViewDay, event: CalendarSchedulerEvent): string {
    const dayClass: string = this.getDayClass(event.start);
    const timeClass: string = this.getTimeClass(day.date, event);
    const lengthClass: string = this.getLengthClass(day.date, event);
    return [dayClass, timeClass, lengthClass].join(' ');
}
// Placeholder for a per-day positioning class; currently always empty.
private getDayClass(date: Date): string {
    return '';
}
/**
 * Computes the `timeHHMM` positioning class for an event on the given day column.
 * Events starting on `date` get a class built from their start time (shifted by
 * `dayStartHour` so it is relative to the visible grid); events that started on an
 * earlier day are pinned to the top with `time0000`.
 * @param date the day column date
 * @param event the scheduler event
 * @returns a `timeHHMM` class name, or an empty string when no class applies
 */
private getTimeClass(date: Date, event: CalendarSchedulerEvent): string {
    if (this.dateAdapter.isSameDay(date, event.start)) {
        let hours: number = event.start.getHours();
        if (this.dayStartHour > 0) { hours = event.start.getHours() - this.dayStartHour; }
        const hoursString: string = hours < 10 ? `0${hours}` : `${hours}`;
        const minutesString: string = event.start.getMinutes() < 10 ? `0${event.start.getMinutes()}` : `${event.start.getMinutes()}`;
        return `time${hoursString}${minutesString}`;
    } else if (isBefore(event.start, this.dateAdapter.startOfDay(date))) {
        return `time0000`;
    }
    // Fix: previously fell through and returned `undefined` despite the declared `string`
    // return type (rejected by noImplicitReturns). An empty string leaves the joined
    // class list unchanged, since Array.join renders undefined and '' identically.
    return '';
}
/**
 * Computes the `lengthN` positioning class (N = visible minutes on this day column).
 * For events starting on `date`, N is the event duration capped at the minutes left
 * until the configured end of day; for events spilling over from an earlier day, N is
 * the portion that falls inside this day's visible hours.
 * @param date the day column date
 * @param event the scheduler event
 * @returns a `lengthN` class name, or an empty string when no class applies
 */
private getLengthClass(date: Date, event: CalendarSchedulerEvent): string {
    if (this.dateAdapter.isSameDay(date, event.start)) {
        const durationInMinutes: number = this.dateAdapter.differenceInMinutes(event.end, event.start);
        const leftToEndOfDay: number = this.dateAdapter.differenceInMinutes(this.dateAdapter.setMinutes(this.dateAdapter.setHours(event.start, this.dayEndHour + 1), 0), event.start);
        return leftToEndOfDay > durationInMinutes ? `length${durationInMinutes}` : `length${leftToEndOfDay}`;
    } else if (isBefore(event.start, this.dateAdapter.startOfDay(date))) {
        let leftDurationInMinutes: number = 0;
        if (this.dateAdapter.isSameDay(date, event.end)) {
            leftDurationInMinutes = this.dateAdapter.differenceInMinutes(event.end, this.dateAdapter.startOfDay(date));
            if (this.dayStartHour > 0) { leftDurationInMinutes = (event.end.getHours() - this.dayStartHour) * MINUTES_IN_HOUR; }
        } else {
            leftDurationInMinutes = ((this.dayEndHour + 1) - this.dayStartHour) * MINUTES_IN_HOUR;
        }
        return `length${leftDurationInMinutes}`;
    }
    // Fix: previously fell through and returned `undefined` despite the declared `string`
    // return type (rejected by noImplicitReturns). An empty string leaves the joined
    // class list unchanged, since Array.join renders undefined and '' identically.
    return '';
}
/**
 * Rebuilds the hour rows shown in the left-hand time gutter from the current
 * segment count and day start/end inputs.
 */
private refreshHourGrid(): void {
    const dayStart = { hour: this.dayStartHour, minute: this.dayStartMinute };
    const dayEnd = { hour: this.dayEndHour, minute: this.dayEndMinute };
    this.hours = this.utils.getSchedulerViewHourGrid({
        viewDate: this.viewDate,
        hourSegments: this.hourSegments,
        dayStart: dayStart,
        dayEnd: dayEnd
    });
}
/**
 * Recomputes the day-header row from the current date, day count and exclusions.
 */
private refreshHeader(): void {
    const options = {
        viewDate: this.viewDate,
        viewDays: this.viewDays,
        weekStartsOn: this.weekStartsOn,
        startsWithToday: this.startsWithToday,
        excluded: this.excludeDays,
        weekendDays: this.weekendDays
    };
    this.days = this.utils.getSchedulerViewDays(options);
}
/**
 * Rebuilds the event body for the current (or supplied preview) events and runs
 * the user-provided modifier callbacks on days, hours, segments and events.
 * @param events optional preview events; defaults to the component's own events
 */
private refreshBody(events?: CalendarSchedulerEvent[]): void {
    this.view = this.getSchedulerView(events || this.events);
    if (this.dayModifier) {
        // Header days are a separate array from view.days, so both get the modifier.
        this.days.forEach(day => this.dayModifier(day));
    }
    if (this.dayModifier || this.hourModifier || this.segmentModifier) {
        this.view.days.forEach(day => {
            if (this.dayModifier) {
                this.dayModifier(day);
            }
            day.hours.forEach((hour: SchedulerViewHour) => {
                if (this.hourModifier) {
                    this.hourModifier(hour);
                }
                hour.segments.forEach((segment: SchedulerViewHourSegment) => {
                    if (this.segmentModifier) {
                        this.segmentModifier(segment);
                    }
                });
            });
        });
    }
    if (this.eventModifier) {
        // NOTE(review): runs on this.events even when preview `events` were passed — confirm intended.
        this.events.forEach(event => this.eventModifier(event));
    }
}
// Rebuilds the header, the hour gutter and the event body from scratch.
private refreshAll(): void {
    this.refreshHeader();
    this.refreshHourGrid();
    this.refreshBody();
}
/**
 * Assembles the full scheduler view model for the given events from the
 * component's current inputs (event width is fixed at 1 column).
 */
private getSchedulerView(events: CalendarSchedulerEvent[]): SchedulerView {
    const dayStart: Time = <Time>{ hour: this.dayStartHour, minute: this.dayStartMinute };
    const dayEnd: Time = <Time>{ hour: this.dayEndHour, minute: this.dayEndMinute };
    return this.utils.getSchedulerView({
        events: events,
        viewDate: this.viewDate,
        viewDays: this.viewDays,
        hourSegments: this.hourSegments,
        weekStartsOn: this.weekStartsOn,
        startsWithToday: this.startsWithToday,
        dayStart: dayStart,
        dayEnd: dayEnd,
        excluded: this.excludeDays,
        eventWidth: 1,
        hourSegmentHeight: this.hourSegmentHeight,
        logEnabled: this.config.logEnabled
    });
}
//#region RESIZE
/**
 * @hidden
 * Called when an event resize gesture begins: remembers the in-flight resize,
 * measures the day column width and installs the resize validator.
 */
resizeStarted(eventsContainer: HTMLElement, event: SchedulerViewEvent, resizeEvent: ResizeEvent): void {
    this.resizes.set(event.event, resizeEvent);
    this.dayColumnWidth = Math.floor(eventsContainer.offsetWidth / this.days.length);
    const resizeHelper: CalendarResizeHelper = new CalendarResizeHelper(eventsContainer);
    this.validateResize = ({ rectangle }) => resizeHelper.validateResize({ rectangle });
    this.cdr.markForCheck();
}
/**
 * @hidden
 * Called continuously while resizing: previews the resize by swapping the
 * affected events for temporarily adjusted copies, then restores the originals
 * so Angular's trackBy keeps the DOM stable.
 */
resizing(event: SchedulerViewEvent, resizeEvent: ResizeEvent): void {
    this.resizes.set(event.event, resizeEvent);
    const adjustedEvents = new Map<CalendarSchedulerEvent, CalendarSchedulerEvent>();
    const tempEvents = [...this.events];
    this.resizes.forEach((lastResizeEvent, ev) => {
        const newEventDates = this.getResizedEventDates(
            ev,
            lastResizeEvent
        );
        // Shallow copy of the event with the previewed start/end applied.
        const adjustedEvent = { ...ev, ...newEventDates };
        adjustedEvents.set(adjustedEvent, ev);
        const eventIndex = tempEvents.indexOf(ev);
        tempEvents[eventIndex] = adjustedEvent;
    });
    this.restoreOriginalEvents(tempEvents, adjustedEvents);
}
/**
 * @hidden
 * Called when the resize gesture ends: restores the view from the untouched
 * events and emits the final resized times for the consumer to apply.
 */
resizeEnded(event: SchedulerViewEvent): void {
    this.view = this.getSchedulerView(this.events);
    // Pop the in-flight resize recorded by resizeStarted/resizing.
    const lastResizeEvent = this.resizes.get(event.event);
    this.resizes.delete(event.event);
    const newEventDates = this.getResizedEventDates(
        event.event,
        lastResizeEvent
    );
    this.eventTimesChanged.emit(
        <SchedulerEventTimesChangedEvent>{
            newStart: newEventDates.start,
            newEnd: newEventDates.end,
            event: event.event,
            type: CalendarEventTimesChangedEventType.Resize
        });
}
/**
 * Computes the new start/end dates of an event from an in-flight resize,
 * clamping so the event never shrinks below one segment's worth of minutes.
 * Horizontal edges move whole day columns; vertical edges move along the snap grid.
 * @param event the event being resized
 * @param resizeEvent the current edge offsets, in pixels
 * @returns the adjusted start and end dates
 */
private getResizedEventDates(event: CalendarSchedulerEvent, resizeEvent: ResizeEvent): { start: Date, end: Date} {
    const minimumEventHeight = getMinimumEventHeightInMinutes(this.hourSegments, this.hourSegmentHeight);
    const newEventDates = {
        start: event.start,
        end: getDefaultEventEnd(this.dateAdapter, event, minimumEventHeight)
    };
    // Smallest allowed extent: latest start / earliest end that keep the minimum height.
    const { end, ...eventWithoutEnd } = event;
    const smallestResizes = {
        start: this.dateAdapter.addMinutes(newEventDates.end, minimumEventHeight * -1),
        end: getDefaultEventEnd(this.dateAdapter, eventWithoutEnd, minimumEventHeight)
    };
    if (resizeEvent.edges.left) {
        // Left edge: shift the start by whole days.
        const daysDiff = Math.round(
            +resizeEvent.edges.left / this.dayColumnWidth
        );
        const newStart = this.dateAdapter.addDays(newEventDates.start, daysDiff);
        if (newStart < smallestResizes.start) {
            newEventDates.start = newStart;
        } else {
            newEventDates.start = smallestResizes.start;
        }
    } else if (resizeEvent.edges.right) {
        // Right edge: shift the end by whole days.
        const daysDiff = Math.round(
            +resizeEvent.edges.right / this.dayColumnWidth
        );
        const newEnd = this.dateAdapter.addDays(newEventDates.end, daysDiff);
        if (newEnd > smallestResizes.end) {
            newEventDates.end = newEnd;
        } else {
            newEventDates.end = smallestResizes.end;
        }
    }
    if (resizeEvent.edges.top) {
        // Top edge: snap the pixel delta to the grid, convert to minutes, move the start.
        const precision: number = this.eventSnapSize || this.hourSegmentHeight;
        const draggedInPixelsSnapSize = Math.round((resizeEvent.edges.top as number) / precision) * precision;
        const pixelAmountInMinutes = MINUTES_IN_HOUR / (this.hourSegments * this.hourSegmentHeight);
        const minutesMoved = draggedInPixelsSnapSize * pixelAmountInMinutes;
        const newStart = this.dateAdapter.addMinutes(newEventDates.start, minutesMoved);
        if (newStart < smallestResizes.start) {
            newEventDates.start = newStart;
        } else {
            newEventDates.start = smallestResizes.start;
        }
    } else if (resizeEvent.edges.bottom) {
        // Bottom edge: same snapping, applied to the end.
        const precision: number = this.eventSnapSize || this.hourSegmentHeight;
        const draggedInPixelsSnapSize = Math.round((resizeEvent.edges.bottom as number) / precision) * precision;
        const pixelAmountInMinutes = MINUTES_IN_HOUR / (this.hourSegments * this.hourSegmentHeight);
        const minutesMoved = draggedInPixelsSnapSize * pixelAmountInMinutes;
        const newEnd = this.dateAdapter.addMinutes(newEventDates.end, minutesMoved);
        if (newEnd > smallestResizes.end) {
            newEventDates.end = newEnd;
        } else {
            newEventDates.end = smallestResizes.end;
        }
    }
    return newEventDates;
}
//#endregion
//#region DRAG & DROP
/**
 * @hidden
 * Handles a drop on an hour segment: when the drop is valid for this calendar,
 * emits a Drop change with the segment's date as the new start (end left null).
 */
eventDropped(dropEvent: DropEvent<{ event?: CalendarSchedulerEvent; calendarId?: symbol }>, date: Date): void {
    if (shouldFireDroppedEvent(dropEvent, date, this.calendarId)) {
        this.eventTimesChanged.emit(
            <SchedulerEventTimesChangedEvent>{
                type: CalendarEventTimesChangedEventType.Drop,
                event: dropEvent.dropData.event,
                newStart: date,
                newEnd: null
            });
    }
}
/**
 * @hidden
 * Called on pointer-down on a draggable event: measures the column width,
 * installs the drag validator (drags are rejected while a resize is active)
 * and hides duplicate renderings of the same event in other day columns.
 */
dragStarted(eventsContainer: HTMLElement, eventContainer: HTMLElement, event?: SchedulerViewEvent): void {
    this.dayColumnWidth = Math.floor(eventsContainer.offsetWidth / this.days.length);
    const dragHelper: CalendarDragHelper = new CalendarDragHelper(
        eventsContainer,
        eventContainer
    );
    this.validateDrag = ({ x, y, transform }) =>
        this.resizes.size === 0 && dragHelper.validateDrag({
            x,
            y,
            snapDraggedEvents: this.snapDraggedEvents,
            dragAlreadyMoved: this.dragAlreadyMoved,
            transform
        });
    this.dragActive = true;
    this.dragAlreadyMoved = false;
    this.eventDragEnter = 0;
    if (!this.snapDraggedEvents && event) {
        this.view.days.forEach((day: SchedulerViewDay) => {
            const linkedEvent = day.events.find(ev => ev.event === event.event && ev !== event);
            // hide any linked events while dragging
            if (linkedEvent) {
                linkedEvent.width = 0;
                linkedEvent.height = 0;
            }
        });
    }
    this.cdr.markForCheck();
}
/**
 * @hidden
 * Called while dragging: when snapping is enabled, previews the moved event by
 * substituting an adjusted copy and immediately restoring the original so
 * trackBy keeps the DOM stable.
 */
dragMove(event: SchedulerViewEvent, dragEvent: DragMoveEvent) {
    if (this.snapDraggedEvents) {
        const newEventTimes = this.getDragMovedEventTimes(
            event,
            dragEvent,
            this.dayColumnWidth,
            true
        );
        const originalEvent = event.event;
        const adjustedEvent = { ...originalEvent, ...newEventTimes };
        const tempEvents = this.events.map(ev => {
            if (ev === originalEvent) {
                return adjustedEvent;
            }
            return ev;
        });
        this.restoreOriginalEvents(tempEvents, new Map([[adjustedEvent, originalEvent]]));
    }
    this.dragAlreadyMoved = true;
}
/**
 * Called when a drag gesture finishes: restores the view from the untouched
 * events and, if the drop landed inside the visible period, emits the new times.
 * @param useY when true, the vertical delta also moves the event's time of day
 */
dragEnded(event: SchedulerViewEvent, dragEndEvent: DragEndEvent, dayWidth: number, useY = false): void {
    this.view = this.getSchedulerView(this.events);
    this.dragActive = false;
    const { start, end } = this.getDragMovedEventTimes(event, dragEndEvent, dayWidth, useY);
    if (
        this.eventDragEnter > 0 &&
        isDraggedWithinPeriod(start, end, this.view.period)
    ) {
        this.eventTimesChanged.emit(
            <SchedulerEventTimesChangedEvent>{
                newStart: start,
                newEnd: end,
                event: event.event,
                type: CalendarEventTimesChangedEventType.Drag
            });
    }
}
/**
 * Translates a drag delta (pixels) into new start/end dates: the x delta is
 * rounded to whole day columns; the y delta (when `useY`) is converted to
 * minutes via the snap grid.
 */
private getDragMovedEventTimes(event: SchedulerViewEvent, dragEndEvent: DragEndEvent | DragMoveEvent, dayWidth: number, useY: boolean): { start: Date, end: Date} {
    const daysDragged = roundToNearest(dragEndEvent.x, dayWidth) / dayWidth;
    const minutesMoved = useY ?
        getMinutesMoved(
            dragEndEvent.y,
            this.hourSegments,
            this.hourSegmentHeight,
            this.eventSnapSize)
        : 0;
    const start = this.dateAdapter.addMinutes(
        this.dateAdapter.addDays(event.event.start, daysDragged),
        minutesMoved
    );
    let end: Date;
    if (event.event.end) {
        end = this.dateAdapter.addMinutes(
            this.dateAdapter.addDays(event.event.end, daysDragged),
            minutesMoved
        );
    }
    // `end` stays undefined for open-ended events.
    return { start, end };
}
/**
 * Re-renders the body with preview (adjusted) events, then swaps each adjusted
 * event's reference back to the original so Angular's trackBy does not rebuild
 * the DOM node mid-gesture.
 * @param tempEvents the event list containing the adjusted copies
 * @param adjustedEvents map from adjusted copy back to its original event
 */
private restoreOriginalEvents(tempEvents: CalendarSchedulerEvent[], adjustedEvents: Map<CalendarSchedulerEvent, CalendarSchedulerEvent>) {
    this.refreshBody(tempEvents);
    const adjustedEventsArray = tempEvents.filter(event => adjustedEvents.has(event));
    this.view.days.forEach(day => {
        adjustedEventsArray.forEach(adjustedEvent => {
            const originalEvent = adjustedEvents.get(adjustedEvent);
            const existingColumnEvent = day.events.find(ev => ev.event === adjustedEvent);
            if (existingColumnEvent) {
                // restore the original event so trackBy kicks in and the dom isn't changed
                existingColumnEvent.event = originalEvent;
            } else {
                // add a dummy event to the drop so if the event was removed from the original column the drag doesn't end early
                day.events.push({
                    event: originalEvent,
                    left: 0,
                    top: 0,
                    height: 0,
                    width: 0,
                    startsBeforeDay: false,
                    endsAfterDay: false
                });
            }
        });
    });
    adjustedEvents.clear();
}
//#endregion
} | the_stack |
import Ionicons from '@expo/vector-icons/build/Ionicons';
import * as Contacts from 'expo-contacts';
import * as ImagePicker from 'expo-image-picker';
import * as Linking from 'expo-linking';
import * as React from 'react';
import { Platform, RefreshControl, StyleSheet, Text, TouchableOpacity, View } from 'react-native';
import HeaderContainerRight from '../../components/HeaderContainerRight';
import HeaderIconButton from '../../components/HeaderIconButton';
import Colors from '../../constants/Colors';
import usePermissions from '../../utilities/usePermissions';
import ContactDetailList, { DetailListItem } from './ContactDetailList';
import * as ContactUtils from './ContactUtils';
import ContactsAvatar from './ContactsAvatar';
// True when running on iOS (some contact features below are iOS-only).
const isIos = Platform.OS === 'ios';

/**
 * Checks the media-library permission; when it has not been granted, sends the
 * user to the system settings and reports failure.
 */
async function getPermissionAsync() {
  const { status } = await ImagePicker.getMediaLibraryPermissionsAsync();
  if (status === 'granted') {
    return true;
  }
  Linking.openSettings();
  return false;
}
// Screen wrapper: wires the navigation header actions (share / open native
// form / clone on iOS) and gates the detail view behind the Contacts permission.
export default function ContactDetailScreen(props: any) {
  React.useLayoutEffect(() => {
    props.navigation.setOptions({
      title: 'Contacts',
      headerRight: () => (
        <HeaderContainerRight>
          <HeaderIconButton
            name="md-share"
            onPress={async () => {
              Contacts.shareContactAsync(props.route.params.id, 'Call me :]');
            }}
          />
          <HeaderIconButton
            name="md-open"
            onPress={async () => {
              await Contacts.presentFormAsync(props.route.params.id);
              // tslint:disable-next-line no-console
              console.log('the native contact form has been closed');
            }}
          />
          {isIos && (
            <HeaderIconButton
              name="md-copy"
              onPress={async () => {
                await ContactUtils.cloneAsync(props.route.params.id);
                props.navigation.goBack();
              }}
            />
          )}
        </HeaderContainerRight>
      ),
    });
  }, [props.navigation]);
  // Ask for the Contacts permission; render a placeholder until it is granted.
  const [permission] = usePermissions(Contacts.requestPermissionsAsync);
  if (!permission) {
    return (
      <View style={styles.permissionContainer}>
        <Text>No Contact Permission</Text>
      </View>
    );
  }
  return <ContactDetailView navigation={props.navigation} route={props.route} />;
}
// Detail view for a single contact: loads the record by id, renders avatar +
// quick-action links + every array-valued field as list sections, and supports
// photo replacement (iOS) and deletion.
function ContactDetailView({
  navigation,
  route: {
    params: { id },
  },
}: any) {
  const [contact, setContact] = React.useState<Contacts.Contact | null>(null);
  const [refreshing, setRefreshing] = React.useState(false);
  // Fetch (or re-fetch) the contact; drives the pull-to-refresh spinner.
  const loadAsync = async () => {
    setRefreshing(true);
    const contact = await Contacts.getContactByIdAsync(id);
    setContact(contact ?? null);
    setRefreshing(false);
  };
  // Delete the contact and navigate back on success.
  const deleteAsync = async () => {
    try {
      await Contacts.removeContactAsync(id);
      navigation.goBack();
    // NOTE(review): destructuring the catch binding assumes an Error-like object —
    // rejected under useUnknownInCatchVariables; confirm TS config.
    } catch ({ message }) {
      // tslint:disable-next-line no-console
      console.error(message);
    }
  };
  // "Job - Department" when both exist, otherwise whichever is present.
  const jobTitle = React.useMemo<string | null>(() => {
    if (!contact) return null;
    const { jobTitle, department } = contact;
    if (!jobTitle || !department) {
      return jobTitle ?? department ?? null;
    }
    return `${jobTitle} - ${department}`;
  }, [contact]);
  // Secondary lines rendered under the contact name (nulls filtered out).
  const subtitles = React.useMemo<string[]>(() => {
    if (!contact) return [];
    return [
      contact.phoneticFirstName,
      contact.nickname,
      contact.maidenName,
      jobTitle,
      contact.company,
    ].filter(Boolean) as string[];
  }, [jobTitle, contact]);
  // Quick-action deep links built from the primary phone number / email.
  const links = React.useMemo<any[]>(() => {
    if (!contact) return [];
    const phone = ContactUtils.getPrimary<Contacts.PhoneNumber>(contact.phoneNumbers ?? []);
    const email = ContactUtils.getPrimary<Contacts.Email>(contact.emails ?? []);
    return [
      { icon: 'text', text: 'message', format: 'sms', uri: phone?.number },
      { icon: 'call', text: 'call', format: 'tel', uri: phone?.number },
      { icon: 'videocam', text: 'video', format: 'facetime', uri: email?.email },
      { icon: 'mail', text: 'mail', format: 'mailto', uri: email?.email },
      { icon: 'cash', text: 'pay', format: 'shoebox', uri: email?.email },
    ];
  }, [contact]);
  // Flatten every non-empty array-valued contact field into list sections,
  // attaching per-field value formatting and tap actions.
  const items = React.useMemo<
    {
      title: string;
      data: DetailListItem[];
    }[]
  >(() => {
    if (!contact) return [];
    const items = [];
    for (const key of Object.keys(contact)) {
      const value = (contact as any)[key];
      if (Array.isArray(value) && value.length > 0) {
        const data = value.map((item) => {
          let transform: Partial<DetailListItem> = {};
          switch (key) {
            case Contacts.Fields.Relationships:
              transform = {
                value: item.name,
              };
              break;
            case Contacts.Fields.PhoneNumbers:
              transform = {
                value: item.number,
                onPress: () => Linking.openURL(`tel:${item.number}`),
              };
              break;
            case Contacts.Fields.SocialProfiles:
              transform = {
                value: item.username,
                label: item.label || item.localizedService,
              };
              break;
            case Contacts.Fields.UrlAddresses:
              transform = {
                value: item.url,
                onPress: () => {
                  // Prefix bare hosts with http:// so openURL accepts them.
                  const webUrl = item.url.indexOf('://') === -1 ? 'http://' + item.url : item.url;
                  // tslint:disable-next-line no-console
                  console.log('open', item.url, webUrl);
                  Linking.openURL(webUrl);
                },
              };
              break;
            case Contacts.Fields.Dates:
              transform = {
                value: ContactUtils.parseDate(item).toDateString(),
              };
              break;
            case Contacts.Fields.Emails:
              transform = {
                value: item.email,
                onPress: () => Linking.openURL(encodeURI(`mailto:${item.email}`)),
              };
              break;
            case Contacts.Fields.Addresses:
              {
                // Open the platform's maps app with the address as destination.
                const address = ContactUtils.parseAddress(item);
                const targetUriAdress = encodeURI(address);
                transform = {
                  value: address,
                  onPress: () =>
                    Linking.openURL(
                      Platform.select<string>({
                        ios: `http://maps.apple.com/maps?daddr=${targetUriAdress}`,
                        default: `http://maps.google.com/maps?daddr=${targetUriAdress}`,
                      })
                    ),
                };
              }
              break;
            case Contacts.Fields.InstantMessageAddresses:
              transform = {
                value: item.username,
              };
              break;
            default:
              break;
          }
          return {
            type: key,
            ...item,
            ...transform,
          };
        });
        items.push({
          title: ContactUtils.parseKey(key),
          data,
        });
      }
    }
    return items;
  }, [contact]);
  // Tapping the avatar opens the photo picker (iOS only).
  const onPressImage = async () => {
    if (!isIos) {
      return;
    }
    _selectPhoto();
  };
  // Initial load on mount.
  React.useEffect(() => {
    loadAsync();
  }, []);
  // Persist a newly picked photo to the contact, then reload it.
  const _setNewPhoto = async (uri: string) => {
    // console.log(this.id, this.state.contact, uri);
    try {
      await Contacts.updateContactAsync({
        [Contacts.Fields.ID]: id,
        [Contacts.Fields.Image]: uri,
      } as any);
    } catch ({ message }) {
      // tslint:disable-next-line no-console
      console.error(message);
    }
    loadAsync();
  };
  // Ask for media-library access, then let the user pick/crop a photo.
  const _selectPhoto = async () => {
    const permission = await getPermissionAsync();
    if (!permission) {
      return;
    }
    const result = await ImagePicker.launchImageLibraryAsync({
      allowsEditing: true,
      aspect: [4, 3],
    });
    if (!result.cancelled) {
      _setNewPhoto(result.uri);
    }
  };
  // Avatar, name, subtitles and the row of quick-action link buttons.
  const renderListHeaderComponent = () => {
    return (
      <View style={styles.header}>
        <View style={{ alignItems: 'center', marginBottom: 8 }}>
          <ContactsAvatar
            style={styles.image}
            onPress={onPressImage}
            name={contact?.name ?? ''}
            image={contact?.image?.uri}
          />
          <Text style={styles.name}>{contact?.name}</Text>
          {subtitles.map((subtitle, index) => (
            <Text key={index} style={styles.subtitle}>
              {subtitle}
            </Text>
          ))}
        </View>
        <View style={{ flexDirection: 'row', justifyContent: 'space-between' }}>
          {links.map((linkedItem, index) => (
            <LinkedButton {...linkedItem} key={index} />
          ))}
        </View>
      </View>
    );
  };
  const renderListFooterComponent = () => (
    <Text onPress={deleteAsync} style={styles.footer}>
      Delete Contact
    </Text>
  );
  if (!contact) {
    return <View />;
  }
  return (
    <View style={styles.container}>
      <ContactDetailList
        refreshControl={<RefreshControl refreshing={refreshing} onRefresh={loadAsync} />}
        ListFooterComponent={renderListFooterComponent}
        ListHeaderComponent={renderListHeaderComponent}
        sections={items}
      />
    </View>
  );
}
/**
 * Round icon button that deep-links into another app (sms/tel/mailto/...).
 * Rendered greyed out and non-pressable when no target URI is available.
 */
function LinkedButton({
  text,
  icon,
  uri,
  format,
}: {
  uri?: string | null;
  format: string;
  text: string;
  icon: string;
}) {
  const enabled = !!uri;
  let color = 'gray';
  let backgroundColor = 'transparent';
  if (enabled) {
    color = 'white';
    backgroundColor = Colors.tintColor;
  }
  const openLink = () => Linking.openURL(`${format}:${encodeURIComponent(uri ?? '')}`);
  return (
    <TouchableOpacity disabled={!enabled} onPress={openLink}>
      <View style={[styles.linkButton, { backgroundColor }]}>
        <Ionicons name={`ios-${icon}` as any} size={20} color={color} />
      </View>
      <Text style={[styles.linkButtonText, { color: backgroundColor }]}>{text}</Text>
    </TouchableOpacity>
  );
}
// Static styles for the contact detail screen.
const styles = StyleSheet.create({
  button: {
    marginVertical: 10,
  },
  container: {
    flex: 1,
    alignItems: 'stretch',
  },
  contactRow: {
    marginBottom: 12,
  },
  // Avatar spacing in the header.
  image: {
    marginVertical: 16,
  },
  name: {
    fontSize: 24,
    textAlign: 'center',
    marginBottom: 6,
  },
  subtitle: {
    opacity: 0.8,
    textAlign: 'center',
    fontSize: 16,
    marginBottom: 2,
  },
  // Circular deep-link button (width == height == 2 * borderRadius).
  linkButton: {
    width: 40,
    height: 40,
    borderRadius: 20,
    marginBottom: 4,
    borderWidth: StyleSheet.hairlineWidth,
    borderColor: 'rgba(0,0,0,0.1)',
    justifyContent: 'center',
    alignItems: 'center',
  },
  linkButtonText: {
    fontSize: 10,
    textAlign: 'center',
  },
  // Red "Delete Contact" footer action.
  footer: {
    width: '100%',
    padding: 24,
    textAlign: 'center',
    justifyContent: 'center',
    alignItems: 'center',
    color: 'red',
  },
  header: {
    paddingHorizontal: 36,
    paddingVertical: 16,
    flex: 1,
    alignItems: 'stretch',
    backgroundColor: Colors.greyBackground,
  },
  permissionContainer: {
    flex: 1,
    justifyContent: 'center',
    alignItems: 'center',
  },
});
import {
Backup,
Backups,
BACKUPS_PER_DAY,
BACKUPS_PER_NOTEBOOK,
clean,
cleanCells,
cleanConfig,
ClientBackup,
todayTs,
typedUpdate
} from "./client_backup";
import {NotebookCells, UpdateConfig} from "../data/messages";
import {CellComment, CellMetadata, NotebookCell, NotebookConfig} from "../data/data";
import {PosRange} from "../data/result";
import * as MockDate from "mockdate";
// Freeze "now" for the whole suite and wipe stored backups before each test,
// so every case starts from an empty backup store at a fixed time.
const nowTime = new Date()
beforeEach(done => {
    MockDate.set(nowTime)
    ClientBackup.clearBackups().then(() => done())
})
// Shared fixtures: a two-cell notebook (scala cell carrying a comment,
// python cell with hidden output) using the default notebook config.
const sampleConfig = NotebookConfig.default;
const sampleComment = new CellComment("sampleCommentUUID", new PosRange(1, 2), "me", undefined, Date.now(), "this is my comment")
const sampleCells = [
    new NotebookCell(0, "scala", "val x = 1", [], undefined, {[sampleComment.uuid]: sampleComment}),
    new NotebookCell(1, "python", "dir(x)", [], new CellMetadata().copy({hideOutput: true})),
]
const sampleNotebook = new NotebookCells("somePath", sampleCells, sampleConfig)
// Tests for the ClientBackup store: verifies that backups are persisted,
// de-duplicated, listable, updatable, and bounded by daily/lifetime limits.
describe("ClientBackup", () => {
    it("stores backups for a notebook", done => {
        const expectedBackups = new Backups(sampleNotebook.path, {
            [todayTs()]: [new Backup(sampleNotebook.path, cleanCells(sampleNotebook.cells), cleanConfig(sampleNotebook.config!))]
        })
        ClientBackup.addNb(sampleNotebook.path, sampleNotebook.cells, sampleNotebook.config)
            .then(backup => {
                // addNb resolves with the stored backups for the notebook
                expect(backup).toEqual(expectedBackups)
            })
            .then(() => {
                return ClientBackup.getBackups(sampleNotebook.path)
            }).then(backup => {
                // reading the store back must yield the same backups
                expect(backup).toEqual(expectedBackups)
            })
            .then(done)
    })
    it("stores multiple backups, but only if the notebook has changed", done => {
        const changedNbConfig = new NotebookConfig({scala: ["someScalaDep"]})
        const expectedBackups = new Backups(sampleNotebook.path, {
            [todayTs()]: [
                new Backup(sampleNotebook.path, cleanCells(sampleNotebook.cells), cleanConfig(sampleNotebook.config!)),
                new Backup(sampleNotebook.path, cleanCells(sampleNotebook.cells), cleanConfig(changedNbConfig))
            ]
        })
        // first, demonstrate that adding the same notebook over and over doesn't change the backups
        ClientBackup.addNb(sampleNotebook.path, sampleNotebook.cells, sampleNotebook.config)
            .then(() => {
                return ClientBackup.addNb(sampleNotebook.path, sampleNotebook.cells, sampleNotebook.config)
            }).then(() => {
                return ClientBackup.addNb(sampleNotebook.path, sampleNotebook.cells, sampleNotebook.config)
            }).then(() => {
                return ClientBackup.getBackups(sampleNotebook.path)
            }).then(backups => {
                expect(Object.values(backups.backups)).toHaveLength(1)
                // next, add changed notebook
                return ClientBackup.addNb(sampleNotebook.path, sampleNotebook.cells, changedNbConfig)
            })
            .then(() => {
                return ClientBackup.getBackups(sampleNotebook.path)
            }).then(backups => {
                expect(backups).toEqual(expectedBackups) // backups should now have the new nb we added
                // again, if we repeat the addition, we expect nothing to happen
                return ClientBackup.addNb(sampleNotebook.path, sampleNotebook.cells, changedNbConfig)
            }).then(() => {
                return ClientBackup.getBackups(sampleNotebook.path)
            }).then(backups => {
                expect(backups).toEqual(expectedBackups) // backups haven't changed.
            })
            .then(done)
    })
    it("can provide a list of all backups", done => {
        // two distinct day buckets: "today" and a mocked "tomorrow"
        const today = todayTs()
        MockDate.set(today)
        const tomorrowDate = new Date()
        tomorrowDate.setDate(tomorrowDate.getDate() + 1)
        const tomorrow = todayTs(tomorrowDate)
        const changedNbConfig1 = new NotebookConfig({scala: ["someScalaDep1"]})
        const changedNbConfig2 = new NotebookConfig({scala: ["someScalaDep2"]})
        const changedNbConfig3 = new NotebookConfig({scala: ["someScalaDep3"]})
        const expectedBackups = clean({
            [sampleNotebook.path]: new Backups(sampleNotebook.path, {
                [today]: [
                    new Backup(sampleNotebook.path, cleanCells(sampleNotebook.cells), cleanConfig(sampleNotebook.config!), today),
                    new Backup(sampleNotebook.path, cleanCells(sampleNotebook.cells), cleanConfig(changedNbConfig1), today)
                ],
                [tomorrow]: [
                    new Backup(sampleNotebook.path, cleanCells(sampleNotebook.cells), cleanConfig(changedNbConfig2), tomorrow),
                    new Backup(sampleNotebook.path, cleanCells(sampleNotebook.cells), cleanConfig(changedNbConfig3), tomorrow)
                ]
            })
        })
        // add today's notebooks
        ClientBackup.addNb(sampleNotebook.path, sampleNotebook.cells, sampleNotebook.config).then(() => {
            return ClientBackup.addNb(sampleNotebook.path, sampleNotebook.cells, changedNbConfig1)
        }).then(() => {
            // tick date and add tomorrow's
            MockDate.set(tomorrow)
            return ClientBackup.addNb(sampleNotebook.path, sampleNotebook.cells, changedNbConfig2).then(() => {
                return ClientBackup.addNb(sampleNotebook.path, sampleNotebook.cells, changedNbConfig3)
            })
        }).then(() => {
            return ClientBackup.allBackups()
        }).then(backups => {
            expect(backups).toEqual(expectedBackups)
        }).then(done)
    })
    it("collects notebook updates for existing backups", done => {
        const update = new UpdateConfig(0, 0, sampleNotebook.config!)
        const expected = new Backups(sampleNotebook.path, {
            [todayTs()]: [
                new Backup(sampleNotebook.path, cleanCells(sampleNotebook.cells), cleanConfig(sampleNotebook.config!), Date.now(), [{ts: Date.now(), update: typedUpdate(update)}])
            ]
        })
        ClientBackup.addNb(sampleNotebook.path, sampleNotebook.cells, sampleNotebook.config).then(() => {
            return ClientBackup.updateNb(sampleNotebook.path, update)
        }).then(() => {
            return ClientBackup.getBackups(sampleNotebook.path)
        }).then(backup => {
            expect(backup).toEqual(expected)
        }).then(done)
    })
    it("has daily and lifetime limits to the number of backups it stores", done => {
        // quiet console.warn for this test
        jest.spyOn(console, 'warn').mockImplementation(() => {});
        // BACKUPS_PER_NOTEBOOK distinct notebooks (configs differ by one dep)
        const limitNotebooks = [...Array(BACKUPS_PER_NOTEBOOK).keys()].map(i => {
            return {
                ...sampleNotebook,
                config: new NotebookConfig({scala: ["dep" + i]})
            }
        })
        // first, add all the limitNotebooks "today" and note that we only keep the last BACKUPS_PER_DAY
        const tryAll = limitNotebooks.reduce<Promise<Backups> | undefined>((promiseChain, nextNotebook) => {
            if (promiseChain === undefined) {
                return ClientBackup.addNb(nextNotebook.path, nextNotebook.cells, nextNotebook.config)
            } else return promiseChain.then(() => ClientBackup.addNb(nextNotebook.path, nextNotebook.cells, nextNotebook.config))
        }, undefined)
        tryAll!.then(() => {
            return ClientBackup.getBackups(sampleNotebook.path)
        }).then(backups => {
            const allBackups = Object.values(backups.backups)
            expect(allBackups.flat().length).toEqual(BACKUPS_PER_DAY)
            const lastNotebooks = limitNotebooks.map(nb => new Backup(nb.path, cleanCells(nb.cells), cleanConfig(nb.config!)).toI()).slice(-BACKUPS_PER_DAY)
            expect(backups.backups[todayTs()]).toEqual(lastNotebooks)
        }).then(() => {
            // next, let's add notebooks to different days and show that we only keep the last BACKUPS_PER_NOTEBOOK
            return limitNotebooks.reduce<Promise<Backups> | undefined>((promiseChain, nextNotebook, idx) => {
                if (promiseChain === undefined) {
                    return ClientBackup.addNb(nextNotebook.path, nextNotebook.cells, nextNotebook.config)
                } else return promiseChain.then(() => {
                    if (idx % BACKUPS_PER_DAY === 0) {
                        const date = new Date()
                        date.setDate(date.getDate() + 1)
                        MockDate.set(date)
                    }
                    return ClientBackup.addNb(nextNotebook.path, nextNotebook.cells, nextNotebook.config)
                })
            }, undefined)!
                .then(() => {
                    return ClientBackup.getBackups(sampleNotebook.path)
                }).then(backups => {
                    const allBackups = Object.values(backups.backups).flat()
                    expect(allBackups.length).toEqual(BACKUPS_PER_NOTEBOOK)
                })
        }).then(() => {
            // next, show that the number should stay the same even if we add another notebook
            return ClientBackup.addNb(sampleNotebook.path, sampleNotebook.cells, new NotebookConfig({python: ["foo"]}))
                .then(() => {
                    return ClientBackup.getBackups(sampleNotebook.path)
                }).then(backups => {
                    const allBackups = Object.values(backups.backups).flat()
                    expect(allBackups.length).toEqual(BACKUPS_PER_NOTEBOOK)
                })
        }).then(() => {
            // finally, show that the number does change if we add another backup from a different notebook!
            return ClientBackup.addNb("otherPath", sampleNotebook.cells, new NotebookConfig({python: ["foo"]}))
                .then(() => {
                    return ClientBackup.allBackups()
                }).then(backups => {
                    const allBackups = Object.values(backups).flatMap(b => Object.values(b.backups)).flat()
                    expect(allBackups.length).toEqual(BACKUPS_PER_NOTEBOOK + 1)
                })
        }).then(done)
    })
})
// Callback invoked when a listened-to field (or the whole form) changes.
export type ListenerCallback = () => void;
// Listener registry keyed by the id returned from listen()/listenAny().
export type ListenerMap = { [T in string]?: ListenerCallback };
// Form validator: maps the current values to an error structure (or nothing).
// May be synchronous or return a promise.
export type Validator<T, Error> = (values: T) => ErrorType<T, Error> | undefined | Promise<ErrorType<T, Error> | undefined>;
// Child forms, one per object-typed field of T.
export type ChildFormMap<T extends object, State, Error extends string> = {
    [K in KeysOfType<T, object>]?: ChildFormState<T, K, State, Error>;
};
// Per-field "has been modified" flags.
export type DirtyMap<T> = {
    [Key in keyof T]?: boolean;
};
// Error for a field: either a leaf Error value or, for object-typed fields,
// a nested map mirroring the field's structure.
export type ErrorType<T, Error> = Error | (T extends {} ? ErrorMap<NonNullable<T>, Error> : never);
export type ErrorMap<T, Error> = {
    [Key in keyof T]?: ErrorType<T[Key], Error>;
};
// Defaults used when the consumer does not specify the Error/State params.
export type DefaultError = string;
export type DefaultState = { isSubmitting: boolean };
// Keeps only the keys of T whose (non-nullable) value type extends Field.
export type FieldsOfType<T, Field> = {
    [Key in keyof T as NonNullable<T[Key]> extends Field ? Key : never]: T[Key];
};
export type KeysOfType<T extends FieldsOfType<any, Field>, Field> = keyof FieldsOfType<T, Field>;
/**
 * Returns a shallow copy of an array or plain object. Any other value is
 * rejected, since form values/state must always be object-like.
 */
function memberCopy<T>(value: T): T {
    if (Array.isArray(value)) {
        return value.slice() as any;
    }
    if (typeof value === "object") {
        return Object.assign({}, value) as any;
    }
    throw new Error(`Can only memberCopy() arrays and objects, got '${String(value)}'. Probably due to invalid useForm() value.`);
}
/**
 * Appends to arr1 (in place) every element of arr2 that arr1 does not
 * already contain; relative order of the appended items is preserved.
 */
function addDistinct<T extends any[]>(arr1: T, arr2: T) {
    for (const item of arr2) {
        if (!arr1.includes(item)) {
            arr1.push(item);
        }
    }
}
export class FormState<T extends object, State = DefaultState, Error extends string = DefaultError> {
    /**
     * The id of this form, for debugging purposes.
     */
    public readonly formId = ++FormState.formCounter;
    /**
     * The form's validator.
     */
    public validator?: Validator<T, Error>;
    /**
     * Should the form validate on each value change?
     */
    public validateOnChange: boolean;
    /**
     * Should the form validate when it gets mounted?
     */
    public validateOnMount: boolean;
    /**
     * The values on this form. Use `setValues()` to set these.
     */
    public readonly values: T;
    /**
     * The default values on this form. Use `setValues(?,?,true)` to set these.
     */
    public readonly defaultValues: T;
    /**
     * The dictionary that maps object fields to child forms.
     */
    public readonly childMap: ChildFormMap<T, State, Error> = {};
    /**
     * The dictionary that contains dirty states for each field.
     */
    public readonly dirtyMap: DirtyMap<T> = {};
    /**
     * The dictionary that contains errors for each field.
     */
    public readonly errorMap: ErrorMap<T, Error> = {};
    private _state: State;
    private listeners: { [Key in keyof T]?: ListenerMap } = {};
    private anyListeners: ListenerMap = {};
    private counter = 0;
    private static formCounter = 0;
    public constructor(
        values: T,
        defaultValues: T,
        defaultState: State,
        validator: Validator<T, Error> | undefined,
        validateOnMount: boolean,
        validateOnChange: boolean
    ) {
        // Copy inputs so external mutation cannot silently desync the form.
        this.values = memberCopy(values);
        this.defaultValues = memberCopy(defaultValues);
        this._state = memberCopy(defaultState);
        this.validator = validator;
        this.validateOnMount = validateOnMount;
        this.validateOnChange = validateOnChange;
    }
    /**
     * Gets the state of the current form.
     */
    public get state() {
        return this._state;
    }
    /**
     * Is this form modified?
     */
    public get dirty() {
        return Object.keys(this.dirtyMap).some((e) => this.dirtyMap[e]);
    }
    /**
     * Does this form contain any error?
     */
    public get error() {
        return Object.keys(this.errorMap).some((e) => this.errorMap[e]);
    }
    /**
     * Sets a value without calculating whether is has been modified.
     * @param dirty Is this field dirty? Leave undefined to not set any dirty value. (can always be overridden by child forms)
     * @param validate Should the form validate after value set? Overrides `validateOnChange`.
     * @param isDefault Is this the default value for the said field?
     * @param notifyChild Should this form notify any child form about the change?
     * @param notifyParent Should this form notify any parent form about the change?
     * @param fireAny Fire all `anyListeners` after field is set? Will be false for bulk sets (`setValues`), which call fireAnyListeners() themselves after every field is set.
     */
    public setValueInternal<Key extends keyof T>(
        key: Key,
        value: T[Key] | undefined,
        dirty: boolean,
        validate?: boolean,
        isDefault: boolean = false,
        notifyChild: boolean = true,
        notifyParent: boolean = true,
        fireAny: boolean = true
    ) {
        let valueMap = isDefault ? this.defaultValues : this.values;
        if (value === undefined) {
            if (Array.isArray(valueMap)) {
                // Deleting a key in an array doesn't work, splice instead
                valueMap.splice(key as number, 1);
            } else {
                delete valueMap[key];
            }
        } else {
            valueMap[key] = value;
        }
        this.dirtyMap[key] = dirty;
        if (notifyChild) {
            let child = this.childMap[key as any];
            if (child) {
                child.setValues(value, validate, isDefault, true, false);
                // The child form knows best whether its subtree is dirty.
                this.dirtyMap[key] = child.dirty;
            }
        }
        this.fireListeners(key);
        if (fireAny) this.fireAnyListeners(); // Will be false when using setValues, he will call fireAnyListeners and notifyParentValues itself
        if (notifyParent && this instanceof ChildFormState) {
            this.parent.setValueInternal(
                this.name,
                Object.keys(valueMap).length > 0 ? memberCopy(valueMap) : undefined,
                this.dirty,
                validate,
                isDefault,
                false,
                true,
                true
            );
        }
        if (validate ?? (this.validateOnChange && this.validator)) this.validate();
    }
    /**
     * Set a value on this form.
     * @param key The field to set.
     * @param value The field's new value.
     * @param validate Should the form validate?
     * @param isDefault Is this the default value?
     * @param notifyChild Should this form notify the child form about this change?
     * @param notifyParent Should this form notify the parent form about this change?
     * @param fireAny Fire all `anyListeners` after field is set?
     */
    public setValue<Key extends keyof T>(
        key: Key,
        value: T[Key] | undefined,
        validate?: boolean,
        isDefault: boolean = false,
        notifyChild: boolean = true,
        notifyParent: boolean = true,
        fireAny: boolean = true
    ) {
        // value can contain the default value or normal value. (Determined by isDefault)
        let other = isDefault ? this.values[key] : this.defaultValues[key];
        if (typeof value === "object" && value !== null) {
            let dirty: boolean | undefined = false;
            // Compare objects if there is no child form, because it calculates the dirty value for us
            if (!(key in this.childMap)) {
                if (value instanceof Date && other instanceof Date) {
                    // Compare date objects
                    dirty = value?.getTime() !== other?.getTime();
                } else {
                    dirty = JSON.stringify(value) !== JSON.stringify(other);
                }
            }
            this.setValueInternal(key, value, dirty, validate, isDefault, notifyChild, notifyParent, fireAny);
        } else {
            // Compare value and existing value/defaultValue which determines dirty
            let dirty = value !== other;
            // Skip work entirely when neither the value nor the dirty flag changes.
            if ((isDefault ? this.defaultValues[key] : this.values[key]) === value && this.dirtyMap[key] === dirty) {
                return;
            }
            this.setValueInternal(key, value, dirty, validate, isDefault, notifyChild, notifyParent, fireAny);
        }
    }
    /**
     * Set multiple values OR default values on this form.
     * @param values The new values to set on this form.
     * @param validate Validate? Overrides `validateOnChange`.
     * @param isDefault Leave undefined to set both `values` and `defaultValues`. Set to true to only set `defaultValues` and false to only set `values`.
     * @param notifyChild Should this form notify the child form about this change?
     * @param notifyParent Should this form notify the parent form about this change?
     */
    public setValues(
        values: Partial<T> | undefined,
        validate?: boolean,
        isDefault?: boolean,
        notifyChild: boolean = true,
        notifyParent: boolean = true
    ) {
        if (isDefault === undefined) {
            // Recurse once to also copy into defaultValues, then continue
            // below with the normal values.
            this.setValues(values, false, true, notifyChild, notifyParent);
            isDefault = false;
        }
        let keys = Object.keys(isDefault ? this.defaultValues : this.values);
        let v: typeof values = values ?? {};
        addDistinct(keys, Object.keys(v));
        // Traverse backwards, so when removing array items, the whole array gets shifted in the right direction
        for (let i = keys.length - 1; i >= 0; i--) {
            let key = keys[i] as keyof T;
            this.setValue(
                key,
                v[key],
                false, // Will validate after all values are copied
                isDefault,
                notifyChild,
                false, // Will call updateParentValues by itself after all values are copied, see 3 lines down
                false // Will call fireAnyListener by itself after all values are copied, see 3 lines down
            );
        }
        this.fireAnyListeners();
        if (notifyParent && this instanceof ChildFormState) {
            if (typeof values === "object" && values !== null) {
                this.parent.setValueInternal(
                    this.name,
                    isDefault ? memberCopy(this.defaultValues) : memberCopy(this.values),
                    this.dirty,
                    validate,
                    isDefault,
                    false,
                    true,
                    true
                );
            } else {
                this.parent.setValueInternal(this.name, values, this.dirty, validate, isDefault, false, true, true);
            }
        }
        if (validate ?? (this.validateOnChange && this.validator)) this.validate();
    }
    /**
     * Force validation on this form. Required when `validateOnChange` is disabled. **This function works with both asynchronous and synchronous validators.**
     * @returns true if the form is valid.
     */
    public async validate() {
        if (!this.validator) return true;
        let r = this.validator(this.values);
        if (r instanceof Promise) r = await r;
        this.setErrors(r ?? ({} as ErrorType<T, Error>));
        return !this.error;
    }
    /**
     * Force validation on this form. Required when `validateOnChange` is disabled. **This only works if you have a synchronous validator set (not async).**
     * @returns true if the form is valid.
     */
    public validateSync() {
        if (!this.validator) return true;
        let r = this.validator(this.values);
        if (r instanceof Promise)
            throw new Error("validateSync() was called on a form with an asynchronous validator set, please use `await form.validate()` instead.");
        this.setErrors(r ?? ({} as ErrorType<T, Error>));
        return !this.error;
    }
    /**
     * Sets an error on this form
     * @param key The field to set an error on.
     * @param error The error.
     * @param notifyChild Should this form notify the child form about this change?
     * @param notifyParent Should this form notify the parent form about this change?
     * @param fireAny Fire all `anyListeners` after field is set?
     * @returns true when something actually changed.
     */
    public setError<Key extends keyof T>(
        key: Key,
        error: ErrorType<NonNullable<T[Key]>, Error> | undefined,
        notifyChild: boolean = true,
        notifyParent: boolean = true,
        fireAny: boolean = true
    ) {
        if (typeof error !== "object" && this.errorMap[key] === error) return false;
        if (!error) delete this.errorMap[key];
        else this.errorMap[key] = error;
        if (notifyChild && this.childMap[key as any]) {
            let changed = this.childMap[(key as unknown) as KeysOfType<T, object>]!.setErrors(error ?? ({} as any), true, false);
            if (!changed && error !== undefined) return false;
        }
        this.fireListeners(key);
        if (fireAny) this.fireAnyListeners(); // Will be false during setErrors, which fires the any-listeners itself after all fields are set
        if (notifyParent && this instanceof ChildFormState) {
            this.parent.setError(this.name, this.error ? memberCopy(this.errorMap) : undefined, false, true);
        }
        return true;
    }
    /**
     * Sets all the errors on this form.
     * @param errors The new errors for this form. Use {} to clear errors. **The format of this error object must follow the same structure of the values object, but each value is replaced by its error.**
     * @param notifyChild Should this form notify the child form about this change?
     * @param notifyParent Should this form notify the parent form about this change?
     */
    public setErrors(errors: ErrorType<T, Error>, notifyChild: boolean = true, notifyParent: boolean = true) {
        let keys = Object.keys(this.errorMap);
        if (typeof errors === "string") {
            // A plain string error applies to the whole (child) form; push it
            // up to the parent and clear the per-field errors below.
            if (notifyParent && this instanceof ChildFormState) {
                this.parent.setError(this.name, errors, false, true);
            }
            errors = {} as ErrorType<T, Error>;
        } else {
            addDistinct(keys, Object.keys(errors));
        }
        let changed = false;
        // FIX: start at keys.length - 1; the previous `i = keys.length` start
        // read keys[keys.length] === undefined and called setError() with an
        // undefined key on every invocation (matches setValues' loop).
        for (let i = keys.length - 1; i >= 0; i--) {
            let key = keys[i] as keyof T;
            if (
                this.setError(
                    key,
                    (errors as ErrorMap<T, Error>)[key] as ErrorType<NonNullable<T[keyof T]>, Error>,
                    notifyChild,
                    false, // Will call this.parent.setError by itself after all values have been copied, see 3 lines down
                    false // Will call fireAnyListener by itself after all values have been copied, see 3 lines down
                )
            ) {
                changed = true;
            }
        }
        if (!changed) return false;
        this.fireAnyListeners();
        if (notifyParent && this instanceof ChildFormState) {
            this.parent.setError(this.name, this.error ? (memberCopy(this.errorMap) as any) : undefined, false, true);
        }
        return true;
    }
    /**
     * Reset this form's values to the default values.
     * @param notifyChild Should this form notify the child form about this change?
     * @param notifyParent Should this form notify the parent form about this change?
     */
    public resetAll(validate?: boolean, notifyChild: boolean = true, notifyParent: boolean = true) {
        this.setValues(this.defaultValues, validate ?? true, false, notifyChild, notifyParent);
    }
    /**
     * Reset a form's field to its default value.
     * @param key The field to reset.
     * @param notifyChild Should this form notify the child form about this change?
     * @param notifyParent Should this form notify the parent form about this change?
     */
    public reset(key: keyof T, validate?: boolean, notifyChild: boolean = true, notifyParent: boolean = true) {
        this.setValue(key, this.defaultValues[key], validate ?? true, false, notifyChild, notifyParent);
    }
    /**
     * Sets the state for this form, and also on child and parent forms by default.
     * @param newState The new form state.
     * @param notifyChild Set the state on the child too?
     * @param notifyParent Set the state on the parent too?
     */
    public setState(newState: State, notifyChild: boolean = true, notifyParent: boolean = true) {
        this._state = newState;
        let c = Object.keys(this.values) as (keyof T)[];
        if (notifyChild) c.forEach((e) => (this.childMap[e as any] as ChildFormState<T, any, State, Error>)?.setState(newState, true, false));
        c.forEach((e) => this.fireListeners(e));
        this.fireAnyListeners();
        if (notifyParent && this instanceof ChildFormState) {
            this.parent.setState(memberCopy(this.state), false, true);
        }
    }
    /**
     * Creates a submit handler to pass to your `<form onSubmit={...}>`. The function executes the passed handler only if the form validates correctly.
     * @param handler The handler to execute when this form contains no errors.
     */
    public handleSubmit(handler: (form: FormState<T, State, Error>, ev: React.FormEvent<HTMLFormElement>) => void | Promise<void>) {
        async function handle(this: FormState<T, State, Error>, ev: React.FormEvent<HTMLFormElement>) {
            ev.preventDefault();
            // Show helpful warning when using buttons to submit
            if (process.env.NODE_ENV === "development") {
                let buttons = Array.from((ev.target as HTMLFormElement).querySelectorAll("button"));
                let noTypeButton = buttons.find((e) => !("type" in e.attributes));
                if (noTypeButton) {
                    console.error(
                        `The submitted form contains a button without a type attribute. Please populate every button in your form with either type="button" or type="submit".`,
                        noTypeButton
                    );
                }
            }
            if (!(await this.validate())) return;
            this.setState({ ...this.state, isSubmitting: true });
            await handler(this, ev);
            this.setState({ ...this.state, isSubmitting: false });
        }
        return handle.bind(this);
    }
    /**
     * Listen for changes on a field, will trigger when value, defaultValue, dirty and error changes for a field. Make sure you pass its return value back to `ignore()` after you are done listening.
     * @param key The field to listen to.
     * @param listener Change callback.
     */
    public listen(key: keyof T, listener: ListenerCallback): string {
        if (!this.listeners) this.listeners = {};
        let setters = this.listeners[key];
        if (!setters) {
            setters = {};
            this.listeners[key] = setters;
        }
        let id = "" + this.counter++;
        setters[id] = listener;
        return id;
    }
    /**
     * Listen for any change on this form. Make sure you pass its return value back to `ignoreAny()` after you are done listening.
     * @param listener Change callback.
     */
    public listenAny(listener: ListenerCallback) {
        if (!this.anyListeners) this.anyListeners = {};
        let id = "" + this.counter++;
        this.anyListeners[id] = listener;
        return id;
    }
    /**
     * Ignore changes on a field.
     * @param key The field to ignore.
     * @param id The callback to ignore.
     */
    public ignore(key: keyof T, id: string) {
        if (!this.listeners) return;
        let setters = this.listeners[key];
        if (!setters) {
            console.warn("Ignore was called for no reason", key, id);
            return;
        }
        delete setters[id];
    }
    /**
     * Ignore changes on this form.
     * @param id The callback to ignore.
     */
    public ignoreAny(id: string) {
        if (!this.anyListeners) return;
        delete this.anyListeners[id];
    }
    protected fireListeners(key: keyof T) {
        let a = this.listeners[key];
        if (a) {
            let l = Object.keys(a!);
            l.forEach((e) => a![e]!());
        }
    }
    protected fireAnyListeners() {
        let al = Object.keys(this.anyListeners);
        al.forEach((e) => this.anyListeners[e]!());
    }
}
/**
 * A FormState nested inside a parent form under an object-typed field.
 * It is seeded from the parent's value/defaultValue for that field and
 * propagates changes in both directions (see the notifyChild/notifyParent
 * flags on the FormState methods).
 */
export class ChildFormState<T extends FieldsOfType<any, object>, K extends KeysOfType<T, object>, State, Error extends string> extends FormState<
    NonNullable<T[K]>,
    State,
    Error
> {
    // The key in the parent's values that this child form represents.
    public name: K;
    public readonly parent: FormState<T, State, Error>;
    public constructor(parent: FormState<T, State, Error>, name: K) {
        super(
            parent.values[name] ?? ({} as any),
            parent.defaultValues[name] ?? ({} as any),
            parent.state,
            undefined, // child forms have no validator of their own
            parent.validateOnMount,
            parent.validateOnChange
        );
        this.parent = parent;
        this.name = name;
    }
}
import { getLogger } from 'pinus-logger';
import { MqttServer, MqttSocket } from '../protocol/mqtt/mqttServer';
import { EventEmitter } from 'events';
import { MasterSocket } from './masterSocket';
import * as protocol from '../util/protocol';
import * as utils from '../util/utils';
import * as Util from 'util';
import { ConsoleService } from '../consoleService';
import { ServerInfo, AdminUserInfo, AdminServerInfo, Callback } from '../util/constants';
import * as path from 'path';
import { MqttConnection } from '../protocol/mqtt/mqttConnectorDefine';
let logger = getLogger('pinus-admin', path.basename(__filename));
// Agent lifecycle states: created -> listening -> closed.
let ST_INITED = 1;
let ST_STARTED = 2;
let ST_CLOSED = 3;
// NOTE(review): presumably a connection whitelist consumed by the socket
// handshake — confirm against MasterSocket's register handling.
export type WhiteList = string[];
export interface MasterAgentOptions { whitelist?: WhiteList; }
// A registered connection (monitor server or admin client) tracked by the agent.
export interface AgentClient {
    id: string;
    type: string;
    pid: string;
    info: AdminUserInfo | ServerInfo;
    socket: MqttSocket;
}
// Handshake payloads: admin-user auth and monitored-server auth respectively.
export interface AuthUserRequest { username: string; password: string; md5: string; id: string; type: string; }
export interface AuthServerRequest { id: string; serverType: string; token: string; pid: string; info: ServerInfo; }
/**
* MasterAgent Constructor
*
* @class MasterAgent
* @constructor
* @param {Object} opts construct parameter
* opts.consoleService {Object} consoleService
* opts.id {String} server id
* opts.type {String} server type, 'master', 'connector', etc.
* opts.socket {Object} socket-io object
* opts.reqId {Number} reqId add by 1
* opts.callbacks {Object} callbacks
* opts.state {Number} MasterAgent state
* @api public
*/
export class MasterAgent extends EventEmitter {
    // Monotonic id used to correlate requests with monitor responses.
    reqId = 1;
    // serverId -> registered client (the primary record for that server id).
    idMap: { [serverId: string]: AgentClient } = {};
    // serverId -> (reqId -> original request), kept while a request is pending.
    msgMap: {
        [serverId: string]: {
            [reqId: number]: {
                moduleId: string,
                msg: any
            }
        }
    } = {};
    // server type -> clients of that type.
    typeMap: { [type: string]: AgentClient[] } = {};
    // client id -> client record.
    clients: { [id: string]: AgentClient } = {};
    // raw socket id -> underlying mqtt connection.
    sockets: { [id: string]: MqttConnection } = {};
    // serverId -> additional clients sharing that id; requestServer/notifyByServer
    // pick among them by comparing host:port (utils.compareServer).
    slaveMap: { [serverId: string]: AgentClient[] } = {};
    server: MqttServer = null;
    // reqId -> pending callback awaiting a monitor response.
    callbacks: { [reqId: number]: Callback } = {};
    state = ST_INITED;
    whitelist: WhiteList;
    consoleService: ConsoleService;
    // Wires the agent to its console service; the server itself is created
    // lazily in listen().
    constructor(consoleService: ConsoleService, opts: MasterAgentOptions) {
        super();
        this.whitelist = opts.whitelist;
        this.consoleService = consoleService;
    }
/**
* master listen to a port and handle register and request
*
* @param {String} port
* @api public
*/
listen(port: number, cb: (err?: Error) => void) {
if (this.state > ST_INITED) {
logger.error('master agent has started or closed.');
return;
}
this.state = ST_STARTED;
this.server = new MqttServer();
this.server.listen(port);
// this.server = sio.listen(port);
// this.server.set('log level', 0);
cb = cb || function () { };
let self = this;
this.server.on('error', function (err) {
self.emit('error', err);
cb(err);
});
this.server.once('listening', function () {
setImmediate(function () {
cb();
});
});
this.server.on('connection', function (socket) {
// let id, type, info, registered, username;
let masterSocket = new MasterSocket();
masterSocket['agent'] = self;
masterSocket['socket'] = socket;
self.sockets[socket.id] = socket;
socket.on('register', function (msg: any) {
// register a new connection
masterSocket.onRegister(msg);
}); // end of on 'register'
// message from monitor
socket.on('monitor', function (msg: any) {
masterSocket.onMonitor(msg);
}); // end of on 'monitor'
// message from client
socket.on('client', function (msg: any) {
masterSocket.onClient(msg);
}); // end of on 'client'
socket.on('reconnect', function (msg: any) {
masterSocket.onReconnect(msg);
});
socket.on('disconnect', function () {
masterSocket.onDisconnect();
});
socket.on('close', function () {
masterSocket.onDisconnect();
});
socket.on('error', function (err: Error) {
masterSocket.onError(err);
});
}); // end of on 'connection'
} // end of listen
/**
* close master agent
*
* @api public
*/
close() {
if (this.state > ST_STARTED) {
return;
}
this.state = ST_CLOSED;
this.server.close();
}
    /**
     * set module
     *
     * Registers a module instance with the underlying console service.
     *
     * @param {String} moduleId module id/name
     * @param {Object} value module object
     * @api public
     */
    set(moduleId: string, value: any) {
        this.consoleService.set(moduleId, value);
    }
    /**
     * get module
     *
     * Looks up a previously registered module on the console service.
     *
     * @param {String} moduleId module id/name
     * @api public
     */
    get(moduleId: string) {
        return this.consoleService.get(moduleId);
    }
    /**
     * getClientById
     *
     * Returns the registered client record, or undefined when unknown.
     *
     * @param {String} clientId
     * @api public
     */
    getClientById(clientId: string) {
        return this.clients[clientId];
    }
/**
* request monitor{master node} data from monitor
*
* @param {String} serverId
* @param {String} moduleId module id/name
* @param {Object} msg
* @param {Function} callback function
* @api public
*/
request(serverId: string, moduleId: string, msg: any, cb: (errOrResult?: Error | any, body?: any) => void) {
if (this.state > ST_STARTED) {
return false;
}
cb = cb || function () { };
let curId = this.reqId++;
this.callbacks[curId] = cb;
if (!this.msgMap[serverId]) {
this.msgMap[serverId] = {};
}
this.msgMap[serverId][curId] = {
moduleId: moduleId,
msg: msg
};
let record = this.idMap[serverId];
if (!record) {
cb(new Error('unknown server id:' + serverId));
return false;
}
this.sendToMonitor(record.socket, curId, moduleId, msg);
return true;
}
    /**
     * request server data from monitor by serverInfo{host:port}
     *
     * @param {String} serverId
     * @param {Object} serverInfo
     * @param {String} moduleId module id/name
     * @param {Object} msg
     * @param {Function} callback function
     * @api public
     */
    requestServer(serverId: string, serverInfo: ServerInfo, moduleId: string, msg: any, cb: Callback) {
        if (this.state > ST_STARTED) {
            return false;
        }
        let record = this.idMap[serverId];
        if (!record) {
            utils.invokeCallback(cb, new Error('unknown server id:' + serverId));
            return false;
        }
        let curId = this.reqId++;
        this.callbacks[curId] = cb;
        // Prefer the primary record when its host:port matches; otherwise
        // search the slaves registered under the same server id.
        if (utils.compareServer(record.info as ServerInfo, serverInfo)) {
            this.sendToMonitor(record.socket, curId, moduleId, msg);
        } else {
            // NOTE(review): if no slave matches serverInfo, the callback
            // registered above is never invoked nor cleaned up — confirm
            // whether callers can hit that case.
            let slaves = this.slaveMap[serverId];
            for (let i = 0, l = slaves.length; i < l; i++) {
                if (utils.compareServer(slaves[i].info as ServerInfo, serverInfo)) {
                    this.sendToMonitor(slaves[i].socket, curId, moduleId, msg);
                    break;
                }
            }
        }
        return true;
    }
/**
* notify a monitor{master node} by id without callback
*
* @param {String} serverId
* @param {String} moduleId module id/name
* @param {Object} msg
* @api public
*/
notifyById(serverId: string, moduleId: string, msg: any) {
if (this.state > ST_STARTED) {
return false;
}
let record = this.idMap[serverId];
if (!record) {
logger.error('fail to notifyById for unknown server id:' + serverId);
return false;
}
this.sendToMonitor(record.socket, null, moduleId, msg);
return true;
}
/**
* notify a monitor by server{host:port} without callback
*
* @param {String} serverId
* @param {Object} serverInfo{host:port}
* @param {String} moduleId module id/name
* @param {Object} msg
* @api public
*/
notifyByServer(serverId: string, serverInfo: ServerInfo, moduleId: string, msg: any) {
if (this.state > ST_STARTED) {
return false;
}
let record = this.idMap[serverId];
if (!record) {
logger.error('fail to notifyByServer for unknown server id:' + serverId);
return false;
}
if (utils.compareServer(record.info as ServerInfo, serverInfo)) {
this.sendToMonitor(record.socket, null, moduleId, msg);
} else {
let slaves = this.slaveMap[serverId];
for (let i = 0, l = slaves.length; i < l; i++) {
if (utils.compareServer(slaves[i].info as ServerInfo, serverInfo)) {
this.sendToMonitor(slaves[i].socket, null, moduleId, msg);
break;
}
}
}
return true;
}
/**
* notify slaves by id without callback
*
* @param {String} serverId
* @param {String} moduleId module id/name
* @param {Object} msg
* @api public
*/
notifySlavesById(serverId: string, moduleId: string, msg: any) {
if (this.state > ST_STARTED) {
return false;
}
let slaves = this.slaveMap[serverId];
if (!slaves || slaves.length === 0) {
logger.error('fail to notifySlavesById for unknown server id:' + serverId);
return false;
}
this.broadcastMonitors(slaves, moduleId, msg);
return true;
}
/**
* notify monitors by type without callback
*
* @param {String} type serverType
* @param {String} moduleId module id/name
* @param {Object} msg
* @api public
*/
notifyByType(type: string, moduleId: string, msg: any) {
if (this.state > ST_STARTED) {
return false;
}
let list = this.typeMap[type];
if (!list || list.length === 0) {
logger.error('fail to notifyByType for unknown server type:' + type);
return false;
}
this.broadcastMonitors(list, moduleId, msg);
return true;
}
/**
* notify all the monitors without callback
*
* @param {String} moduleId module id/name
* @param {Object} msg
* @api public
*/
notifyAll(moduleId: string, msg?: any) {
if (this.state > ST_STARTED) {
return false;
}
this.broadcastMonitors(this.idMap, moduleId, msg);
return true;
}
/**
* notify a client by id without callback
*
* @param {String} clientId
* @param {String} moduleId module id/name
* @param {Object} msg
* @api public
*/
notifyClient(clientId: string, moduleId: string, msg: any) {
if (this.state > ST_STARTED) {
return false;
}
let record = this.clients[clientId];
if (!record) {
logger.error('fail to notifyClient for unknown client id:' + clientId);
return false;
}
this.sendToClient(record.socket, null, moduleId, msg);
}
notifyCommand(command: string, moduleId: string, msg: any) {
if (this.state > ST_STARTED) {
return false;
}
this.broadcastCommand(this.idMap, command, moduleId, msg);
return true;
}
doAuthUser(msg: AuthUserRequest, socket: MqttSocket, cb: Callback) {
if (!msg.id) {
// client should has a client id
return cb(new Error('client should has a client id'));
}
let self = this;
let username = msg.username;
if (!username) {
// client should auth with username
this.doSend(socket, 'register', {
code: protocol.PRO_FAIL,
msg: 'client should auth with username'
});
return cb(new Error('client should auth with username'));
}
let authUser = self.consoleService.authUser;
let env = self.consoleService.env;
authUser(msg, env, (user) => {
if (!user) {
// client should auth with username
this.doSend(socket, 'register', {
code: protocol.PRO_FAIL,
msg: 'client auth failed with username or password error'
});
return cb(new Error('client auth failed with username or password error'));
}
if (self.clients[msg.id]) {
this.doSend(socket, 'register', {
code: protocol.PRO_FAIL,
msg: 'id has been registered. id:' + msg.id
});
return cb(new Error('id has been registered. id:' + msg.id));
}
logger.info('client user : ' + username + ' login to master');
this.addConnection(msg.id, msg.type, null, user, socket);
this.doSend(socket, 'register', {
code: protocol.PRO_OK,
msg: 'ok'
});
cb();
});
}
doAuthServer(msg: AuthServerRequest, socket: MqttSocket, cb: Callback) {
let self = this;
let authServer = self.consoleService.authServer;
let env = self.consoleService.env;
authServer(msg, env, (status) => {
if (status !== 'ok') {
this.doSend(socket, 'register', {
code: protocol.PRO_FAIL,
msg: 'server auth failed,check config `adminServer`.'
});
cb(new Error('server auth failed,check config `adminServer`.'));
return;
}
let record = this.addConnection(msg.id, msg.serverType, msg.pid, msg.info, socket);
this.doSend(socket, 'register', {
code: protocol.PRO_OK,
msg: 'ok'
});
msg.info = msg.info;
msg.info.pid = msg.pid;
self.emit('register', msg.info);
cb(null);
});
}
/**
* add monitor,client to connection -- idMap
*
* @param {Object} agent agent object
* @param {String} id
* @param {String} type serverType
* @param {Object} socket socket-io object
* @api private
*/
addConnection(id: string, type: string, pid: string, info: AdminUserInfo | ServerInfo, socket: MqttSocket) {
let record: AgentClient = {
id: id,
type: type,
pid: pid,
info: info,
socket: socket
};
if (type === 'client') {
this.clients[id] = record;
} else {
if (!this.idMap[id]) {
this.idMap[id] = record;
let list = this.typeMap[type] = this.typeMap[type] || [];
list.push(record);
} else {
let slaves = this.slaveMap[id] = this.slaveMap[id] || [];
slaves.push(record);
}
}
return record;
}
/**
* remove monitor,client connection -- idMap
*
* @param {Object} agent agent object
* @param {String} id
* @param {String} type serverType
* @api private
*/
removeConnection(id: string, type: string, info: ServerInfo) {
if (type === 'client') {
delete this.clients[id];
} else {
// remove master node in idMap and typeMap
let record = this.idMap[id];
if (!record) {
return;
}
let _info = record['info']; // info {host, port}
if (utils.compareServer(_info as ServerInfo, info)) {
delete this.idMap[id];
let list = this.typeMap[type];
if (list) {
for (let i = 0, l = list.length; i < l; i++) {
if (list[i].id === id) {
list.splice(i, 1);
break;
}
}
if (list.length === 0) {
delete this.typeMap[type];
}
}
} else {
// remove slave node in slaveMap
let slaves = this.slaveMap[id];
if (slaves) {
for (let i = 0, l = slaves.length; i < l; i++) {
if (utils.compareServer(slaves[i]['info'] as ServerInfo, info)) {
slaves.splice(i, 1);
break;
}
}
if (slaves.length === 0) {
delete this.slaveMap[id];
}
}
}
}
}
    /**
     * send msg to monitor
     *
     * Wraps the payload with protocol.composeRequest so the monitor can route
     * its eventual reply back via `reqId` (null for one-way notifications).
     *
     * @param {Object} socket socket-io object
     * @param {Number} reqId request id
     * @param {String} moduleId module id/name
     * @param {Object} msg message
     * @api private
     */
    sendToMonitor(socket: MqttSocket, reqId: number, moduleId: string, msg: any) {
        this.doSend(socket, 'monitor', protocol.composeRequest(reqId, moduleId, msg));
    }
    /**
     * send msg to client
     *
     * Same framing as sendToMonitor, but published on the 'client' topic.
     *
     * @param {Object} socket socket-io object
     * @param {Number} reqId request id
     * @param {String} moduleId module id/name
     * @param {Object} msg message
     * @api private
     */
    sendToClient(socket: MqttSocket, reqId: number, moduleId: string, msg: any) {
        this.doSend(socket, 'client', protocol.composeRequest(reqId, moduleId, msg));
    }
    // Lowest-level send: publish `msg` on the given topic over the socket.
    doSend(socket: MqttSocket, topic: string, msg: any) {
        socket.send(topic, msg);
    }
/**
* broadcast msg to monitor
*
* @param {Object} record registered modules
* @param {String} moduleId module id/name
* @param {Object} msg message
* @api private
*/
broadcastMonitors(records: { [serverId: string]: AgentClient } | AgentClient[] , moduleId: string, msg: any) {
msg = protocol.composeRequest(null, moduleId, msg);
if (records instanceof Array) {
for (let i = 0, l = records.length; i < l; i++) {
let socket = records[i].socket;
this.doSend(socket, 'monitor', msg);
}
} else {
for (let id in records) {
let record = (records as { [id: string]: AgentClient })[id];
let socket = record.socket;
this.doSend(socket, 'monitor', msg);
}
}
}
broadcastCommand(records: AgentClient[] | { [id: string]: AgentClient }, command: string, moduleId: string, msg: any) {
msg = protocol.composeCommand(null, command, moduleId, msg);
if (records instanceof Array) {
for (let i = 0, l = records.length; i < l; i++) {
let socket = records[i].socket;
this.doSend(socket, 'monitor', msg);
}
}
else {
for (let id in records) {
let record = (records as { [id: string]: AgentClient })[id];
let socket = record.socket;
this.doSend(socket, 'monitor', msg);
}
}
}
}
import React, { useState, useEffect, useRef, useContext } from 'react' // eslint-disable-line
import { FileExplorer } from './components/file-explorer' // eslint-disable-line
import './css/remix-ui-workspace.css'
import { FileSystemContext } from './contexts'
// feature-detect the browser File APIs required by the 'uploadFile' menu item
const canUpload = window.File || window.FileReader || window.FileList || window.Blob
// Workspace panel: lets the user pick, create, rename and delete workspaces,
// and renders a FileExplorer for either the in-browser workspace or the
// shared localhost folder, driven entirely by FileSystemContext state.
export function Workspace () {
  // sentinel values rendered as <select> options alongside real workspace names
  const LOCALHOST = ' - connect to localhost - '
  const NO_WORKSPACE = ' - none - '
  const [currentWorkspace, setCurrentWorkspace] = useState<string>(NO_WORKSPACE)
  // shared file-system state + dispatchers provided by FileSystemContext
  const global = useContext(FileSystemContext)
  // uncontrolled inputs rendered inside the rename/create modal bodies
  const workspaceRenameInput = useRef()
  const workspaceCreateInput = useRef()
  // on mount: clear any previously focused explorer element
  useEffect(() => {
    resetFocus()
  }, [])
  // keep the local selection and the fetched directory in sync with fs state
  useEffect(() => {
    if (global.fs.mode === 'browser') {
      if (global.fs.browser.currentWorkspace) setCurrentWorkspace(global.fs.browser.currentWorkspace)
      else setCurrentWorkspace(NO_WORKSPACE)
      global.dispatchFetchWorkspaceDirectory(global.fs.browser.currentWorkspace)
    } else if (global.fs.mode === 'localhost') {
      global.dispatchFetchWorkspaceDirectory('/')
      setCurrentWorkspace(LOCALHOST)
    }
  }, [global.fs.browser.currentWorkspace, global.fs.localhost.sharedFolder, global.fs.mode])
  // if the current workspace disappeared (e.g. was deleted), fall back to the
  // last remaining workspace, or to NO_WORKSPACE when none are left
  useEffect(() => {
    if (global.fs.browser.currentWorkspace && !global.fs.browser.workspaces.includes(global.fs.browser.currentWorkspace)) {
      if (global.fs.browser.workspaces.length > 0) {
        switchWorkspace(global.fs.browser.workspaces[global.fs.browser.workspaces.length - 1])
      } else {
        switchWorkspace(NO_WORKSPACE)
      }
    }
  }, [global.fs.browser.workspaces])
  // open the rename / create / delete confirmation modals
  const renameCurrentWorkspace = () => {
    global.modal('Rename Current Workspace', renameModalMessage(), 'OK', onFinishRenameWorkspace, '')
  }
  const createWorkspace = () => {
    global.modal('Create Workspace', createModalMessage(), 'OK', onFinishCreateWorkspace, '')
  }
  const deleteCurrentWorkspace = () => {
    global.modal('Delete Current Workspace', 'Are you sure to delete the current workspace?', 'OK', onFinishDeleteWorkspace, '')
  }
  // modal OK handlers: read the uncontrolled input and dispatch the action;
  // failures are surfaced in a follow-up modal and logged
  const onFinishRenameWorkspace = async () => {
    if (workspaceRenameInput.current === undefined) return
    // @ts-ignore: Object is possibly 'null'.
    const workspaceName = workspaceRenameInput.current.value
    try {
      await global.dispatchRenameWorkspace(currentWorkspace, workspaceName)
    } catch (e) {
      global.modal('Rename Workspace', e.message, 'OK', () => {}, '')
      console.error(e)
    }
  }
  const onFinishCreateWorkspace = async () => {
    if (workspaceCreateInput.current === undefined) return
    // @ts-ignore: Object is possibly 'null'.
    const workspaceName = workspaceCreateInput.current.value
    try {
      await global.dispatchCreateWorkspace(workspaceName)
    } catch (e) {
      global.modal('Create Workspace', e.message, 'OK', () => {}, '')
      console.error(e)
    }
  }
  const onFinishDeleteWorkspace = async () => {
    try {
      await global.dispatchDeleteWorkspace(global.fs.browser.currentWorkspace)
    } catch (e) {
      global.modal('Delete Workspace', e.message, 'OK', () => {}, '')
      console.error(e)
    }
  }
  /** ** ****/
  // reset the explorer focus to the workspace root folder
  const resetFocus = () => {
    global.dispatchSetFocusElement([{ key: '', type: 'folder' }])
  }
  // switch the active workspace and collapse the explorer tree
  const switchWorkspace = async (name: string) => {
    try {
      await global.dispatchSwitchToWorkspace(name)
      global.dispatchHandleExpandPath([])
    } catch (e) {
      global.modal('Switch To Workspace', e.message, 'OK', () => {}, '')
      console.error(e)
    }
  }
  // modal bodies: uncontrolled text inputs captured via the refs above
  const createModalMessage = () => {
    return (
      <>
        <input type="text" data-id="modalDialogCustomPromptTextCreate" defaultValue={`workspace_${Date.now()}`} ref={workspaceCreateInput} className="form-control" />
      </>
    )
  }
  const renameModalMessage = () => {
    return (
      <>
        <input type="text" data-id="modalDialogCustomPromptTextRename" defaultValue={ currentWorkspace } ref={workspaceRenameInput} className="form-control" />
      </>
    )
  }
  // layout: header with the workspace controls, then the explorer tree(s)
  return (
    <div className='remixui_container'>
      <div className='remixui_fileexplorer' data-id="remixUIWorkspaceExplorer" onClick={resetFocus}>
        <div>
          <header>
            <div className="mb-2">
              <label className="form-check-label" htmlFor="workspacesSelect">
                Workspaces
              </label>
              <span className="remixui_menu">
                <span
                  hidden={currentWorkspace === LOCALHOST || currentWorkspace === NO_WORKSPACE}
                  id='workspaceCreate'
                  data-id='workspaceCreate'
                  onClick={(e) => {
                    e.stopPropagation()
                    createWorkspace()
                  }}
                  className='far fa-plus-square remixui_menuicon'
                  title='Create'>
                </span>
                <span
                  hidden={currentWorkspace === LOCALHOST || currentWorkspace === NO_WORKSPACE}
                  id='workspaceRename'
                  data-id='workspaceRename'
                  onClick={(e) => {
                    e.stopPropagation()
                    renameCurrentWorkspace()
                  }}
                  className='far fa-edit remixui_menuicon'
                  title='Rename'>
                </span>
                <span
                  hidden={currentWorkspace === LOCALHOST || currentWorkspace === NO_WORKSPACE}
                  id='workspaceDelete'
                  data-id='workspaceDelete'
                  onClick={(e) => {
                    e.stopPropagation()
                    deleteCurrentWorkspace()
                  }}
                  className='fas fa-trash'
                  title='Delete'>
                </span>
              </span>
              <select id="workspacesSelect" value={currentWorkspace} data-id="workspacesSelect" onChange={(e) => switchWorkspace(e.target.value)} className="form-control custom-select">
                {
                  global.fs.browser.workspaces
                    .map((folder, index) => {
                      return <option key={index} value={folder}>{folder}</option>
                    })
                }
                <option value={LOCALHOST}>{currentWorkspace === LOCALHOST ? 'localhost' : LOCALHOST}</option>
                { global.fs.browser.workspaces.length <= 0 && <option value={NO_WORKSPACE}>{NO_WORKSPACE}</option> }
              </select>
            </div>
          </header>
        </div>
        <div className='remixui_fileExplorerTree'>
          <div>
            <div className='pl-2 remixui_treeview' data-id='filePanelFileExplorerTree'>
              { (global.fs.mode === 'browser') && (currentWorkspace !== NO_WORKSPACE) &&
                <FileExplorer
                  name={currentWorkspace}
                  menuItems={['createNewFile', 'createNewFolder', 'publishToGist', canUpload ? 'uploadFile' : '']}
                  contextMenuItems={global.fs.browser.contextMenu.registeredMenuItems}
                  removedContextMenuItems={global.fs.browser.contextMenu.removedMenuItems}
                  files={global.fs.browser.files}
                  expandPath={global.fs.browser.expandPath}
                  focusEdit={global.fs.focusEdit}
                  focusElement={global.fs.focusElement}
                  dispatchCreateNewFile={global.dispatchCreateNewFile}
                  modal={global.modal}
                  dispatchCreateNewFolder={global.dispatchCreateNewFolder}
                  readonly={global.fs.readonly}
                  toast={global.toast}
                  dispatchDeletePath={global.dispatchDeletePath}
                  dispatchRenamePath={global.dispatchRenamePath}
                  dispatchUploadFile={global.dispatchUploadFile}
                  dispatchCopyFile={global.dispatchCopyFile}
                  dispatchCopyFolder={global.dispatchCopyFolder}
                  dispatchPublishToGist={global.dispatchPublishToGist}
                  dispatchRunScript={global.dispatchRunScript}
                  dispatchEmitContextMenuEvent={global.dispatchEmitContextMenuEvent}
                  dispatchHandleClickFile={global.dispatchHandleClickFile}
                  dispatchSetFocusElement={global.dispatchSetFocusElement}
                  dispatchFetchDirectory={global.dispatchFetchDirectory}
                  dispatchRemoveInputField={global.dispatchRemoveInputField}
                  dispatchAddInputField={global.dispatchAddInputField}
                  dispatchHandleExpandPath={global.dispatchHandleExpandPath}
                />
              }
            </div>
            {
              global.fs.localhost.isRequestingLocalhost ? <div className="text-center py-5"><i className="fas fa-spinner fa-pulse fa-2x"></i></div>
                : <div className='pl-2 filesystemexplorer remixui_treeview'>
                  { global.fs.mode === 'localhost' && global.fs.localhost.isSuccessfulLocalhost &&
                    <FileExplorer
                      name='localhost'
                      menuItems={['createNewFile', 'createNewFolder']}
                      contextMenuItems={global.fs.localhost.contextMenu.registeredMenuItems}
                      removedContextMenuItems={global.fs.localhost.contextMenu.removedMenuItems}
                      files={global.fs.localhost.files}
                      expandPath={global.fs.localhost.expandPath}
                      focusEdit={global.fs.focusEdit}
                      focusElement={global.fs.focusElement}
                      dispatchCreateNewFile={global.dispatchCreateNewFile}
                      modal={global.modal}
                      dispatchCreateNewFolder={global.dispatchCreateNewFolder}
                      readonly={global.fs.readonly}
                      toast={global.toast}
                      dispatchDeletePath={global.dispatchDeletePath}
                      dispatchRenamePath={global.dispatchRenamePath}
                      dispatchUploadFile={global.dispatchUploadFile}
                      dispatchCopyFile={global.dispatchCopyFile}
                      dispatchCopyFolder={global.dispatchCopyFolder}
                      dispatchPublishToGist={global.dispatchPublishToGist}
                      dispatchRunScript={global.dispatchRunScript}
                      dispatchEmitContextMenuEvent={global.dispatchEmitContextMenuEvent}
                      dispatchHandleClickFile={global.dispatchHandleClickFile}
                      dispatchSetFocusElement={global.dispatchSetFocusElement}
                      dispatchFetchDirectory={global.dispatchFetchDirectory}
                      dispatchRemoveInputField={global.dispatchRemoveInputField}
                      dispatchAddInputField={global.dispatchAddInputField}
                      dispatchHandleExpandPath={global.dispatchHandleExpandPath}
                    />
                  }
                </div>
            }
          </div>
        </div>
      </div>
    </div>
  )
}
export default Workspace
import * as GLib from "@gi-types/glib";
// Ambient declarations for the GObject module (GObject-Introspection style
// bindings). Only section comments are added; the generated declarations
// themselves are unchanged. Types such as GType, Closure, ParamSpec and
// TypeClass are declared elsewhere in this file.

// --- module constants (param/signal masks, GType reserved ranges) ---
export const PARAM_MASK: number;
export const PARAM_STATIC_STRINGS: number;
export const PARAM_USER_SHIFT: number;
export const SIGNAL_FLAGS_MASK: number;
export const SIGNAL_MATCH_MASK: number;
export const TYPE_FLAG_RESERVED_ID_BIT: GLib.Type;
export const TYPE_FUNDAMENTAL_MAX: number;
export const TYPE_FUNDAMENTAL_SHIFT: number;
export const TYPE_RESERVED_BSE_FIRST: number;
export const TYPE_RESERVED_BSE_LAST: number;
export const TYPE_RESERVED_GLIB_FIRST: number;
export const TYPE_RESERVED_GLIB_LAST: number;
export const TYPE_RESERVED_USER_FIRST: number;
export const VALUE_INTERNED_STRING: number;
export const VALUE_NOCOPY_CONTENTS: number;

// --- boxed type helpers ---
export function boxed_copy(boxed_type: GType, src_boxed: any): any;
export function boxed_free(boxed_type: GType, boxed: any): void;

// --- C closure marshallers (GValue-based signal marshalling); all share the
// same (closure, return_value, n_param_values, param_values, hint, data) shape ---
export function cclosure_marshal_BOOLEAN__BOXED_BOXED(
    closure: Closure,
    return_value: any,
    n_param_values: number,
    param_values: any,
    invocation_hint?: any | null,
    marshal_data?: any | null
): void;
export function cclosure_marshal_BOOLEAN__FLAGS(
    closure: Closure,
    return_value: any,
    n_param_values: number,
    param_values: any,
    invocation_hint?: any | null,
    marshal_data?: any | null
): void;
export function cclosure_marshal_STRING__OBJECT_POINTER(
    closure: Closure,
    return_value: any,
    n_param_values: number,
    param_values: any,
    invocation_hint?: any | null,
    marshal_data?: any | null
): void;
export function cclosure_marshal_VOID__BOOLEAN(
    closure: Closure,
    return_value: any,
    n_param_values: number,
    param_values: any,
    invocation_hint?: any | null,
    marshal_data?: any | null
): void;
export function cclosure_marshal_VOID__BOXED(
    closure: Closure,
    return_value: any,
    n_param_values: number,
    param_values: any,
    invocation_hint?: any | null,
    marshal_data?: any | null
): void;
export function cclosure_marshal_VOID__CHAR(
    closure: Closure,
    return_value: any,
    n_param_values: number,
    param_values: any,
    invocation_hint?: any | null,
    marshal_data?: any | null
): void;
export function cclosure_marshal_VOID__DOUBLE(
    closure: Closure,
    return_value: any,
    n_param_values: number,
    param_values: any,
    invocation_hint?: any | null,
    marshal_data?: any | null
): void;
export function cclosure_marshal_VOID__ENUM(
    closure: Closure,
    return_value: any,
    n_param_values: number,
    param_values: any,
    invocation_hint?: any | null,
    marshal_data?: any | null
): void;
export function cclosure_marshal_VOID__FLAGS(
    closure: Closure,
    return_value: any,
    n_param_values: number,
    param_values: any,
    invocation_hint?: any | null,
    marshal_data?: any | null
): void;
export function cclosure_marshal_VOID__FLOAT(
    closure: Closure,
    return_value: any,
    n_param_values: number,
    param_values: any,
    invocation_hint?: any | null,
    marshal_data?: any | null
): void;
export function cclosure_marshal_VOID__INT(
    closure: Closure,
    return_value: any,
    n_param_values: number,
    param_values: any,
    invocation_hint?: any | null,
    marshal_data?: any | null
): void;
export function cclosure_marshal_VOID__LONG(
    closure: Closure,
    return_value: any,
    n_param_values: number,
    param_values: any,
    invocation_hint?: any | null,
    marshal_data?: any | null
): void;
export function cclosure_marshal_VOID__OBJECT(
    closure: Closure,
    return_value: any,
    n_param_values: number,
    param_values: any,
    invocation_hint?: any | null,
    marshal_data?: any | null
): void;
export function cclosure_marshal_VOID__PARAM(
    closure: Closure,
    return_value: any,
    n_param_values: number,
    param_values: any,
    invocation_hint?: any | null,
    marshal_data?: any | null
): void;
export function cclosure_marshal_VOID__POINTER(
    closure: Closure,
    return_value: any,
    n_param_values: number,
    param_values: any,
    invocation_hint?: any | null,
    marshal_data?: any | null
): void;
export function cclosure_marshal_VOID__STRING(
    closure: Closure,
    return_value: any,
    n_param_values: number,
    param_values: any,
    invocation_hint?: any | null,
    marshal_data?: any | null
): void;
export function cclosure_marshal_VOID__UCHAR(
    closure: Closure,
    return_value: any,
    n_param_values: number,
    param_values: any,
    invocation_hint?: any | null,
    marshal_data?: any | null
): void;
export function cclosure_marshal_VOID__UINT(
    closure: Closure,
    return_value: any,
    n_param_values: number,
    param_values: any,
    invocation_hint?: any | null,
    marshal_data?: any | null
): void;
export function cclosure_marshal_VOID__UINT_POINTER(
    closure: Closure,
    return_value: any,
    n_param_values: number,
    param_values: any,
    invocation_hint?: any | null,
    marshal_data?: any | null
): void;
export function cclosure_marshal_VOID__ULONG(
    closure: Closure,
    return_value: any,
    n_param_values: number,
    param_values: any,
    invocation_hint?: any | null,
    marshal_data?: any | null
): void;
export function cclosure_marshal_VOID__VARIANT(
    closure: Closure,
    return_value: any,
    n_param_values: number,
    param_values: any,
    invocation_hint?: any | null,
    marshal_data?: any | null
): void;
export function cclosure_marshal_VOID__VOID(
    closure: Closure,
    return_value: any,
    n_param_values: number,
    param_values: any,
    invocation_hint?: any | null,
    marshal_data?: any | null
): void;
export function cclosure_marshal_generic(
    closure: Closure,
    return_gvalue: any,
    n_param_values: number,
    param_values: any,
    invocation_hint?: any | null,
    marshal_data?: any | null
): void;
export function clear_signal_handler(handler_id_ptr: number, instance: Object): void;

// --- enum and flags registration / value lookup ---
export function enum_complete_type_info(g_enum_type: GType, const_values: EnumValue): TypeInfo;
export function enum_get_value(enum_class: EnumClass, value: number): EnumValue;
export function enum_get_value_by_name(enum_class: EnumClass, name: string): EnumValue;
export function enum_get_value_by_nick(enum_class: EnumClass, nick: string): EnumValue;
export function enum_register_static(name: string, const_static_values: EnumValue): GType;
export function enum_to_string(g_enum_type: GType, value: number): string;
export function flags_complete_type_info(g_flags_type: GType, const_values: FlagsValue): TypeInfo;
export function flags_get_first_value(flags_class: FlagsClass, value: number): FlagsValue;
export function flags_get_value_by_name(flags_class: FlagsClass, name: string): FlagsValue;
export function flags_get_value_by_nick(flags_class: FlagsClass, nick: string): FlagsValue;
export function flags_register_static(name: string, const_static_values: FlagsValue): GType;
export function flags_to_string(flags_type: GType, value: number): string;
export function gtype_get_type(): GType;

// --- GParamSpec constructors (one per fundamental property type) ---
export function param_spec_boolean(
    name: string,
    nick: string,
    blurb: string,
    default_value: boolean,
    flags: ParamFlags
): ParamSpec;
export function param_spec_boxed(
    name: string,
    nick: string,
    blurb: string,
    boxed_type: GType,
    flags: ParamFlags
): ParamSpec;
export function param_spec_char(
    name: string,
    nick: string,
    blurb: string,
    minimum: number,
    maximum: number,
    default_value: number,
    flags: ParamFlags
): ParamSpec;
export function param_spec_double(
    name: string,
    nick: string,
    blurb: string,
    minimum: number,
    maximum: number,
    default_value: number,
    flags: ParamFlags
): ParamSpec;
export function param_spec_enum(
    name: string,
    nick: string,
    blurb: string,
    enum_type: GType,
    default_value: number,
    flags: ParamFlags
): ParamSpec;
export function param_spec_flags(
    name: string,
    nick: string,
    blurb: string,
    flags_type: GType,
    default_value: number,
    flags: ParamFlags
): ParamSpec;
export function param_spec_float(
    name: string,
    nick: string,
    blurb: string,
    minimum: number,
    maximum: number,
    default_value: number,
    flags: ParamFlags
): ParamSpec;
export function param_spec_gtype(
    name: string,
    nick: string,
    blurb: string,
    is_a_type: GType,
    flags: ParamFlags
): ParamSpec;
export function param_spec_int(
    name: string,
    nick: string,
    blurb: string,
    minimum: number,
    maximum: number,
    default_value: number,
    flags: ParamFlags
): ParamSpec;
export function param_spec_int64(
    name: string,
    nick: string,
    blurb: string,
    minimum: number,
    maximum: number,
    default_value: number,
    flags: ParamFlags
): ParamSpec;
export function param_spec_long(
    name: string,
    nick: string,
    blurb: string,
    minimum: number,
    maximum: number,
    default_value: number,
    flags: ParamFlags
): ParamSpec;
export function param_spec_object(
    name: string,
    nick: string,
    blurb: string,
    object_type: GType,
    flags: ParamFlags
): ParamSpec;
export function param_spec_param(
    name: string,
    nick: string,
    blurb: string,
    param_type: GType,
    flags: ParamFlags
): ParamSpec;
export function param_spec_pointer(name: string, nick: string, blurb: string, flags: ParamFlags): ParamSpec;
export function param_spec_pool_new(type_prefixing: boolean): ParamSpecPool;
export function param_spec_string(
    name: string,
    nick: string,
    blurb: string,
    default_value: string | null,
    flags: ParamFlags
): ParamSpec;
export function param_spec_uchar(
    name: string,
    nick: string,
    blurb: string,
    minimum: number,
    maximum: number,
    default_value: number,
    flags: ParamFlags
): ParamSpec;
export function param_spec_uint(
    name: string,
    nick: string,
    blurb: string,
    minimum: number,
    maximum: number,
    default_value: number,
    flags: ParamFlags
): ParamSpec;
export function param_spec_uint64(
    name: string,
    nick: string,
    blurb: string,
    minimum: number,
    maximum: number,
    default_value: number,
    flags: ParamFlags
): ParamSpec;
export function param_spec_ulong(
    name: string,
    nick: string,
    blurb: string,
    minimum: number,
    maximum: number,
    default_value: number,
    flags: ParamFlags
): ParamSpec;
export function param_spec_unichar(
    name: string,
    nick: string,
    blurb: string,
    default_value: number,
    flags: ParamFlags
): ParamSpec;
export function param_spec_variant(
    name: string,
    nick: string,
    blurb: string,
    type: GLib.VariantType,
    default_value: GLib.Variant | null,
    flags: ParamFlags
): ParamSpec;

// --- param type registration and GValue <-> ParamSpec helpers ---
export function param_type_register_static(name: string, pspec_info: ParamSpecTypeInfo): GType;
export function param_value_convert(
    pspec: ParamSpec,
    src_value: any,
    dest_value: any,
    strict_validation: boolean
): boolean;
export function param_value_defaults(pspec: ParamSpec, value: any): boolean;
export function param_value_set_default(pspec: ParamSpec, value: any): void;
export function param_value_validate(pspec: ParamSpec, value: any): boolean;
export function param_values_cmp(pspec: ParamSpec, value1: any, value2: any): number;
export function pointer_type_register_static(name: string): GType;

// --- signal accumulation, connection, emission and introspection ---
export function signal_accumulator_first_wins(
    ihint: SignalInvocationHint,
    return_accu: any,
    handler_return: any,
    dummy?: any | null
): boolean;
export function signal_accumulator_true_handled(
    ihint: SignalInvocationHint,
    return_accu: any,
    handler_return: any,
    dummy?: any | null
): boolean;
export function signal_add_emission_hook(signal_id: number, detail: GLib.Quark, hook_func: SignalEmissionHook): number;
export function signal_chain_from_overridden(instance_and_params: Value[], return_value: any): void;
export function signal_connect_closure(
    instance: Object,
    detailed_signal: string,
    closure: Closure,
    after: boolean
): number;
export function signal_connect_closure_by_id(
    instance: Object,
    signal_id: number,
    detail: GLib.Quark,
    closure: Closure,
    after: boolean
): number;
export function signal_emitv(
    instance_and_params: Value[],
    signal_id: number,
    detail: GLib.Quark,
    return_value?: any
): unknown;
export function signal_get_invocation_hint(instance: Object): SignalInvocationHint;
export function signal_handler_block(instance: Object, handler_id: number): void;
export function signal_handler_disconnect(instance: Object, handler_id: number): void;
export function signal_handler_is_connected(instance: Object, handler_id: number): boolean;
export function signal_handler_unblock(instance: Object, handler_id: number): void;
export function signal_handlers_block_matched(
    instance: Object,
    mask: SignalMatchType,
    signal_id: number,
    detail: GLib.Quark,
    closure?: Closure | null,
    func?: any | null,
    data?: any | null
): number;
export function signal_handlers_destroy(instance: Object): void;
export function signal_handlers_disconnect_matched(
    instance: Object,
    mask: SignalMatchType,
    signal_id: number,
    detail: GLib.Quark,
    closure?: Closure | null,
    func?: any | null,
    data?: any | null
): number;
export function signal_handlers_unblock_matched(
    instance: Object,
    mask: SignalMatchType,
    signal_id: number,
    detail: GLib.Quark,
    closure?: Closure | null,
    func?: any | null,
    data?: any | null
): number;
export function signal_has_handler_pending(
    instance: Object,
    signal_id: number,
    detail: GLib.Quark,
    may_be_blocked: boolean
): boolean;
export function signal_is_valid_name(name: string): boolean;
export function signal_list_ids(itype: GType): number[];
export function signal_lookup(name: string, itype: GType): number;
export function signal_name(signal_id: number): string;
export function signal_override_class_closure(signal_id: number, instance_type: GType, class_closure: Closure): void;
export function signal_parse_name(
    detailed_signal: string,
    itype: GType,
    force_detail_quark: boolean
): [boolean, number, GLib.Quark];
export function signal_query(signal_id: number): SignalQuery;
export function signal_remove_emission_hook(signal_id: number, hook_id: number): void;
export function signal_set_va_marshaller(
    signal_id: number,
    instance_type: GType,
    va_marshaller: SignalCVaMarshaller
): void;
export function signal_stop_emission(instance: Object, signal_id: number, detail: GLib.Quark): void;
export function signal_stop_emission_by_name(instance: Object, detailed_signal: string): void;
export function signal_type_cclosure_new(itype: GType, struct_offset: number): Closure;

// --- GLib.Source closure helpers and misc utilities ---
export function source_set_closure(source: GLib.Source, closure: Closure): void;
export function source_set_dummy_callback(source: GLib.Source): void;
export function strdup_value_contents(value: any): string;

// --- GType registration, interface handling and runtime type checks ---
export function type_add_class_private(class_type: GType, private_size: number): void;
export function type_add_instance_private(class_type: GType, private_size: number): number;
export function type_add_interface_dynamic(instance_type: GType, interface_type: GType, plugin: TypePlugin): void;
export function type_add_interface_static(instance_type: GType, interface_type: GType, info: InterfaceInfo): void;
export function type_check_class_is_a(g_class: TypeClass, is_a_type: GType): boolean;
export function type_check_instance(instance: TypeInstance): boolean;
export function type_check_instance_is_a(instance: TypeInstance, iface_type: GType): boolean;
export function type_check_instance_is_fundamentally_a(instance: TypeInstance, fundamental_type: GType): boolean;
export function type_check_is_value_type(type: GType): boolean;
export function type_check_value(value: any): boolean;
export function type_check_value_holds(value: any, type: GType): boolean;
export function type_children(type: GType): GType[];
export function type_class_adjust_private_offset(g_class: any | null, private_size_or_offset: number): void;
export function type_class_peek(type: GType): TypeClass;
export function type_class_peek_static(type: GType): TypeClass;
export function type_class_ref(type: GType): TypeClass;
export function type_default_interface_peek(g_type: GType): TypeInterface;
export function type_default_interface_ref(g_type: GType): TypeInterface;
export function type_default_interface_unref(g_iface: TypeInterface): void;
export function type_depth(type: GType): number;
export function type_ensure(type: GType): void;
export function type_free_instance(instance: TypeInstance): void;
export function type_from_name(name: string): GType;
export function type_fundamental(type_id: GType): GType;
export function type_fundamental_next(): GType;
export function type_get_instance_count(type: GType): number;
export function type_get_plugin(type: GType): TypePlugin;
export function type_get_qdata(type: GType, quark: GLib.Quark): any | null;
export function type_get_type_registration_serial(): number;
export function type_init(): void;
export function type_init_with_debug_flags(debug_flags: TypeDebugFlags): void;
export function type_interface_add_prerequisite(interface_type: GType, prerequisite_type: GType): void;
export function type_interface_get_plugin(instance_type: GType, interface_type: GType): TypePlugin;
export function type_interface_peek(instance_class: TypeClass, iface_type: GType): TypeInterface;
export function type_interface_prerequisites(interface_type: GType): GType[];
export function type_interfaces(type: GType): GType[];
export function type_is_a(type: GType, is_a_type: GType): boolean;
export function type_name(type: GType): string;
export function type_name_from_class(g_class: TypeClass): string;
export function type_name_from_instance(instance: TypeInstance): string;
export function type_next_base(leaf_type: GType, root_type: GType): GType;
export function type_parent(type: GType): GType;
export function type_qname(type: GType): GLib.Quark;
export function type_query(type: GType): TypeQuery;
export function type_register_dynamic(
parent_type: GType,
type_name: string,
plugin: TypePlugin,
flags: TypeFlags
): GType;
export function type_register_fundamental(
type_id: GType,
type_name: string,
info: TypeInfo,
finfo: TypeFundamentalInfo,
flags: TypeFlags
): GType;
export function type_register_static(parent_type: GType, type_name: string, info: TypeInfo, flags: TypeFlags): GType;
export function type_set_qdata(type: GType, quark: GLib.Quark, data?: any | null): void;
export function type_test_flags(type: GType, flags: number): boolean;
export function value_type_compatible(src_type: GType, dest_type: GType): boolean;
export function value_type_transformable(src_type: GType, dest_type: GType): boolean;
// ===========================================================================
// Callback type aliases used throughout the GObject declarations.
// Trailing `?: any | null` parameters correspond to the optional user-data
// pointers of the underlying C callback signatures.  Generic `<A = Object>`
// parameters let callers pin the object type carried by the callback.
// ===========================================================================
export type BaseFinalizeFunc = (g_class: TypeClass) => void;
export type BaseInitFunc = (g_class: TypeClass) => void;
// Returns true when the transformation succeeded (mirrors the C contract
// of writing into to_value) — NOTE(review): confirm against GBinding docs.
export type BindingTransformFunc = (binding: Binding, from_value: any, to_value: any) => boolean;
export type BoxedCopyFunc = (boxed: any) => any;
export type BoxedFreeFunc = (boxed: any) => void;
export type Callback = () => void;
export type ClassFinalizeFunc = (g_class: TypeClass, class_data?: any | null) => void;
export type ClassInitFunc = (g_class: TypeClass, class_data?: any | null) => void;
// Generic closure marshaller: converts param_values into a call and writes
// the result into return_value (null when the signal returns void).
export type ClosureMarshal = (
closure: Closure,
return_value: Value | null,
param_values: Value[],
invocation_hint?: any | null,
marshal_data?: any | null
) => void;
export type ClosureNotify = (data: any | null, closure: Closure) => void;
export type InstanceInitFunc = (instance: TypeInstance, g_class: TypeClass) => void;
export type InterfaceFinalizeFunc = (g_iface: TypeInterface, iface_data?: any | null) => void;
export type InterfaceInitFunc = (g_iface: TypeInterface, iface_data?: any | null) => void;
export type ObjectFinalizeFunc<A = Object> = (object: A) => void;
export type ObjectGetPropertyFunc<A = Object> = (object: A, property_id: number, value: any, pspec: ParamSpec) => void;
export type ObjectSetPropertyFunc<A = Object> = (object: A, property_id: number, value: any, pspec: ParamSpec) => void;
// Accumulates per-handler return values during signal emission; returning
// false stops further accumulation (NOTE(review): confirm exact stop rule).
export type SignalAccumulator = (
ihint: SignalInvocationHint,
return_accu: any,
handler_return: any,
data?: any | null
) => boolean;
export type SignalEmissionHook = (ihint: SignalInvocationHint, param_values: Value[], data?: any | null) => boolean;
export type ToggleNotify<A = Object> = (data: any | null, object: A, is_last_ref: boolean) => void;
export type TypeClassCacheFunc = (cache_data: any | null, g_class: TypeClass) => boolean;
export type TypeInterfaceCheckFunc = (check_data: any | null, g_iface: TypeInterface) => void;
export type TypePluginCompleteInterfaceInfo = (
plugin: TypePlugin,
instance_type: GType,
interface_type: GType,
info: InterfaceInfo
) => void;
export type TypePluginCompleteTypeInfo = (
plugin: TypePlugin,
g_type: GType,
info: TypeInfo,
value_table: TypeValueTable
) => void;
export type TypePluginUnuse = (plugin: TypePlugin) => void;
export type TypePluginUse = (plugin: TypePlugin) => void;
export type ValueTransform = (src_value: any, dest_value: any) => void;
export type WeakNotify<A = Object> = (data: any | null, where_the_object_was: A) => void;
// ===========================================================================
// Flag and enum types.  Each enum is paired with a same-named namespace so
// that TypeScript declaration merging exposes `<Name>.$gtype` alongside the
// enum members.  The numeric values are bit flags combinable with `|`.
// ===========================================================================
export namespace BindingFlags {
export const $gtype: GType<BindingFlags>;
}
// Controls the behavior of a property Binding (direction, initial sync).
export enum BindingFlags {
DEFAULT = 0,
BIDIRECTIONAL = 1,
SYNC_CREATE = 2,
INVERT_BOOLEAN = 4,
}
export namespace ConnectFlags {
export const $gtype: GType<ConnectFlags>;
}
// Modifiers for signal connection (run after default handler / swap args).
export enum ConnectFlags {
AFTER = 1,
SWAPPED = 2,
}
export namespace ParamFlags {
export const $gtype: GType<ParamFlags>;
}
// Property-specification flags.  READWRITE (3) is READABLE | WRITABLE.
export enum ParamFlags {
READABLE = 1,
WRITABLE = 2,
READWRITE = 3,
CONSTRUCT = 4,
CONSTRUCT_ONLY = 8,
LAX_VALIDATION = 16,
STATIC_NAME = 32,
// NOTE(review): PRIVATE shares the value 32 with STATIC_NAME — presumably
// a deprecated alias, as in the C API; confirm against G_PARAM_PRIVATE docs.
PRIVATE = 32,
STATIC_NICK = 64,
STATIC_BLURB = 128,
EXPLICIT_NOTIFY = 1073741824,
// 2^31 — exceeds the signed 32-bit range but is a valid TS number literal.
DEPRECATED = 2147483648,
}
export namespace SignalFlags {
export const $gtype: GType<SignalFlags>;
}
// Signal registration flags (stage of default-handler invocation, etc.).
export enum SignalFlags {
RUN_FIRST = 1,
RUN_LAST = 2,
RUN_CLEANUP = 4,
NO_RECURSE = 8,
DETAILED = 16,
ACTION = 32,
NO_HOOKS = 64,
MUST_COLLECT = 128,
DEPRECATED = 256,
}
export namespace SignalMatchType {
export const $gtype: GType<SignalMatchType>;
}
// Criteria for matching signal handlers (used by handler lookup/blocking).
export enum SignalMatchType {
ID = 1,
DETAIL = 2,
CLOSURE = 4,
FUNC = 8,
DATA = 16,
UNBLOCKED = 32,
}
export namespace TypeDebugFlags {
export const $gtype: GType<TypeDebugFlags>;
}
// Debug instrumentation flags; MASK (7) covers OBJECTS | SIGNALS |
// INSTANCE_COUNT.
export enum TypeDebugFlags {
NONE = 0,
OBJECTS = 1,
SIGNALS = 2,
INSTANCE_COUNT = 4,
MASK = 7,
}
export namespace TypeFlags {
export const $gtype: GType<TypeFlags>;
}
// Flags applied when registering a GType.
export enum TypeFlags {
ABSTRACT = 16,
VALUE_ABSTRACT = 32,
}
export namespace TypeFundamentalFlags {
export const $gtype: GType<TypeFundamentalFlags>;
}
// Capabilities of a fundamental GType.
export enum TypeFundamentalFlags {
CLASSED = 1,
INSTANTIATABLE = 2,
DERIVABLE = 4,
DEEP_DERIVABLE = 8,
}
// `export module` is the legacy spelling of `export namespace`; it merges
// with the class below so `Binding.ConstructorProperties` is addressable.
export module Binding {
// Properties accepted by the Binding constructor.  Both snake_case and
// camelCase spellings of each property are declared (GJS accepts both).
export interface ConstructorProperties extends Object.ConstructorProperties {
[key: string]: any;
flags: BindingFlags;
source: Object;
source_property: string;
sourceProperty: string;
target: Object;
target_property: string;
targetProperty: string;
}
}
// A one- or two-way link between a property on `source` and a property on
// `target`, configured via BindingFlags.
export class Binding extends Object {
static $gtype: GType<Binding>;
constructor(properties?: Partial<Binding.ConstructorProperties>, ...args: any[]);
_init(properties?: Partial<Binding.ConstructorProperties>, ...args: any[]): void;
// Properties
flags: BindingFlags;
source: Object;
source_property: string;
sourceProperty: string;
target: Object;
target_property: string;
targetProperty: string;
// Members
get_flags(): BindingFlags;
// Generic so callers can narrow to the concrete object type they bound.
get_source<T = Object>(): T;
get_source_property(): string;
get_target<T = Object>(): T;
get_target_property(): string;
// Releases the binding between source and target.
unbind(): void;
}
export module InitiallyUnowned {
export interface ConstructorProperties extends Object.ConstructorProperties {
[key: string]: any;
}
}
// Object subclass whose initial reference is "floating" (typically consumed
// by a container) — NOTE(review): semantics inherited from GInitiallyUnowned;
// the declaration itself adds no members beyond the instance field.
export class InitiallyUnowned extends Object {
static $gtype: GType<InitiallyUnowned>;
constructor(properties?: Partial<InitiallyUnowned.ConstructorProperties>, ...args: any[]);
_init(properties?: Partial<InitiallyUnowned.ConstructorProperties>, ...args: any[]): void;
// Fields
g_type_instance: TypeInstance;
}
export module Object {
// Open-keyed bag of construct properties; subclasses extend this.
export interface ConstructorProperties {
[key: string]: any;
}
}
// Base class of the GObject hierarchy.  NOTE: within this namespace this
// declaration shadows the global JavaScript `Object` — references to
// `Object` in the surrounding declarations mean this class.
export class Object {
static $gtype: GType<Object>;
constructor(properties?: Partial<Object.ConstructorProperties>, ...args: any[]);
_init(properties?: Partial<Object.ConstructorProperties>, ...args: any[]): void;
// Fields
g_type_instance: TypeInstance;
// Signals — generic string overloads first, then typed overloads for the
// built-in "notify" signal (property-change notification).
connect(id: string, callback: (...args: any[]) => any): number;
connect_after(id: string, callback: (...args: any[]) => any): number;
emit(id: string, ...args: any[]): void;
connect(signal: "notify", callback: (_source: this, pspec: ParamSpec) => void): number;
connect_after(signal: "notify", callback: (_source: this, pspec: ParamSpec) => void): number;
emit(signal: "notify", pspec: ParamSpec): void;
// Constructors
static newv(object_type: GType, parameters: Parameter[]): Object;
// Members
bind_property(source_property: string, target: Object, target_property: string, flags: BindingFlags): Binding;
// Two overloads: callback-based transforms vs. Closure-based transforms.
bind_property_full(
source_property: string,
target: Object,
target_property: string,
flags: BindingFlags,
transform_to?: BindingTransformFunc | null,
transform_from?: BindingTransformFunc | null,
notify?: GLib.DestroyNotify | null
): Binding;
bind_property_full(
source_property: string,
target: Object,
target_property: string,
flags: BindingFlags,
transform_to: Closure,
transform_from: Closure
): Binding;
force_floating(): void;
// freeze_notify/thaw_notify bracket a batch of property changes so that
// "notify" is emitted once per property afterwards.
freeze_notify(): void;
get_data(key: string): any | null;
get_property(property_name: string): any;
get_qdata(quark: GLib.Quark): any | null;
getv(names: string[], values: Value[]): void;
is_floating(): boolean;
notify(property_name: string): void;
notify_by_pspec(pspec: ParamSpec): void;
ref(): Object;
ref_sink(): Object;
run_dispose(): void;
set_data(key: string, data?: any | null): void;
set_property(property_name: string, value: any): void;
steal_data(key: string): any | null;
steal_qdata(quark: GLib.Quark): any | null;
thaw_notify(): void;
unref(): void;
watch_closure(closure: Closure): void;
// Virtual methods overridable by subclasses.
vfunc_constructed(): void;
vfunc_dispatch_properties_changed(n_pspecs: number, pspecs: ParamSpec): void;
vfunc_dispose(): void;
vfunc_finalize(): void;
vfunc_get_property(property_id: number, value: any, pspec: ParamSpec): void;
vfunc_notify(pspec: ParamSpec): void;
vfunc_set_property(property_id: number, value: any, pspec: ParamSpec): void;
static compat_control(what: number, data?: any | null): number;
static interface_find_property(g_iface: TypeInterface, property_name: string): ParamSpec;
static interface_install_property(g_iface: TypeInterface, pspec: ParamSpec): void;
static interface_list_properties(g_iface: TypeInterface): ParamSpec[];
static _classInit(klass: any): any;
// The members below are convenience additions not present in the C API —
// NOTE(review): presumably GJS-specific helpers; confirm against GJS docs.
disconnect(id: number): void;
set(properties: { [key: string]: any }): void;
block_signal_handler(id: number): any;
unblock_signal_handler(id: number): any;
stop_emission_by_name(detailedName: string): any;
}
export module ParamSpec {
export interface ConstructorProperties {
[key: string]: any;
}
}
// Abstract metadata object describing one GObject property (name, flags,
// value type, range/default).  The static factory methods below construct
// the concrete typed subclasses (ParamSpecInt, ParamSpecString, ...).
export abstract class ParamSpec {
static $gtype: GType<ParamSpec>;
constructor(properties?: Partial<ParamSpec.ConstructorProperties>, ...args: any[]);
_init(properties?: Partial<ParamSpec.ConstructorProperties>, ...args: any[]): void;
// Fields
g_type_instance: TypeInstance;
name: string;
flags: ParamFlags;
value_type: GType;
owner_type: GType;
// NOTE(review): typed `any` in the generated source; purpose unclear from
// here — likely the override/redirect helper. Confirm before relying on it.
static override: any;
// Members
get_blurb(): string;
get_default_value(): unknown;
get_name(): string;
get_name_quark(): GLib.Quark;
get_nick(): string;
get_qdata(quark: GLib.Quark): any | null;
get_redirect_target(): ParamSpec;
set_qdata(quark: GLib.Quark, data?: any | null): void;
sink(): void;
steal_qdata(quark: GLib.Quark): any | null;
vfunc_finalize(): void;
vfunc_value_set_default(value: any): void;
vfunc_value_validate(value: any): boolean;
vfunc_values_cmp(value1: any, value2: any): number;
static is_valid_name(name: string): boolean;
// --- Typed factories; each takes (name, nick, blurb, flags, ...) ---------
static char(
name: string,
nick: string,
blurb: string,
flags: ParamFlags,
minimum: number,
maximum: number,
defaultValue: number
): ParamSpecChar;
static uchar(
name: string,
nick: string,
blurb: string,
flags: ParamFlags,
minimum: number,
maximum: number,
defaultValue: number
): ParamSpecUChar;
static int(
name: string,
nick: string,
blurb: string,
flags: ParamFlags,
minimum: number,
maximum: number,
defaultValue: number
): ParamSpecInt;
static uint(
name: string,
nick: string,
blurb: string,
flags: ParamFlags,
minimum: number,
maximum: number,
defaultValue: number
): ParamSpecUInt;
static long(
name: string,
nick: string,
blurb: string,
flags: ParamFlags,
minimum: number,
maximum: number,
defaultValue: number
): ParamSpecLong;
static ulong(
name: string,
nick: string,
blurb: string,
flags: ParamFlags,
minimum: number,
maximum: number,
defaultValue: number
): ParamSpecULong;
static int64(
name: string,
nick: string,
blurb: string,
flags: ParamFlags,
minimum: number,
maximum: number,
defaultValue: number
): ParamSpecInt64;
static uint64(
name: string,
nick: string,
blurb: string,
flags: ParamFlags,
minimum: number,
maximum: number,
defaultValue: number
): ParamSpecUInt64;
static float(
name: string,
nick: string,
blurb: string,
flags: ParamFlags,
minimum: number,
maximum: number,
defaultValue: number
): ParamSpecFloat;
static boolean(
name: string,
nick: string,
blurb: string,
flags: ParamFlags,
defaultValue: boolean
): ParamSpecBoolean;
static flags(
name: string,
nick: string,
blurb: string,
flags: ParamFlags,
flagsType: any,
defaultValue: any
): ParamSpecFlags;
static enum(
name: string,
nick: string,
blurb: string,
flags: ParamFlags,
enumType: any,
defaultValue: any
): ParamSpecEnum;
static double(
name: string,
nick: string,
blurb: string,
flags: ParamFlags,
minimum: number,
maximum: number,
defaultValue: number
): ParamSpecDouble;
static string(name: string, nick: string, blurb: string, flags: ParamFlags, defaultValue: string): ParamSpecString;
static boxed(name: string, nick: string, blurb: string, flags: ParamFlags, boxedType: any): ParamSpecBoxed;
// `object` is generic so the resulting spec carries the bound object type.
static object<T>(name: any, nick: any, blurb: any, flags: any, objectType: GType<T>): ParamSpecObject<T>;
static param(name: string, nick: string, blurb: string, flags: ParamFlags, paramType: any): ParamSpecParam;
}
// ===========================================================================
// Concrete ParamSpec subclasses.  Each follows the same generated pattern:
// a merged module declaring ConstructorProperties plus a class adding the
// type-specific fields (minimum/maximum/default_value etc.).  The
// `__type__(arg: never): A` members are compile-time-only phantom markers
// that pin the generic parameter A; they can never be called (arg: never).
// ===========================================================================
export module ParamSpecBoolean {
export interface ConstructorProperties extends ParamSpec.ConstructorProperties {
[key: string]: any;
}
}
// Spec for a boolean-valued property.
export class ParamSpecBoolean extends ParamSpec {
static $gtype: GType<ParamSpecBoolean>;
constructor(properties?: Partial<ParamSpecBoolean.ConstructorProperties>, ...args: any[]);
_init(properties?: Partial<ParamSpecBoolean.ConstructorProperties>, ...args: any[]): void;
// Fields
default_value: boolean;
}
export module ParamSpecBoxed {
export interface ConstructorProperties<A = unknown> extends ParamSpec.ConstructorProperties {
[key: string]: any;
}
}
// Spec for a boxed-type property; A is the boxed payload type.
export class ParamSpecBoxed<A = unknown> extends ParamSpec {
static $gtype: GType<ParamSpecBoxed>;
constructor(properties?: Partial<ParamSpecBoxed.ConstructorProperties<A>>, ...args: any[]);
_init(properties?: Partial<ParamSpecBoxed.ConstructorProperties<A>>, ...args: any[]): void;
// Members
__type__(arg: never): A;
}
export module ParamSpecChar {
export interface ConstructorProperties extends ParamSpec.ConstructorProperties {
[key: string]: any;
}
}
// Spec for a signed 8-bit integer property.
export class ParamSpecChar extends ParamSpec {
static $gtype: GType<ParamSpecChar>;
constructor(properties?: Partial<ParamSpecChar.ConstructorProperties>, ...args: any[]);
_init(properties?: Partial<ParamSpecChar.ConstructorProperties>, ...args: any[]): void;
// Fields
minimum: number;
maximum: number;
default_value: number;
}
export module ParamSpecDouble {
export interface ConstructorProperties extends ParamSpec.ConstructorProperties {
[key: string]: any;
}
}
// Spec for a double-precision float property; epsilon is the comparison
// tolerance used when validating/compare-ing values.
export class ParamSpecDouble extends ParamSpec {
static $gtype: GType<ParamSpecDouble>;
constructor(properties?: Partial<ParamSpecDouble.ConstructorProperties>, ...args: any[]);
_init(properties?: Partial<ParamSpecDouble.ConstructorProperties>, ...args: any[]): void;
// Fields
minimum: number;
maximum: number;
default_value: number;
epsilon: number;
}
export module ParamSpecEnum {
export interface ConstructorProperties<A = unknown> extends ParamSpec.ConstructorProperties {
[key: string]: any;
}
}
// Spec for an enum-valued property; A tracks the enum type at compile time.
export class ParamSpecEnum<A = unknown> extends ParamSpec {
static $gtype: GType<ParamSpecEnum>;
constructor(properties?: Partial<ParamSpecEnum.ConstructorProperties<A>>, ...args: any[]);
_init(properties?: Partial<ParamSpecEnum.ConstructorProperties<A>>, ...args: any[]): void;
// Fields
enum_class: EnumClass;
default_value: number;
// Members
__type__(arg: never): A;
}
export module ParamSpecFlags {
export interface ConstructorProperties extends ParamSpec.ConstructorProperties {
[key: string]: any;
}
}
// Spec for a flags-valued (bitmask) property.
export class ParamSpecFlags extends ParamSpec {
static $gtype: GType<ParamSpecFlags>;
constructor(properties?: Partial<ParamSpecFlags.ConstructorProperties>, ...args: any[]);
_init(properties?: Partial<ParamSpecFlags.ConstructorProperties>, ...args: any[]): void;
// Fields
flags_class: FlagsClass;
default_value: number;
}
export module ParamSpecFloat {
export interface ConstructorProperties extends ParamSpec.ConstructorProperties {
[key: string]: any;
}
}
// Spec for a single-precision float property (with comparison epsilon).
export class ParamSpecFloat extends ParamSpec {
static $gtype: GType<ParamSpecFloat>;
constructor(properties?: Partial<ParamSpecFloat.ConstructorProperties>, ...args: any[]);
_init(properties?: Partial<ParamSpecFloat.ConstructorProperties>, ...args: any[]): void;
// Fields
minimum: number;
maximum: number;
default_value: number;
epsilon: number;
}
export module ParamSpecGType {
export interface ConstructorProperties extends ParamSpec.ConstructorProperties {
[key: string]: any;
}
}
// Spec for a property holding a GType; is_a_type constrains accepted types.
export class ParamSpecGType extends ParamSpec {
static $gtype: GType<ParamSpecGType>;
constructor(properties?: Partial<ParamSpecGType.ConstructorProperties>, ...args: any[]);
_init(properties?: Partial<ParamSpecGType.ConstructorProperties>, ...args: any[]): void;
// Fields
is_a_type: GType;
}
// --- Integer, object, and string ParamSpec subclasses ----------------------
export module ParamSpecInt {
export interface ConstructorProperties extends ParamSpec.ConstructorProperties {
[key: string]: any;
}
}
// Spec for a signed 32-bit integer property.
export class ParamSpecInt extends ParamSpec {
static $gtype: GType<ParamSpecInt>;
constructor(properties?: Partial<ParamSpecInt.ConstructorProperties>, ...args: any[]);
_init(properties?: Partial<ParamSpecInt.ConstructorProperties>, ...args: any[]): void;
// Fields
minimum: number;
maximum: number;
default_value: number;
}
export module ParamSpecInt64 {
export interface ConstructorProperties extends ParamSpec.ConstructorProperties {
[key: string]: any;
}
}
// Spec for a signed 64-bit integer property (carried as a JS number —
// NOTE(review): values beyond 2^53 lose precision; confirm binding behavior).
export class ParamSpecInt64 extends ParamSpec {
static $gtype: GType<ParamSpecInt64>;
constructor(properties?: Partial<ParamSpecInt64.ConstructorProperties>, ...args: any[]);
_init(properties?: Partial<ParamSpecInt64.ConstructorProperties>, ...args: any[]): void;
// Fields
minimum: number;
maximum: number;
default_value: number;
}
export module ParamSpecLong {
export interface ConstructorProperties extends ParamSpec.ConstructorProperties {
[key: string]: any;
}
}
// Spec for a (platform) long integer property.
export class ParamSpecLong extends ParamSpec {
static $gtype: GType<ParamSpecLong>;
constructor(properties?: Partial<ParamSpecLong.ConstructorProperties>, ...args: any[]);
_init(properties?: Partial<ParamSpecLong.ConstructorProperties>, ...args: any[]): void;
// Fields
minimum: number;
maximum: number;
default_value: number;
}
export module ParamSpecObject {
export interface ConstructorProperties<A = unknown> extends ParamSpec.ConstructorProperties {
[key: string]: any;
}
}
// Spec for an object-valued property; A tracks the object class.
export class ParamSpecObject<A = unknown> extends ParamSpec {
static $gtype: GType<ParamSpecObject>;
constructor(properties?: Partial<ParamSpecObject.ConstructorProperties<A>>, ...args: any[]);
_init(properties?: Partial<ParamSpecObject.ConstructorProperties<A>>, ...args: any[]): void;
// Members
__type__(arg: never): A;
}
export module ParamSpecOverride {
export interface ConstructorProperties extends ParamSpec.ConstructorProperties {
[key: string]: any;
}
}
// Spec that redirects to a property of a parent class or interface; adds
// no extra fields of its own.
export class ParamSpecOverride extends ParamSpec {
static $gtype: GType<ParamSpecOverride>;
constructor(properties?: Partial<ParamSpecOverride.ConstructorProperties>, ...args: any[]);
_init(properties?: Partial<ParamSpecOverride.ConstructorProperties>, ...args: any[]): void;
}
export module ParamSpecParam {
export interface ConstructorProperties extends ParamSpec.ConstructorProperties {
[key: string]: any;
}
}
// Spec for a property whose value is itself a ParamSpec.
export class ParamSpecParam extends ParamSpec {
static $gtype: GType<ParamSpecParam>;
constructor(properties?: Partial<ParamSpecParam.ConstructorProperties>, ...args: any[]);
_init(properties?: Partial<ParamSpecParam.ConstructorProperties>, ...args: any[]): void;
}
export module ParamSpecPointer {
export interface ConstructorProperties extends ParamSpec.ConstructorProperties {
[key: string]: any;
}
}
// Spec for an opaque pointer-valued property.
export class ParamSpecPointer extends ParamSpec {
static $gtype: GType<ParamSpecPointer>;
constructor(properties?: Partial<ParamSpecPointer.ConstructorProperties>, ...args: any[]);
_init(properties?: Partial<ParamSpecPointer.ConstructorProperties>, ...args: any[]): void;
}
export module ParamSpecString {
export interface ConstructorProperties extends ParamSpec.ConstructorProperties {
[key: string]: any;
}
}
// Spec for a string-valued property.  The three trailing `number` fields
// correspond to C bitfield/char members (substitutor char, fold/ensure
// flags) — NOTE(review): exact semantics come from GParamSpecString; confirm.
export class ParamSpecString extends ParamSpec {
static $gtype: GType<ParamSpecString>;
constructor(properties?: Partial<ParamSpecString.ConstructorProperties>, ...args: any[]);
_init(properties?: Partial<ParamSpecString.ConstructorProperties>, ...args: any[]): void;
// Fields
default_value: string;
cset_first: string;
cset_nth: string;
substitutor: number;
null_fold_if_empty: number;
ensure_non_null: number;
}
// --- Unsigned, unichar, array, and variant ParamSpec subclasses ------------
export module ParamSpecUChar {
export interface ConstructorProperties extends ParamSpec.ConstructorProperties {
[key: string]: any;
}
}
// Spec for an unsigned 8-bit integer property.
export class ParamSpecUChar extends ParamSpec {
static $gtype: GType<ParamSpecUChar>;
constructor(properties?: Partial<ParamSpecUChar.ConstructorProperties>, ...args: any[]);
_init(properties?: Partial<ParamSpecUChar.ConstructorProperties>, ...args: any[]): void;
// Fields
minimum: number;
maximum: number;
default_value: number;
}
export module ParamSpecUInt {
export interface ConstructorProperties extends ParamSpec.ConstructorProperties {
[key: string]: any;
}
}
// Spec for an unsigned 32-bit integer property.
export class ParamSpecUInt extends ParamSpec {
static $gtype: GType<ParamSpecUInt>;
constructor(properties?: Partial<ParamSpecUInt.ConstructorProperties>, ...args: any[]);
_init(properties?: Partial<ParamSpecUInt.ConstructorProperties>, ...args: any[]): void;
// Fields
minimum: number;
maximum: number;
default_value: number;
}
export module ParamSpecUInt64 {
export interface ConstructorProperties extends ParamSpec.ConstructorProperties {
[key: string]: any;
}
}
// Spec for an unsigned 64-bit integer property (JS number — precision caveat
// as for ParamSpecInt64).
export class ParamSpecUInt64 extends ParamSpec {
static $gtype: GType<ParamSpecUInt64>;
constructor(properties?: Partial<ParamSpecUInt64.ConstructorProperties>, ...args: any[]);
_init(properties?: Partial<ParamSpecUInt64.ConstructorProperties>, ...args: any[]): void;
// Fields
minimum: number;
maximum: number;
default_value: number;
}
export module ParamSpecULong {
export interface ConstructorProperties extends ParamSpec.ConstructorProperties {
[key: string]: any;
}
}
// Spec for an unsigned (platform) long integer property.
export class ParamSpecULong extends ParamSpec {
static $gtype: GType<ParamSpecULong>;
constructor(properties?: Partial<ParamSpecULong.ConstructorProperties>, ...args: any[]);
_init(properties?: Partial<ParamSpecULong.ConstructorProperties>, ...args: any[]): void;
// Fields
minimum: number;
maximum: number;
default_value: number;
}
export module ParamSpecUnichar {
export interface ConstructorProperties extends ParamSpec.ConstructorProperties {
[key: string]: any;
}
}
// Spec for a Unicode code point property (stored as a number).
export class ParamSpecUnichar extends ParamSpec {
static $gtype: GType<ParamSpecUnichar>;
constructor(properties?: Partial<ParamSpecUnichar.ConstructorProperties>, ...args: any[]);
_init(properties?: Partial<ParamSpecUnichar.ConstructorProperties>, ...args: any[]): void;
// Fields
default_value: number;
}
export module ParamSpecValueArray {
export interface ConstructorProperties extends ParamSpec.ConstructorProperties {
[key: string]: any;
}
}
// Spec for a GValueArray property; element_spec describes each element and
// fixed_n_elements (when non-zero, presumably) fixes the array length —
// NOTE(review): confirm zero-means-unbounded against GObject docs.
export class ParamSpecValueArray extends ParamSpec {
static $gtype: GType<ParamSpecValueArray>;
constructor(properties?: Partial<ParamSpecValueArray.ConstructorProperties>, ...args: any[]);
_init(properties?: Partial<ParamSpecValueArray.ConstructorProperties>, ...args: any[]): void;
// Fields
element_spec: ParamSpec;
fixed_n_elements: number;
}
export module ParamSpecVariant {
export interface ConstructorProperties extends ParamSpec.ConstructorProperties {
[key: string]: any;
}
}
// Spec for a GLib.Variant property constrained to a VariantType.
export class ParamSpecVariant extends ParamSpec {
static $gtype: GType<ParamSpecVariant>;
constructor(properties?: Partial<ParamSpecVariant.ConstructorProperties>, ...args: any[]);
_init(properties?: Partial<ParamSpecVariant.ConstructorProperties>, ...args: any[]): void;
// Fields
type: GLib.VariantType;
default_value: GLib.Variant;
}
export module TypeModule {
export interface ConstructorProperties extends Object.ConstructorProperties {
[key: string]: any;
}
}
// Abstract base for dynamically loadable type providers (plugins).  It both
// extends Object and implements the TypePlugin interface, whose members are
// re-declared in the "Implemented Members" section below.
export abstract class TypeModule extends Object implements TypePlugin {
static $gtype: GType<TypeModule>;
constructor(properties?: Partial<TypeModule.ConstructorProperties>, ...args: any[]);
_init(properties?: Partial<TypeModule.ConstructorProperties>, ...args: any[]): void;
// Fields
use_count: number;
type_infos: any[];
interface_infos: any[];
name: string;
// Members
add_interface(instance_type: GType, interface_type: GType, interface_info: InterfaceInfo): void;
register_enum(name: string, const_static_values: EnumValue): GType;
register_flags(name: string, const_static_values: FlagsValue): GType;
register_type(parent_type: GType, type_name: string, type_info: TypeInfo, flags: TypeFlags): GType;
set_name(name: string): void;
unuse(): void;
// Two overloads: the real zero-argument use(), plus a never-typed catch-all
// that makes any extra-argument call a compile error (generated pattern).
use(): boolean;
use(...args: never[]): never;
vfunc_load(): boolean;
vfunc_unload(): void;
// Implemented Members (from TypePlugin)
complete_interface_info(instance_type: GType, interface_type: GType, info: InterfaceInfo): void;
complete_type_info(g_type: GType, info: TypeInfo, value_table: TypeValueTable): void;
}
// C-function closure record.  The static marshal_RET__ARGS methods are the
// stock marshallers that adapt a Closure invocation to a specific C callback
// signature; they all share one shape: (closure, return_value,
// n_param_values, param_values, invocation_hint?, marshal_data?).  The name
// encodes return type and argument types (e.g. BOOLEAN__FLAGS = returns
// boolean, takes one flags argument).
export class CClosure {
static $gtype: GType<CClosure>;
constructor(copy: CClosure);
// Fields
closure: Closure;
callback: any;
// Members
static marshal_BOOLEAN__BOXED_BOXED(
closure: Closure,
return_value: any,
n_param_values: number,
param_values: any,
invocation_hint?: any | null,
marshal_data?: any | null
): void;
static marshal_BOOLEAN__FLAGS(
closure: Closure,
return_value: any,
n_param_values: number,
param_values: any,
invocation_hint?: any | null,
marshal_data?: any | null
): void;
static marshal_STRING__OBJECT_POINTER(
closure: Closure,
return_value: any,
n_param_values: number,
param_values: any,
invocation_hint?: any | null,
marshal_data?: any | null
): void;
static marshal_VOID__BOOLEAN(
closure: Closure,
return_value: any,
n_param_values: number,
param_values: any,
invocation_hint?: any | null,
marshal_data?: any | null
): void;
static marshal_VOID__BOXED(
closure: Closure,
return_value: any,
n_param_values: number,
param_values: any,
invocation_hint?: any | null,
marshal_data?: any | null
): void;
static marshal_VOID__CHAR(
closure: Closure,
return_value: any,
n_param_values: number,
param_values: any,
invocation_hint?: any | null,
marshal_data?: any | null
): void;
static marshal_VOID__DOUBLE(
closure: Closure,
return_value: any,
n_param_values: number,
param_values: any,
invocation_hint?: any | null,
marshal_data?: any | null
): void;
static marshal_VOID__ENUM(
closure: Closure,
return_value: any,
n_param_values: number,
param_values: any,
invocation_hint?: any | null,
marshal_data?: any | null
): void;
static marshal_VOID__FLAGS(
closure: Closure,
return_value: any,
n_param_values: number,
param_values: any,
invocation_hint?: any | null,
marshal_data?: any | null
): void;
static marshal_VOID__FLOAT(
closure: Closure,
return_value: any,
n_param_values: number,
param_values: any,
invocation_hint?: any | null,
marshal_data?: any | null
): void;
static marshal_VOID__INT(
closure: Closure,
return_value: any,
n_param_values: number,
param_values: any,
invocation_hint?: any | null,
marshal_data?: any | null
): void;
static marshal_VOID__LONG(
closure: Closure,
return_value: any,
n_param_values: number,
param_values: any,
invocation_hint?: any | null,
marshal_data?: any | null
): void;
static marshal_VOID__OBJECT(
closure: Closure,
return_value: any,
n_param_values: number,
param_values: any,
invocation_hint?: any | null,
marshal_data?: any | null
): void;
static marshal_VOID__PARAM(
closure: Closure,
return_value: any,
n_param_values: number,
param_values: any,
invocation_hint?: any | null,
marshal_data?: any | null
): void;
static marshal_VOID__POINTER(
closure: Closure,
return_value: any,
n_param_values: number,
param_values: any,
invocation_hint?: any | null,
marshal_data?: any | null
): void;
static marshal_VOID__STRING(
closure: Closure,
return_value: any,
n_param_values: number,
param_values: any,
invocation_hint?: any | null,
marshal_data?: any | null
): void;
static marshal_VOID__UCHAR(
closure: Closure,
return_value: any,
n_param_values: number,
param_values: any,
invocation_hint?: any | null,
marshal_data?: any | null
): void;
static marshal_VOID__UINT(
closure: Closure,
return_value: any,
n_param_values: number,
param_values: any,
invocation_hint?: any | null,
marshal_data?: any | null
): void;
static marshal_VOID__UINT_POINTER(
closure: Closure,
return_value: any,
n_param_values: number,
param_values: any,
invocation_hint?: any | null,
marshal_data?: any | null
): void;
static marshal_VOID__ULONG(
closure: Closure,
return_value: any,
n_param_values: number,
param_values: any,
invocation_hint?: any | null,
marshal_data?: any | null
): void;
static marshal_VOID__VARIANT(
closure: Closure,
return_value: any,
n_param_values: number,
param_values: any,
invocation_hint?: any | null,
marshal_data?: any | null
): void;
static marshal_VOID__VOID(
closure: Closure,
return_value: any,
n_param_values: number,
param_values: any,
invocation_hint?: any | null,
marshal_data?: any | null
): void;
// Libffi-based generic marshaller usable with any signature; note the
// differently named second parameter (return_gvalue) in the generated API.
static marshal_generic(
closure: Closure,
return_gvalue: any,
n_param_values: number,
param_values: any,
invocation_hint?: any | null,
marshal_data?: any | null
): void;
}
// ===========================================================================
// Boxed record classes.  Each mirrors a C struct: a copy constructor taking
// an existing instance, plus public fields matching the struct members.
// ===========================================================================

// Pairs a user-data pointer with the ClosureNotify to call on invalidation.
export class ClosureNotifyData {
static $gtype: GType<ClosureNotifyData>;
constructor(copy: ClosureNotifyData);
// Fields
data: any;
notify: ClosureNotify;
}
// Class structure of an enum type: value range plus the table of values.
// NOTE(review): `values` is typed as a single EnumValue though n_values
// suggests an array in C — generated binding quirk; confirm before use.
export class EnumClass {
static $gtype: GType<EnumClass>;
constructor(copy: EnumClass);
// Fields
g_type_class: TypeClass;
minimum: number;
maximum: number;
n_values: number;
values: EnumValue;
}
// One registered enum member: numeric value plus canonical name/nickname.
// Unlike most records here, it can also be built from a property bag.
export class EnumValue {
static $gtype: GType<EnumValue>;
constructor(
properties?: Partial<{
value?: number;
value_name?: string;
value_nick?: string;
}>
);
constructor(copy: EnumValue);
// Fields
value: number;
value_name: string;
value_nick: string;
}
// Class structure of a flags type; mask is the union of all valid bits.
// NOTE(review): same single-value `values` quirk as EnumClass.
export class FlagsClass {
static $gtype: GType<FlagsClass>;
constructor(copy: FlagsClass);
// Fields
g_type_class: TypeClass;
mask: number;
n_values: number;
values: FlagsValue;
}
// One registered flags member: bit value plus canonical name/nickname.
export class FlagsValue {
static $gtype: GType<FlagsValue>;
constructor(
properties?: Partial<{
value?: number;
value_name?: string;
value_nick?: string;
}>
);
constructor(copy: FlagsValue);
// Fields
value: number;
value_name: string;
value_nick: string;
}
export class InterfaceInfo {
static $gtype: GType<InterfaceInfo>;
constructor(copy: InterfaceInfo);
// Fields
interface_init: InterfaceInitFunc;
interface_finalize: InterfaceFinalizeFunc;
interface_data: any;
}
export class ObjectConstructParam {
static $gtype: GType<ObjectConstructParam>;
constructor(copy: ObjectConstructParam);
// Fields
pspec: ParamSpec;
value: Value;
}
/**
 * A pool mapping owner GTypes to their registered ParamSpecs.
 * Unlike the struct mirrors above, this type exposes real methods.
 */
export class ParamSpecPool {
    static $gtype: GType<ParamSpecPool>;
    constructor(copy: ParamSpecPool);
    // Members
    insert(pspec: ParamSpec, owner_type: GType): void;
    list(owner_type: GType): ParamSpec[];
    list_owned(owner_type: GType): ParamSpec[];
    // `walk_ancestors` controls whether parent types are searched as well.
    lookup(param_name: string, owner_type: GType, walk_ancestors: boolean): ParamSpec;
    remove(pspec: ParamSpec): void;
    static new(type_prefixing: boolean): ParamSpecPool;
}
/** Size/prealloc/value-type info used when registering a ParamSpec type. */
export class ParamSpecTypeInfo {
    static $gtype: GType<ParamSpecTypeInfo>;
    constructor(copy: ParamSpecTypeInfo);
    // Fields
    instance_size: number;
    n_preallocs: number;
    value_type: GType;
}
/** A named Value; legacy construction-parameter struct (mirrors C `GParameter`). */
export class Parameter {
    static $gtype: GType<Parameter>;
    constructor(copy: Parameter);
    // Fields
    name: string;
    value: Value;
}
/** Context handed to signal accumulators/marshallers about the emission in progress. */
export class SignalInvocationHint {
    static $gtype: GType<SignalInvocationHint>;
    constructor(copy: SignalInvocationHint);
    // Fields
    signal_id: number;
    detail: GLib.Quark;
    run_type: SignalFlags;
}
/** Result record of querying a signal's registration (name, types, flags). */
export class SignalQuery {
    static $gtype: GType<SignalQuery>;
    constructor(copy: SignalQuery);
    // Fields
    signal_id: number;
    signal_name: string;
    itype: GType;
    signal_flags: SignalFlags;
    return_type: GType;
    n_params: number;
    param_types: GType[];
}
/**
 * Base of all classed types in the GType system; wraps the class struct
 * and exposes class-private-data and ref/unref helpers.
 */
export class TypeClass {
    static $gtype: GType<TypeClass>;
    constructor(copy: TypeClass);
    // Fields
    g_type: GType;
    // Members
    add_private(private_size: number): void;
    get_private(private_type: GType): any | null;
    peek_parent(): TypeClass;
    unref(): void;
    static adjust_private_offset(g_class: any | null, private_size_or_offset: number): void;
    // `peek` variants do not take a reference; `ref` does.
    static peek(type: GType): TypeClass;
    static peek_static(type: GType): TypeClass;
    static ref(type: GType): TypeClass;
}
/** Flags supplied when registering a fundamental type. */
export class TypeFundamentalInfo {
    static $gtype: GType<TypeFundamentalInfo>;
    constructor(copy: TypeFundamentalInfo);
    // Fields
    type_flags: TypeFundamentalFlags;
}
/** Complete registration record for a new GType: sizes plus lifecycle callbacks. */
export class TypeInfo {
    static $gtype: GType<TypeInfo>;
    constructor(copy: TypeInfo);
    // Fields
    class_size: number;
    base_init: BaseInitFunc;
    base_finalize: BaseFinalizeFunc;
    class_init: ClassInitFunc;
    class_finalize: ClassFinalizeFunc;
    class_data: any;
    instance_size: number;
    n_preallocs: number;
    instance_init: InstanceInitFunc;
    value_table: TypeValueTable;
}
/** An instance of any classed type; links the instance to its class struct. */
export class TypeInstance {
    static $gtype: GType<TypeInstance>;
    constructor(copy: TypeInstance);
    // Fields
    g_class: TypeClass;
    // Members
    get_private(private_type: GType): any | null;
}
/** Base of all interface vtables; carries the interface and instance GTypes. */
export class TypeInterface {
    static $gtype: GType<TypeInterface>;
    constructor(copy: TypeInterface);
    // Fields
    g_type: GType;
    g_instance_type: GType;
    // Members
    peek_parent(): TypeInterface;
    static add_prerequisite(interface_type: GType, prerequisite_type: GType): void;
    static get_plugin(instance_type: GType, interface_type: GType): TypePlugin;
    static peek(instance_class: TypeClass, iface_type: GType): TypeInterface;
    static prerequisites(interface_type: GType): GType[];
}
/** Vtable for the TypePlugin interface (use/unuse and completion callbacks). */
export class TypePluginClass {
    static $gtype: GType<TypePluginClass>;
    constructor(copy: TypePluginClass);
    // Fields
    base_iface: TypeInterface;
    use_plugin: TypePluginUse;
    unuse_plugin: TypePluginUnuse;
    complete_type_info: TypePluginCompleteTypeInfo;
    complete_interface_info: TypePluginCompleteInterfaceInfo;
}
/** Result record of querying a type's registration (name and struct sizes). */
export class TypeQuery {
    static $gtype: GType<TypeQuery>;
    constructor(copy: TypeQuery);
    // Fields
    type: GType;
    type_name: string;
    class_size: number;
    instance_size: number;
}
/** Collect/lcopy format strings for a type's GValue table. */
export class TypeValueTable {
    static $gtype: GType<TypeValueTable>;
    constructor(
        properties?: Partial<{
            collect_format?: string;
            lcopy_format?: string;
        }>
    );
    constructor(copy: TypeValueTable);
    // Fields
    collect_format: string;
    lcopy_format: string;
}
// GValue is fully dynamic in the GJS bindings, so it is typed as `any` here.
export type Value = any;
/** Growable container of Values; mutating members return the array for chaining. */
export class ValueArray {
    static $gtype: GType<ValueArray>;
    constructor(n_prealloced: number);
    constructor(copy: ValueArray);
    // Fields
    n_values: number;
    values: Value;
    n_prealloced: number;
    // Constructors
    // Quoted because `new` is a reserved word in TypeScript/JavaScript.
    static ["new"](n_prealloced: number): ValueArray;
    // Members
    append(value?: Value | null): ValueArray;
    copy(): ValueArray;
    get_nth(index_: number): unknown;
    insert(index_: number, value?: Value | null): ValueArray;
    prepend(value?: Value | null): ValueArray;
    remove(index_: number): ValueArray;
    // Two overloads: plain comparator or comparator taking user data.
    sort(compare_func: GLib.CompareFunc): ValueArray;
    sort(compare_func: GLib.CompareDataFunc): ValueArray;
}
/** Opaque weak-reference holder; only copyable from this binding. */
export class WeakRef {
    static $gtype: GType<WeakRef>;
    constructor(copy: WeakRef);
}
/** Opaque C-value union used internally by the type system. */
export class TypeCValue {
    static $gtype: GType<TypeCValue>;
    constructor(copy: TypeCValue);
}
/** The data union embedded in GValue; one field per representable primitive. */
export class _Value__data__union {
    static $gtype: GType<_Value__data__union>;
    constructor(
        properties?: Partial<{
            v_int?: number;
            v_uint?: number;
            v_long?: number;
            v_ulong?: number;
            v_int64?: number;
            v_uint64?: number;
            v_float?: number;
            v_double?: number;
            v_pointer?: any;
        }>
    );
    constructor(copy: _Value__data__union);
    // Fields
    v_int: number;
    v_uint: number;
    v_long: number;
    v_ulong: number;
    v_int64: number;
    v_uint64: number;
    v_float: number;
    v_double: number;
    v_pointer: any;
}
// TypePlugin is modeled as an interface (prototype + namespace constant)
// rather than a class, because it is a GObject interface, not a classed type.
export interface TypePluginNamespace {
    $gtype: GType<TypePlugin>;
    prototype: TypePluginPrototype;
}
export type TypePlugin = TypePluginPrototype;
export interface TypePluginPrototype extends Object {
    // Members
    complete_interface_info(instance_type: GType, interface_type: GType, info: InterfaceInfo): void;
    complete_type_info(g_type: GType, info: TypeInfo, value_table: TypeValueTable): void;
    unuse(): void;
    use(): void;
}
export const TypePlugin: TypePluginNamespace;
// C marshaller aliases; the va_list variant has no JS representation.
export type SignalCMarshaller = ClosureMarshal;
export type SignalCVaMarshaller = unknown;
export type Type = number;
/** Object form of the signal-matching criteria accepted by the `*_matched` helpers below. */
export type SignalMatch = SignalMatchPrototype;
export interface SignalMatchPrototype {
    // Properties
    signalId: string;
    detail: string;
    func: (...args: any[]) => any;
}
// Branded type: the phantom `__type__` member ties a GType to the JS type it
// produces, without any runtime cost (`never` makes it uncallable).
export type GType<T = unknown> = { __type__(arg: never): T };
export type Closure<R = any, P = any> = (...args: P[]) => R;
// --- Signal handler lookup/block/unblock/disconnect helpers ----------------
// The `*_matched` functions each have two call shapes: a convenience form
// taking a SignalMatch object, and the full C-style positional form. The
// rest-tuple overload exists so either shape type-checks through a single
// spread call site; the `_real_*` variants are the underlying positional
// implementations exposed by the GJS override.
export function signal_handlers_block_by_func(instance: Object, func: (...args: any[]) => any): void;
export function signal_handlers_unblock_by_func(instance: Object, func: (...args: any[]) => any): void;
export function signal_handlers_disconnect_by_func(instance: Object, func: (...args: any[]) => any): void;
export function signal_handler_find(
    ...args:
        | [Object, SignalMatch]
        | [Object, SignalMatchType, number, GLib.Quark, Closure | null, object | null, object | null]
): number;
export function signal_handler_find(instance: Object, match: SignalMatch): number;
export function signal_handler_find(
    instance: Object,
    match: SignalMatchType,
    signal_id: number,
    detail: GLib.Quark,
    closure: Closure | null,
    func: object | null,
    object: object | null
): number;
export function _real_signal_handler_find(
    instance: Object,
    match: SignalMatchType,
    signal_id: number,
    detail: GLib.Quark,
    closure: Closure | null,
    func: object | null,
    object: object | null
): number;
export function signal_handler_block_matched(
    ...args:
        | [Object, SignalMatch]
        | [Object, SignalMatchType, number, GLib.Quark, Closure | null, object | null, object | null]
): number;
export function signal_handler_block_matched(instance: Object, match: SignalMatch): number;
export function signal_handler_block_matched(
    instance: Object,
    match: SignalMatchType,
    signal_id: number,
    detail: GLib.Quark,
    closure: Closure | null,
    func: object | null,
    object: object | null
): number;
export function _real_signal_handler_block_matched(
    instance: Object,
    match: SignalMatchType,
    signal_id: number,
    detail: GLib.Quark,
    closure: Closure | null,
    func: object | null,
    object: object | null
): number;
// NOTE(review): the names below ("block_disconnect_matched",
// "block_unblock_matched") look generator-mangled relative to the usual
// GObject `signal_handlers_disconnect_matched` / `signal_handlers_unblock_matched`
// — confirm against the generator before relying on them.
export function signal_handler_block_disconnect_matched(
    ...args:
        | [Object, SignalMatch]
        | [Object, SignalMatchType, number, GLib.Quark, Closure | null, object | null, object | null]
): number;
export function signal_handler_block_disconnect_matched(instance: Object, match: SignalMatch): number;
export function signal_handler_block_disconnect_matched(
    instance: Object,
    match: SignalMatchType,
    signal_id: number,
    detail: GLib.Quark,
    closure: Closure | null,
    func: object | null,
    object: object | null
): number;
export function _real_signal_handler_block_disconnect_matched(
    instance: Object,
    match: SignalMatchType,
    signal_id: number,
    detail: GLib.Quark,
    closure: Closure | null,
    func: object | null,
    object: object | null
): number;
export function signal_handler_block_unblock_matched(
    ...args:
        | [Object, SignalMatch]
        | [Object, SignalMatchType, number, GLib.Quark, Closure | null, object | null, object | null]
): number;
export function signal_handler_block_unblock_matched(instance: Object, match: SignalMatch): number;
export function signal_handler_block_unblock_matched(
    instance: Object,
    match: SignalMatchType,
    signal_id: number,
    detail: GLib.Quark,
    closure: Closure | null,
    func: object | null,
    object: object | null
): number;
export function _real_signal_handler_block_unblock_matched(
    instance: Object,
    match: SignalMatchType,
    signal_id: number,
    detail: GLib.Quark,
    closure: Closure | null,
    func: object | null,
    object: object | null
): number;
// GJS OVERRIDES
// Correctly types interface checks.
// Acts as a type guard: a true result narrows `obj` to the checked type.
export function type_is_a<T extends Object>(obj: Object, is_a_type: { $gtype: GType<T> }): obj is T;
/** Base used by GJS to define GObject interfaces from JS classes. */
export class Interface {
    static _classInit: (klass: any) => any;
    __name__: string;
    _construct: (params: any, ...otherArgs: any[]) => any;
    _init: (params: any) => void;
    $gtype?: GType;
}
export function signal_connect(object: Object, name: string, handler: Function): number;
export function signal_connect_after(object: Object, name: string, handler: Function): number;
export function signal_emit_by_name(object: Object, name: string, ...args: any[]): void;
// Well-known symbols read by registerClass from GTK widget class definitions.
export const __gtkCssName__: unique symbol;
export const __gtkTemplate__: unique symbol;
export const __gtkChildren__: unique symbol;
export const __gtkInternalChildren__: unique symbol;
// Expose GObject static properties for ES6 classes
export const GTypeName: unique symbol;
export const requires: unique symbol;
export const interfaces: unique symbol;
export const properties: unique symbol;
export const signals: unique symbol;
/** How multiple signal-handler return values are combined. */
export enum AccumulatorType {
    NONE,
    FIRST_WINS,
    TRUE_HANDLED,
}
export class NotImplementedError extends Error {}
export let gtypeNameBasedOnJSPath: boolean;
export let gtypeNameBasedOnJSPath: boolean;
export let TYPE_BOOLEAN: GType<boolean>;
export let Boolean: BooleanConstructor;
export let TYPE_ENUM: GType<number>;
export let TYPE_FLAGS: GType<number>;
export let TYPE_DOUBLE: GType<number>;
export let Double: NumberConstructor;
export let TYPE_STRING: GType<string>;
export let String: StringConstructor;
declare global {
interface BooleanConstructor {
$gtype: GType<boolean>;
}
interface NumberConstructor {
$gtype: GType<number>;
}
interface StringConstructor {
$gtype: GType<string>;
}
}
export let TYPE_NONE: GType<undefined>;
export let TYPE_POINTER: GType<undefined>;
export let TYPE_BOXED: GType<unknown>;
export let TYPE_PARAM: GType<unknown>;
export let TYPE_INTERFACE: GType<unknown>;
export let TYPE_OBJECT: GType<object>;
export let TYPE_VARIANT: GType<GLib.Variant>;
export let TYPE_INT: GType<number>;
export let TYPE_UINT: GType<number>;
export let TYPE_INT64: GType<number>;
export let TYPE_UINT64: GType<number>;
// Simple overload: register a plain class with no properties/signals/interfaces.
export function registerClass<P extends {}, T extends new (...args: any[]) => P>(klass: T): RegisteredClass<T, {}, []>;
/**
 * Maps a ParamSpec subtype to the JS type of the property it declares.
 * The chain is ordered most-specific-first; anything unrecognized falls
 * through to `any`.
 */
export type Property<K extends ParamSpec> = K extends ParamSpecBoolean
    ? boolean
    : K extends ParamSpecDouble | ParamSpecInt | ParamSpecUInt | ParamSpecFloat | ParamSpecLong
    ? number
    : K extends ParamSpecInt64 | ParamSpecUInt64 | ParamSpecULong
    ? number
    : K extends ParamSpecFlags
    ? number
    : K extends ParamSpecString | ParamSpecUnichar
    ? string
    : K extends ParamSpecValueArray
    ? any[]
    : K extends ParamSpecObject<infer T>
    ? T
    : K extends ParamSpecEnum<infer E>
    ? E
    : K extends ParamSpecBoxed<infer B>
    ? B
    : K extends ParamSpecVariant
    ? GLib.Variant
    : any;
/**
 * Converts a ParamSpec map into a property map, dropping any key the
 * prototype already declares (the `Omit` keeps prototype members authoritative).
 */
export type Properties<Prototype extends {}, Properties extends { [key: string]: ParamSpec }> = Omit<
    {
        [key in keyof Properties | keyof Prototype]: key extends keyof Prototype
            ? never
            : key extends keyof Properties
            ? Property<Properties[key]>
            : never;
    },
    keyof Prototype
>;
/** Shape of one entry in a `Signals` map passed to registerClass. */
export type SignalDefinition = {
    param_types?: readonly GType[];
    [key: string]: any;
};
// Standard distribute-then-infer trick: a union becomes an intersection via
// contravariant function-parameter inference.
type UnionToIntersection<T> = (T extends any ? (x: T) => any : never) extends (x: infer R) => any ? R : never;
// Extracts the instance type carried by each `{ $gtype }` interface entry.
type IFaces<Interfaces extends { $gtype: GType<any> }[]> = {
    [key in keyof Interfaces]: Interfaces[key] extends { $gtype: GType<infer I> } ? I : never;
};
/**
 * Instance shape of a registered class: declared properties (exposed under
 * both camelCase and snake_case spellings), all implemented interfaces,
 * and the original prototype.
 */
export type RegisteredPrototype<
    P extends {},
    Props extends { [key: string]: ParamSpec },
    Interfaces extends any[]
> = Properties<P, SnakeToCamel<Props> & SnakeToUnderscore<Props>> & UnionToIntersection<Interfaces[number]> & P;
// Template-literal helpers rewriting GObject "kebab-case" property names.
type SnakeToUnderscoreCase<S extends string> = S extends `${infer T}-${infer U}`
    ? `${T}_${SnakeToUnderscoreCase<U>}`
    : S extends `${infer T}`
    ? `${T}`
    : never;
type SnakeToCamelCase<S extends string> = S extends `${infer T}-${infer U}`
    ? `${Lowercase<T>}${SnakeToPascalCase<U>}`
    : S extends `${infer T}`
    ? `${Lowercase<T>}`
    : SnakeToPascalCase<S>;
type SnakeToPascalCase<S extends string> = string extends S
    ? string
    : S extends `${infer T}-${infer U}`
    ? `${Capitalize<Lowercase<T>>}${SnakeToPascalCase<U>}`
    : S extends `${infer T}`
    ? `${Capitalize<Lowercase<T>>}`
    : never;
type SnakeToCamel<T> = { [P in keyof T as P extends string ? SnakeToCamelCase<P> : P]: T[P] };
type SnakeToUnderscore<T> = { [P in keyof T as P extends string ? SnakeToUnderscoreCase<P> : P]: T[P] };
type Ctor = new (...a: any[]) => object;
type Init = { _init(...args: any[]): void };
/**
 * Constructor type produced by registerClass: constructor arguments come
 * from the prototype's `_init` when present, and instances have the
 * RegisteredPrototype shape above.
 */
export type RegisteredClass<
    T extends Ctor,
    Props extends { [key: string]: ParamSpec },
    Interfaces extends { $gtype: GType<any> }[]
> = T extends { prototype: infer P }
    ? {
          $gtype: GType<RegisteredClass<T, Props, IFaces<Interfaces>>>;
          new (...args: P extends Init ? Parameters<P["_init"]> : [void]): RegisteredPrototype<
              P,
              Props,
              IFaces<Interfaces>
          >;
          prototype: RegisteredPrototype<P, Props, IFaces<Interfaces>>;
      }
    : never;
/**
 * Full overload: register a class with metadata (type name/flags,
 * properties, signals, implemented interfaces, and GTK template options).
 * The generics thread the property and interface maps through to the
 * returned RegisteredClass so instances are precisely typed.
 */
export function registerClass<
    T extends Ctor,
    Props extends { [key: string]: ParamSpec },
    Interfaces extends { $gtype: GType }[],
    Sigs extends {
        [key: string]: {
            param_types?: readonly GType[];
            [key: string]: any;
        };
    }
>(
    options: {
        GTypeName?: string;
        GTypeFlags?: TypeFlags;
        Properties?: Props;
        Signals?: Sigs;
        Implements?: Interfaces;
        CssName?: string;
        Template?: string;
        Children?: string[];
        InternalChildren?: string[];
    },
    klass: T
): RegisteredClass<T, Props, Interfaces>;
import {
LROsPut200SucceededParameters,
LROsPatch200SucceededIgnoreHeadersParameters,
LROsPut201SucceededParameters,
LROsPost202ListParameters,
LROsPut200SucceededNoStateParameters,
LROsPut202Retry200Parameters,
LROsPut201CreatingSucceeded200Parameters,
LROsPut200UpdatingSucceeded204Parameters,
LROsPut201CreatingFailed200Parameters,
LROsPut200Acceptedcanceled200Parameters,
LROsPutNoHeaderInRetryParameters,
LROsPutAsyncRetrySucceededParameters,
LROsPutAsyncNoRetrySucceededParameters,
LROsPutAsyncRetryFailedParameters,
LROsPutAsyncNoRetrycanceledParameters,
LROsPutAsyncNoHeaderInRetryParameters,
LROsPutNonResourceParameters,
LROsPutAsyncNonResourceParameters,
LROsPutSubResourceParameters,
LROsPutAsyncSubResourceParameters,
LROsDeleteProvisioning202Accepted200SucceededParameters,
LROsDeleteProvisioning202DeletingFailed200Parameters,
LROsDeleteProvisioning202Deletingcanceled200Parameters,
LROsDelete204SucceededParameters,
LROsDelete202Retry200Parameters,
LROsDelete202NoRetry204Parameters,
LROsDeleteNoHeaderInRetryParameters,
LROsDeleteAsyncNoHeaderInRetryParameters,
LROsDeleteAsyncRetrySucceededParameters,
LROsDeleteAsyncNoRetrySucceededParameters,
LROsDeleteAsyncRetryFailedParameters,
LROsDeleteAsyncRetrycanceledParameters,
LROsPost200WithPayloadParameters,
LROsPost202Retry200Parameters,
LROsPost202NoRetry204Parameters,
LROsPostDoubleHeadersFinalLocationGetParameters,
LROsPostDoubleHeadersFinalAzureHeaderGetParameters,
LROsPostDoubleHeadersFinalAzureHeaderGetDefaultParameters,
LROsPostAsyncRetrySucceededParameters,
LROsPostAsyncNoRetrySucceededParameters,
LROsPostAsyncRetryFailedParameters,
LROsPostAsyncRetrycanceledParameters,
LRORetrysPut201CreatingSucceeded200Parameters,
LRORetrysPutAsyncRelativeRetrySucceededParameters,
LRORetrysDeleteProvisioning202Accepted200SucceededParameters,
LRORetrysDelete202Retry200Parameters,
LRORetrysDeleteAsyncRelativeRetrySucceededParameters,
LRORetrysPost202Retry200Parameters,
LRORetrysPostAsyncRelativeRetrySucceededParameters,
LrosaDsPutNonRetry400Parameters,
LrosaDsPutNonRetry201Creating400Parameters,
LrosaDsPutNonRetry201Creating400InvalidJsonParameters,
LrosaDsPutAsyncRelativeRetry400Parameters,
LrosaDsDeleteNonRetry400Parameters,
LrosaDsDelete202NonRetry400Parameters,
LrosaDsDeleteAsyncRelativeRetry400Parameters,
LrosaDsPostNonRetry400Parameters,
LrosaDsPost202NonRetry400Parameters,
LrosaDsPostAsyncRelativeRetry400Parameters,
LrosaDsPutError201NoProvisioningStatePayloadParameters,
LrosaDsPutAsyncRelativeRetryNoStatusParameters,
LrosaDsPutAsyncRelativeRetryNoStatusPayloadParameters,
LrosaDsDelete204SucceededParameters,
LrosaDsDeleteAsyncRelativeRetryNoStatusParameters,
LrosaDsPost202NoLocationParameters,
LrosaDsPostAsyncRelativeRetryNoPayloadParameters,
LrosaDsPut200InvalidJsonParameters,
LrosaDsPutAsyncRelativeRetryInvalidHeaderParameters,
LrosaDsPutAsyncRelativeRetryInvalidJsonPollingParameters,
LrosaDsDelete202RetryInvalidHeaderParameters,
LrosaDsDeleteAsyncRelativeRetryInvalidHeaderParameters,
LrosaDsDeleteAsyncRelativeRetryInvalidJsonPollingParameters,
LrosaDsPost202RetryInvalidHeaderParameters,
LrosaDsPostAsyncRelativeRetryInvalidHeaderParameters,
LrosaDsPostAsyncRelativeRetryInvalidJsonPollingParameters,
LROsCustomHeaderPutAsyncRetrySucceededParameters,
LROsCustomHeaderPut201CreatingSucceeded200Parameters,
LROsCustomHeaderPost202Retry200Parameters,
LROsCustomHeaderPostAsyncRetrySucceededParameters
} from "./parameters";
import {
LROsPut200Succeeded200Response,
LROsPut200Succeeded204Response,
LROsPut200SucceededdefaultResponse,
LROsPatch200SucceededIgnoreHeaders200Response,
LROsPatch200SucceededIgnoreHeadersdefaultResponse,
LROsPut201Succeeded201Response,
LROsPut201SucceededdefaultResponse,
LROsPost202List200Response,
LROsPost202List202Response,
LROsPost202ListdefaultResponse,
LROsPut200SucceededNoState200Response,
LROsPut200SucceededNoStatedefaultResponse,
LROsPut202Retry200202Response,
LROsPut202Retry200defaultResponse,
LROsPut201CreatingSucceeded200200Response,
LROsPut201CreatingSucceeded200201Response,
LROsPut201CreatingSucceeded200defaultResponse,
LROsPut200UpdatingSucceeded204200Response,
LROsPut200UpdatingSucceeded204defaultResponse,
LROsPut201CreatingFailed200200Response,
LROsPut201CreatingFailed200201Response,
LROsPut201CreatingFailed200defaultResponse,
LROsPut200Acceptedcanceled200200Response,
LROsPut200Acceptedcanceled200defaultResponse,
LROsPutNoHeaderInRetry202Response,
LROsPutNoHeaderInRetrydefaultResponse,
LROsPutAsyncRetrySucceeded200Response,
LROsPutAsyncRetrySucceededdefaultResponse,
LROsPutAsyncNoRetrySucceeded200Response,
LROsPutAsyncNoRetrySucceededdefaultResponse,
LROsPutAsyncRetryFailed200Response,
LROsPutAsyncRetryFaileddefaultResponse,
LROsPutAsyncNoRetrycanceled200Response,
LROsPutAsyncNoRetrycanceleddefaultResponse,
LROsPutAsyncNoHeaderInRetry201Response,
LROsPutAsyncNoHeaderInRetrydefaultResponse,
LROsPutNonResource202Response,
LROsPutNonResourcedefaultResponse,
LROsPutAsyncNonResource202Response,
LROsPutAsyncNonResourcedefaultResponse,
LROsPutSubResource202Response,
LROsPutSubResourcedefaultResponse,
LROsPutAsyncSubResource202Response,
LROsPutAsyncSubResourcedefaultResponse,
LROsDeleteProvisioning202Accepted200Succeeded200Response,
LROsDeleteProvisioning202Accepted200Succeeded202Response,
LROsDeleteProvisioning202Accepted200SucceededdefaultResponse,
LROsDeleteProvisioning202DeletingFailed200200Response,
LROsDeleteProvisioning202DeletingFailed200202Response,
LROsDeleteProvisioning202DeletingFailed200defaultResponse,
LROsDeleteProvisioning202Deletingcanceled200200Response,
LROsDeleteProvisioning202Deletingcanceled200202Response,
LROsDeleteProvisioning202Deletingcanceled200defaultResponse,
LROsDelete204Succeeded204Response,
LROsDelete204SucceededdefaultResponse,
LROsDelete202Retry200200Response,
LROsDelete202Retry200202Response,
LROsDelete202Retry200defaultResponse,
LROsDelete202NoRetry204200Response,
LROsDelete202NoRetry204202Response,
LROsDelete202NoRetry204defaultResponse,
LROsDeleteNoHeaderInRetry202Response,
LROsDeleteNoHeaderInRetry204Response,
LROsDeleteNoHeaderInRetrydefaultResponse,
LROsDeleteAsyncNoHeaderInRetry202Response,
LROsDeleteAsyncNoHeaderInRetry204Response,
LROsDeleteAsyncNoHeaderInRetrydefaultResponse,
LROsDeleteAsyncRetrySucceeded202Response,
LROsDeleteAsyncRetrySucceededdefaultResponse,
LROsDeleteAsyncNoRetrySucceeded202Response,
LROsDeleteAsyncNoRetrySucceededdefaultResponse,
LROsDeleteAsyncRetryFailed202Response,
LROsDeleteAsyncRetryFaileddefaultResponse,
LROsDeleteAsyncRetrycanceled202Response,
LROsDeleteAsyncRetrycanceleddefaultResponse,
LROsPost200WithPayload200Response,
LROsPost200WithPayload202Response,
LROsPost200WithPayloaddefaultResponse,
LROsPost202Retry200202Response,
LROsPost202Retry200defaultResponse,
LROsPost202NoRetry204202Response,
LROsPost202NoRetry204defaultResponse,
LROsPostDoubleHeadersFinalLocationGet202Response,
LROsPostDoubleHeadersFinalLocationGetdefaultResponse,
LROsPostDoubleHeadersFinalAzureHeaderGet202Response,
LROsPostDoubleHeadersFinalAzureHeaderGetdefaultResponse,
LROsPostDoubleHeadersFinalAzureHeaderGetDefault202Response,
LROsPostDoubleHeadersFinalAzureHeaderGetDefaultdefaultResponse,
LROsPostAsyncRetrySucceeded200Response,
LROsPostAsyncRetrySucceeded202Response,
LROsPostAsyncRetrySucceededdefaultResponse,
LROsPostAsyncNoRetrySucceeded200Response,
LROsPostAsyncNoRetrySucceeded202Response,
LROsPostAsyncNoRetrySucceededdefaultResponse,
LROsPostAsyncRetryFailed202Response,
LROsPostAsyncRetryFaileddefaultResponse,
LROsPostAsyncRetrycanceled202Response,
LROsPostAsyncRetrycanceleddefaultResponse,
LRORetrysPut201CreatingSucceeded200200Response,
LRORetrysPut201CreatingSucceeded200201Response,
LRORetrysPut201CreatingSucceeded200defaultResponse,
LRORetrysPutAsyncRelativeRetrySucceeded200Response,
LRORetrysPutAsyncRelativeRetrySucceededdefaultResponse,
LRORetrysDeleteProvisioning202Accepted200Succeeded200Response,
LRORetrysDeleteProvisioning202Accepted200Succeeded202Response,
LRORetrysDeleteProvisioning202Accepted200SucceededdefaultResponse,
LRORetrysDelete202Retry200202Response,
LRORetrysDelete202Retry200defaultResponse,
LRORetrysDeleteAsyncRelativeRetrySucceeded202Response,
LRORetrysDeleteAsyncRelativeRetrySucceededdefaultResponse,
LRORetrysPost202Retry200202Response,
LRORetrysPost202Retry200defaultResponse,
LRORetrysPostAsyncRelativeRetrySucceeded202Response,
LRORetrysPostAsyncRelativeRetrySucceededdefaultResponse,
LrosaDsPutNonRetry400200Response,
LrosaDsPutNonRetry400201Response,
LrosaDsPutNonRetry400defaultResponse,
LrosaDsPutNonRetry201Creating400200Response,
LrosaDsPutNonRetry201Creating400201Response,
LrosaDsPutNonRetry201Creating400defaultResponse,
LrosaDsPutNonRetry201Creating400InvalidJson200Response,
LrosaDsPutNonRetry201Creating400InvalidJson201Response,
LrosaDsPutNonRetry201Creating400InvalidJsondefaultResponse,
LrosaDsPutAsyncRelativeRetry400200Response,
LrosaDsPutAsyncRelativeRetry400defaultResponse,
LrosaDsDeleteNonRetry400202Response,
LrosaDsDeleteNonRetry400defaultResponse,
LrosaDsDelete202NonRetry400202Response,
LrosaDsDelete202NonRetry400defaultResponse,
LrosaDsDeleteAsyncRelativeRetry400202Response,
LrosaDsDeleteAsyncRelativeRetry400defaultResponse,
LrosaDsPostNonRetry400202Response,
LrosaDsPostNonRetry400defaultResponse,
LrosaDsPost202NonRetry400202Response,
LrosaDsPost202NonRetry400defaultResponse,
LrosaDsPostAsyncRelativeRetry400202Response,
LrosaDsPostAsyncRelativeRetry400defaultResponse,
LrosaDsPutError201NoProvisioningStatePayload200Response,
LrosaDsPutError201NoProvisioningStatePayload201Response,
LrosaDsPutError201NoProvisioningStatePayloaddefaultResponse,
LrosaDsPutAsyncRelativeRetryNoStatus200Response,
LrosaDsPutAsyncRelativeRetryNoStatusdefaultResponse,
LrosaDsPutAsyncRelativeRetryNoStatusPayload200Response,
LrosaDsPutAsyncRelativeRetryNoStatusPayloaddefaultResponse,
LrosaDsDelete204Succeeded204Response,
LrosaDsDelete204SucceededdefaultResponse,
LrosaDsDeleteAsyncRelativeRetryNoStatus202Response,
LrosaDsDeleteAsyncRelativeRetryNoStatusdefaultResponse,
LrosaDsPost202NoLocation202Response,
LrosaDsPost202NoLocationdefaultResponse,
LrosaDsPostAsyncRelativeRetryNoPayload202Response,
LrosaDsPostAsyncRelativeRetryNoPayloaddefaultResponse,
LrosaDsPut200InvalidJson200Response,
LrosaDsPut200InvalidJson204Response,
LrosaDsPut200InvalidJsondefaultResponse,
LrosaDsPutAsyncRelativeRetryInvalidHeader200Response,
LrosaDsPutAsyncRelativeRetryInvalidHeaderdefaultResponse,
LrosaDsPutAsyncRelativeRetryInvalidJsonPolling200Response,
LrosaDsPutAsyncRelativeRetryInvalidJsonPollingdefaultResponse,
LrosaDsDelete202RetryInvalidHeader202Response,
LrosaDsDelete202RetryInvalidHeaderdefaultResponse,
LrosaDsDeleteAsyncRelativeRetryInvalidHeader202Response,
LrosaDsDeleteAsyncRelativeRetryInvalidHeaderdefaultResponse,
LrosaDsDeleteAsyncRelativeRetryInvalidJsonPolling202Response,
LrosaDsDeleteAsyncRelativeRetryInvalidJsonPollingdefaultResponse,
LrosaDsPost202RetryInvalidHeader202Response,
LrosaDsPost202RetryInvalidHeaderdefaultResponse,
LrosaDsPostAsyncRelativeRetryInvalidHeader202Response,
LrosaDsPostAsyncRelativeRetryInvalidHeaderdefaultResponse,
LrosaDsPostAsyncRelativeRetryInvalidJsonPolling202Response,
LrosaDsPostAsyncRelativeRetryInvalidJsonPollingdefaultResponse,
LROsCustomHeaderPutAsyncRetrySucceeded200Response,
LROsCustomHeaderPutAsyncRetrySucceededdefaultResponse,
LROsCustomHeaderPut201CreatingSucceeded200200Response,
LROsCustomHeaderPut201CreatingSucceeded200201Response,
LROsCustomHeaderPut201CreatingSucceeded200defaultResponse,
LROsCustomHeaderPost202Retry200202Response,
LROsCustomHeaderPost202Retry200defaultResponse,
LROsCustomHeaderPostAsyncRetrySucceeded202Response,
LROsCustomHeaderPostAsyncRetrySucceededdefaultResponse
} from "./responses";
import { getClient, ClientOptions, Client } from "@azure-rest/core-client";
import "@azure/core-auth";
// --- Generated LRO operation interfaces ------------------------------------
// Each interface below exposes a single HTTP verb for one test route; the
// returned Promise resolves to the union of the route's modeled status-code
// responses plus its default (error) response.
export interface LROsPut200Succeeded {
  /** Long running put request, service returns a 200 to the initial request, with an entity that contains ProvisioningState=’Succeeded’. */
  put(
    options?: LROsPut200SucceededParameters
  ): Promise<
    | LROsPut200Succeeded200Response
    | LROsPut200Succeeded204Response
    | LROsPut200SucceededdefaultResponse
  >;
}
export interface LROsPatch200SucceededIgnoreHeaders {
  /** Long running put request, service returns a 200 to the initial request with location header. We should not have any subsequent calls after receiving this first response. */
  patch(
    options?: LROsPatch200SucceededIgnoreHeadersParameters
  ): Promise<
    | LROsPatch200SucceededIgnoreHeaders200Response
    | LROsPatch200SucceededIgnoreHeadersdefaultResponse
  >;
}
export interface LROsPut201Succeeded {
  /** Long running put request, service returns a 201 to the initial request, with an entity that contains ProvisioningState=’Succeeded’. */
  put(
    options?: LROsPut201SucceededParameters
  ): Promise<
    LROsPut201Succeeded201Response | LROsPut201SucceededdefaultResponse
  >;
}
export interface LROsPost202List {
  /** Long running put request, service returns a 202 with empty body to first request, returns a 200 with body [{ 'id': '100', 'name': 'foo' }]. */
  post(
    options?: LROsPost202ListParameters
  ): Promise<
    | LROsPost202List200Response
    | LROsPost202List202Response
    | LROsPost202ListdefaultResponse
  >;
}
export interface LROsPut200SucceededNoState {
  /** Long running put request, service returns a 200 to the initial request, with an entity that does not contain ProvisioningState=’Succeeded’. */
  put(
    options?: LROsPut200SucceededNoStateParameters
  ): Promise<
    | LROsPut200SucceededNoState200Response
    | LROsPut200SucceededNoStatedefaultResponse
  >;
}
export interface LROsPut202Retry200 {
  /** Long running put request, service returns a 202 to the initial request, with a location header that points to a polling URL that returns a 200 and an entity that doesn't contains ProvisioningState */
  put(
    options?: LROsPut202Retry200Parameters
  ): Promise<LROsPut202Retry200202Response | LROsPut202Retry200defaultResponse>;
}
export interface LROsPut201CreatingSucceeded200 {
  /** Long running put request, service returns a 201 to the initial request, with an entity that contains ProvisioningState=’Creating’. Polls return this value until the last poll returns a ‘200’ with ProvisioningState=’Succeeded’ */
  put(
    options?: LROsPut201CreatingSucceeded200Parameters
  ): Promise<
    | LROsPut201CreatingSucceeded200200Response
    | LROsPut201CreatingSucceeded200201Response
    | LROsPut201CreatingSucceeded200defaultResponse
  >;
}
export interface LROsPut200UpdatingSucceeded204 {
  /** Long running put request, service returns a 201 to the initial request, with an entity that contains ProvisioningState=’Updating’. Polls return this value until the last poll returns a ‘200’ with ProvisioningState=’Succeeded’ */
  put(
    options?: LROsPut200UpdatingSucceeded204Parameters
  ): Promise<
    | LROsPut200UpdatingSucceeded204200Response
    | LROsPut200UpdatingSucceeded204defaultResponse
  >;
}
export interface LROsPut201CreatingFailed200 {
  /** Long running put request, service returns a 201 to the initial request, with an entity that contains ProvisioningState=’Created’. Polls return this value until the last poll returns a ‘200’ with ProvisioningState=’Failed’ */
  put(
    options?: LROsPut201CreatingFailed200Parameters
  ): Promise<
    | LROsPut201CreatingFailed200200Response
    | LROsPut201CreatingFailed200201Response
    | LROsPut201CreatingFailed200defaultResponse
  >;
}
export interface LROsPut200Acceptedcanceled200 {
  /** Long running put request, service returns a 201 to the initial request, with an entity that contains ProvisioningState=’Creating’. Polls return this value until the last poll returns a ‘200’ with ProvisioningState=’Canceled’ */
  put(
    options?: LROsPut200Acceptedcanceled200Parameters
  ): Promise<
    | LROsPut200Acceptedcanceled200200Response
    | LROsPut200Acceptedcanceled200defaultResponse
  >;
}
export interface LROsPutNoHeaderInRetry {
  /** Long running put request, service returns a 202 to the initial request with location header. Subsequent calls to operation status do not contain location header. */
  put(
    options?: LROsPutNoHeaderInRetryParameters
  ): Promise<
    LROsPutNoHeaderInRetry202Response | LROsPutNoHeaderInRetrydefaultResponse
  >;
}
export interface LROsPutAsyncRetrySucceeded {
  /** Long running put request, service returns a 200 to the initial request, with an entity that contains ProvisioningState='Creating'. Poll the endpoint indicated in the Azure-AsyncOperation header for operation status */
  put(
    options?: LROsPutAsyncRetrySucceededParameters
  ): Promise<
    | LROsPutAsyncRetrySucceeded200Response
    | LROsPutAsyncRetrySucceededdefaultResponse
  >;
}
export interface LROsPutAsyncNoRetrySucceeded {
  /** Long running put request, service returns a 200 to the initial request, with an entity that contains ProvisioningState='Creating'. Poll the endpoint indicated in the Azure-AsyncOperation header for operation status */
  put(
    options?: LROsPutAsyncNoRetrySucceededParameters
  ): Promise<
    | LROsPutAsyncNoRetrySucceeded200Response
    | LROsPutAsyncNoRetrySucceededdefaultResponse
  >;
}
export interface LROsPutAsyncRetryFailed {
  /** Long running put request, service returns a 200 to the initial request, with an entity that contains ProvisioningState='Creating'. Poll the endpoint indicated in the Azure-AsyncOperation header for operation status */
  put(
    options?: LROsPutAsyncRetryFailedParameters
  ): Promise<
    LROsPutAsyncRetryFailed200Response | LROsPutAsyncRetryFaileddefaultResponse
  >;
}
export interface LROsPutAsyncNoRetrycanceled {
  /** Long running put request, service returns a 200 to the initial request, with an entity that contains ProvisioningState='Creating'. Poll the endpoint indicated in the Azure-AsyncOperation header for operation status */
  put(
    options?: LROsPutAsyncNoRetrycanceledParameters
  ): Promise<
    | LROsPutAsyncNoRetrycanceled200Response
    | LROsPutAsyncNoRetrycanceleddefaultResponse
  >;
}
export interface LROsPutAsyncNoHeaderInRetry {
  /** Long running put request, service returns a 202 to the initial request with Azure-AsyncOperation header. Subsequent calls to operation status do not contain Azure-AsyncOperation header. */
  put(
    options?: LROsPutAsyncNoHeaderInRetryParameters
  ): Promise<
    | LROsPutAsyncNoHeaderInRetry201Response
    | LROsPutAsyncNoHeaderInRetrydefaultResponse
  >;
}
export interface LROsPutNonResource {
  /** Long running put request with non resource. */
  put(
    options?: LROsPutNonResourceParameters
  ): Promise<LROsPutNonResource202Response | LROsPutNonResourcedefaultResponse>;
}
export interface LROsPutAsyncNonResource {
  /** Long running put request with non resource. */
  put(
    options?: LROsPutAsyncNonResourceParameters
  ): Promise<
    LROsPutAsyncNonResource202Response | LROsPutAsyncNonResourcedefaultResponse
  >;
}
export interface LROsPutSubResource {
  /** Long running put request with sub resource. */
  put(
    options?: LROsPutSubResourceParameters
  ): Promise<LROsPutSubResource202Response | LROsPutSubResourcedefaultResponse>;
}
export interface LROsPutAsyncSubResource {
  /** Long running put request with sub resource. */
  put(
    options?: LROsPutAsyncSubResourceParameters
  ): Promise<
    LROsPutAsyncSubResource202Response | LROsPutAsyncSubResourcedefaultResponse
  >;
}
export interface LROsDeleteProvisioning202Accepted200Succeeded {
  /** Long running delete request, service returns a 202 to the initial request, with an entity that contains ProvisioningState='Accepted'. Polls return this value until the last poll returns a '200' with ProvisioningState='Succeeded' */
  delete(
    options?: LROsDeleteProvisioning202Accepted200SucceededParameters
  ): Promise<
    | LROsDeleteProvisioning202Accepted200Succeeded200Response
    | LROsDeleteProvisioning202Accepted200Succeeded202Response
    | LROsDeleteProvisioning202Accepted200SucceededdefaultResponse
  >;
}
export interface LROsDeleteProvisioning202DeletingFailed200 {
  /** Long running delete request, service returns a 202 to the initial request, with an entity that contains ProvisioningState='Creating'. Polls return this value until the last poll returns a '200' with ProvisioningState='Failed' */
  delete(
    options?: LROsDeleteProvisioning202DeletingFailed200Parameters
  ): Promise<
    | LROsDeleteProvisioning202DeletingFailed200200Response
    | LROsDeleteProvisioning202DeletingFailed200202Response
    | LROsDeleteProvisioning202DeletingFailed200defaultResponse
  >;
}
export interface LROsDeleteProvisioning202Deletingcanceled200 {
  /** Long running delete request, service returns a 202 to the initial request, with an entity that contains ProvisioningState='Creating'. Polls return this value until the last poll returns a '200' with ProvisioningState='Canceled' */
  delete(
    options?: LROsDeleteProvisioning202Deletingcanceled200Parameters
  ): Promise<
    | LROsDeleteProvisioning202Deletingcanceled200200Response
    | LROsDeleteProvisioning202Deletingcanceled200202Response
    | LROsDeleteProvisioning202Deletingcanceled200defaultResponse
  >;
}
export interface LROsDelete204Succeeded {
  /** Long running delete succeeds and returns right away */
  delete(
    options?: LROsDelete204SucceededParameters
  ): Promise<
    LROsDelete204Succeeded204Response | LROsDelete204SucceededdefaultResponse
  >;
}
export interface LROsDelete202Retry200 {
  /** Long running delete request, service returns a 202 to the initial request. Polls return this value until the last poll returns a '200' with ProvisioningState='Succeeded' */
  delete(
    options?: LROsDelete202Retry200Parameters
  ): Promise<
    | LROsDelete202Retry200200Response
    | LROsDelete202Retry200202Response
    | LROsDelete202Retry200defaultResponse
  >;
}
export interface LROsDelete202NoRetry204 {
  /** Long running delete request, service returns a 202 to the initial request. Polls return this value until the last poll returns a '200' with ProvisioningState='Succeeded' */
  delete(
    options?: LROsDelete202NoRetry204Parameters
  ): Promise<
    | LROsDelete202NoRetry204200Response
    | LROsDelete202NoRetry204202Response
    | LROsDelete202NoRetry204defaultResponse
  >;
}
export interface LROsDeleteNoHeaderInRetry {
  /** Long running delete request, service returns a location header in the initial request. Subsequent calls to operation status do not contain location header. */
  delete(
    options?: LROsDeleteNoHeaderInRetryParameters
  ): Promise<
    | LROsDeleteNoHeaderInRetry202Response
    | LROsDeleteNoHeaderInRetry204Response
    | LROsDeleteNoHeaderInRetrydefaultResponse
  >;
}
export interface LROsDeleteAsyncNoHeaderInRetry {
  /** Long running delete request, service returns an Azure-AsyncOperation header in the initial request. Subsequent calls to operation status do not contain Azure-AsyncOperation header. */
  delete(
    options?: LROsDeleteAsyncNoHeaderInRetryParameters
  ): Promise<
    | LROsDeleteAsyncNoHeaderInRetry202Response
    | LROsDeleteAsyncNoHeaderInRetry204Response
    | LROsDeleteAsyncNoHeaderInRetrydefaultResponse
  >;
}
export interface LROsDeleteAsyncRetrySucceeded {
  /** Long running delete request, service returns a 202 to the initial request. Poll the endpoint indicated in the Azure-AsyncOperation header for operation status */
  delete(
    options?: LROsDeleteAsyncRetrySucceededParameters
  ): Promise<
    | LROsDeleteAsyncRetrySucceeded202Response
    | LROsDeleteAsyncRetrySucceededdefaultResponse
  >;
}
export interface LROsDeleteAsyncNoRetrySucceeded {
  /** Long running delete request, service returns a 202 to the initial request. Poll the endpoint indicated in the Azure-AsyncOperation header for operation status */
  delete(
    options?: LROsDeleteAsyncNoRetrySucceededParameters
  ): Promise<
    | LROsDeleteAsyncNoRetrySucceeded202Response
    | LROsDeleteAsyncNoRetrySucceededdefaultResponse
  >;
}
export interface LROsDeleteAsyncRetryFailed {
  /** Long running delete request, service returns a 202 to the initial request. Poll the endpoint indicated in the Azure-AsyncOperation header for operation status */
  delete(
    options?: LROsDeleteAsyncRetryFailedParameters
  ): Promise<
    | LROsDeleteAsyncRetryFailed202Response
    | LROsDeleteAsyncRetryFaileddefaultResponse
  >;
}
export interface LROsDeleteAsyncRetrycanceled {
  /** Long running delete request, service returns a 202 to the initial request. Poll the endpoint indicated in the Azure-AsyncOperation header for operation status */
  delete(
    options?: LROsDeleteAsyncRetrycanceledParameters
  ): Promise<
    | LROsDeleteAsyncRetrycanceled202Response
    | LROsDeleteAsyncRetrycanceleddefaultResponse
  >;
}
export interface LROsPost200WithPayload {
  /** Long running post request, service returns a 202 to the initial request, with 'Location' header. Poll returns a 200 with a response body after success. */
  post(
    options?: LROsPost200WithPayloadParameters
  ): Promise<
    | LROsPost200WithPayload200Response
    | LROsPost200WithPayload202Response
    | LROsPost200WithPayloaddefaultResponse
  >;
}
export interface LROsPost202Retry200 {
  /** Long running post request, service returns a 202 to the initial request, with 'Location' and 'Retry-After' headers, Polls return a 200 with a response body after success */
  post(
    options?: LROsPost202Retry200Parameters
  ): Promise<
    LROsPost202Retry200202Response | LROsPost202Retry200defaultResponse
  >;
}
export interface LROsPost202NoRetry204 {
  /** Long running post request, service returns a 202 to the initial request, with 'Location' header, 204 with no response body after success */
  post(
    options?: LROsPost202NoRetry204Parameters
  ): Promise<
    LROsPost202NoRetry204202Response | LROsPost202NoRetry204defaultResponse
  >;
}
export interface LROsPostDoubleHeadersFinalLocationGet {
  /** Long running post request, service returns a 202 to the initial request with both Location and Azure-Async header. Poll Azure-Async until it succeeds. Should poll Location to get the final object */
  post(
    options?: LROsPostDoubleHeadersFinalLocationGetParameters
  ): Promise<
    | LROsPostDoubleHeadersFinalLocationGet202Response
    | LROsPostDoubleHeadersFinalLocationGetdefaultResponse
  >;
}
export interface LROsPostDoubleHeadersFinalAzureHeaderGet {
  /** Long running post request, service returns a 202 to the initial request with both Location and Azure-Async header. Poll Azure-Async until it succeeds. Should NOT poll Location to get the final object */
  post(
    options?: LROsPostDoubleHeadersFinalAzureHeaderGetParameters
  ): Promise<
    | LROsPostDoubleHeadersFinalAzureHeaderGet202Response
    | LROsPostDoubleHeadersFinalAzureHeaderGetdefaultResponse
  >;
}
export interface LROsPostDoubleHeadersFinalAzureHeaderGetDefault {
  /** Long running post request, service returns a 202 to the initial request with both Location and Azure-Async header. Poll Azure-Async until it succeeds. Should NOT poll Location to get the final object if you support initial Autorest behavior. */
  post(
    options?: LROsPostDoubleHeadersFinalAzureHeaderGetDefaultParameters
  ): Promise<
    | LROsPostDoubleHeadersFinalAzureHeaderGetDefault202Response
    | LROsPostDoubleHeadersFinalAzureHeaderGetDefaultdefaultResponse
  >;
}
export interface LROsPostAsyncRetrySucceeded {
  /** Long running post request, service returns a 202 to the initial request, with an entity that contains ProvisioningState='Creating'. Poll the endpoint indicated in the Azure-AsyncOperation header for operation status */
  post(
    options?: LROsPostAsyncRetrySucceededParameters
  ): Promise<
    | LROsPostAsyncRetrySucceeded200Response
    | LROsPostAsyncRetrySucceeded202Response
    | LROsPostAsyncRetrySucceededdefaultResponse
  >;
}
export interface LROsPostAsyncNoRetrySucceeded {
  /** Long running post request, service returns a 202 to the initial request, with an entity that contains ProvisioningState='Creating'. Poll the endpoint indicated in the Azure-AsyncOperation header for operation status */
  post(
    options?: LROsPostAsyncNoRetrySucceededParameters
  ): Promise<
    | LROsPostAsyncNoRetrySucceeded200Response
    | LROsPostAsyncNoRetrySucceeded202Response
    | LROsPostAsyncNoRetrySucceededdefaultResponse
  >;
}
export interface LROsPostAsyncRetryFailed {
  /** Long running post request, service returns a 202 to the initial request, with an entity that contains ProvisioningState='Creating'. Poll the endpoint indicated in the Azure-AsyncOperation header for operation status */
  post(
    options?: LROsPostAsyncRetryFailedParameters
  ): Promise<
    | LROsPostAsyncRetryFailed202Response
    | LROsPostAsyncRetryFaileddefaultResponse
  >;
}
export interface LROsPostAsyncRetrycanceled {
  /** Long running post request, service returns a 202 to the initial request, with an entity that contains ProvisioningState='Creating'. Poll the endpoint indicated in the Azure-AsyncOperation header for operation status */
  post(
    options?: LROsPostAsyncRetrycanceledParameters
  ): Promise<
    | LROsPostAsyncRetrycanceled202Response
    | LROsPostAsyncRetrycanceleddefaultResponse
  >;
}
export interface LRORetrysPut201CreatingSucceeded200 {
  /** Long running put request, service returns a 500, then a 201 to the initial request, with an entity that contains ProvisioningState='Creating'. Polls return this value until the last poll returns a '200' with ProvisioningState='Succeeded' */
  put(
    options?: LRORetrysPut201CreatingSucceeded200Parameters
  ): Promise<
    | LRORetrysPut201CreatingSucceeded200200Response
    | LRORetrysPut201CreatingSucceeded200201Response
    | LRORetrysPut201CreatingSucceeded200defaultResponse
  >;
}
export interface LRORetrysPutAsyncRelativeRetrySucceeded {
  /** Long running put request, service returns a 500, then a 200 to the initial request, with an entity that contains ProvisioningState='Creating'. Poll the endpoint indicated in the Azure-AsyncOperation header for operation status */
  put(
    options?: LRORetrysPutAsyncRelativeRetrySucceededParameters
  ): Promise<
    | LRORetrysPutAsyncRelativeRetrySucceeded200Response
    | LRORetrysPutAsyncRelativeRetrySucceededdefaultResponse
  >;
}
export interface LRORetrysDeleteProvisioning202Accepted200Succeeded {
  /** Long running delete request, service returns a 500, then a 202 to the initial request, with an entity that contains ProvisioningState='Accepted'. Polls return this value until the last poll returns a '200' with ProvisioningState='Succeeded' */
  delete(
    options?: LRORetrysDeleteProvisioning202Accepted200SucceededParameters
  ): Promise<
    | LRORetrysDeleteProvisioning202Accepted200Succeeded200Response
    | LRORetrysDeleteProvisioning202Accepted200Succeeded202Response
    | LRORetrysDeleteProvisioning202Accepted200SucceededdefaultResponse
  >;
}
export interface LRORetrysDelete202Retry200 {
  /** Long running delete request, service returns a 500, then a 202 to the initial request. Polls return this value until the last poll returns a '200' with ProvisioningState='Succeeded' */
  delete(
    options?: LRORetrysDelete202Retry200Parameters
  ): Promise<
    | LRORetrysDelete202Retry200202Response
    | LRORetrysDelete202Retry200defaultResponse
  >;
}
export interface LRORetrysDeleteAsyncRelativeRetrySucceeded {
  /** Long running delete request, service returns a 500, then a 202 to the initial request. Poll the endpoint indicated in the Azure-AsyncOperation header for operation status */
  delete(
    options?: LRORetrysDeleteAsyncRelativeRetrySucceededParameters
  ): Promise<
    | LRORetrysDeleteAsyncRelativeRetrySucceeded202Response
    | LRORetrysDeleteAsyncRelativeRetrySucceededdefaultResponse
  >;
}
export interface LRORetrysPost202Retry200 {
  /** Long running post request, service returns a 500, then a 202 to the initial request, with 'Location' and 'Retry-After' headers, Polls return a 200 with a response body after success */
  post(
    options?: LRORetrysPost202Retry200Parameters
  ): Promise<
    | LRORetrysPost202Retry200202Response
    | LRORetrysPost202Retry200defaultResponse
  >;
}
export interface LRORetrysPostAsyncRelativeRetrySucceeded {
  /** Long running post request, service returns a 500, then a 202 to the initial request, with an entity that contains ProvisioningState='Creating'. Poll the endpoint indicated in the Azure-AsyncOperation header for operation status */
  post(
    options?: LRORetrysPostAsyncRelativeRetrySucceededParameters
  ): Promise<
    | LRORetrysPostAsyncRelativeRetrySucceeded202Response
    | LRORetrysPostAsyncRelativeRetrySucceededdefaultResponse
  >;
}
export interface LrosaDsPutNonRetry400 {
  /** Long running put request, service returns a 400 to the initial request */
  put(
    options?: LrosaDsPutNonRetry400Parameters
  ): Promise<
    | LrosaDsPutNonRetry400200Response
    | LrosaDsPutNonRetry400201Response
    | LrosaDsPutNonRetry400defaultResponse
  >;
}
export interface LrosaDsPutNonRetry201Creating400 {
  /** Long running put request, service returns a Product with 'ProvisioningState' = 'Creating' and 201 response code */
  put(
    options?: LrosaDsPutNonRetry201Creating400Parameters
  ): Promise<
    | LrosaDsPutNonRetry201Creating400200Response
    | LrosaDsPutNonRetry201Creating400201Response
    | LrosaDsPutNonRetry201Creating400defaultResponse
  >;
}
export interface LrosaDsPutNonRetry201Creating400InvalidJson {
  /** Long running put request, service returns a Product with 'ProvisioningState' = 'Creating' and 201 response code */
  put(
    options?: LrosaDsPutNonRetry201Creating400InvalidJsonParameters
  ): Promise<
    | LrosaDsPutNonRetry201Creating400InvalidJson200Response
    | LrosaDsPutNonRetry201Creating400InvalidJson201Response
    | LrosaDsPutNonRetry201Creating400InvalidJsondefaultResponse
  >;
}
export interface LrosaDsPutAsyncRelativeRetry400 {
  /** Long running put request, service returns a 200 with ProvisioningState='Creating'. Poll the endpoint indicated in the Azure-AsyncOperation header for operation status */
  put(
    options?: LrosaDsPutAsyncRelativeRetry400Parameters
  ): Promise<
    | LrosaDsPutAsyncRelativeRetry400200Response
    | LrosaDsPutAsyncRelativeRetry400defaultResponse
  >;
}
export interface LrosaDsDeleteNonRetry400 {
  /** Long running delete request, service returns a 400 with an error body */
  delete(
    options?: LrosaDsDeleteNonRetry400Parameters
  ): Promise<
    | LrosaDsDeleteNonRetry400202Response
    | LrosaDsDeleteNonRetry400defaultResponse
  >;
}
export interface LrosaDsDelete202NonRetry400 {
  /** Long running delete request, service returns a 202 with a location header */
  delete(
    options?: LrosaDsDelete202NonRetry400Parameters
  ): Promise<
    | LrosaDsDelete202NonRetry400202Response
    | LrosaDsDelete202NonRetry400defaultResponse
  >;
}
export interface LrosaDsDeleteAsyncRelativeRetry400 {
  /** Long running delete request, service returns a 202 to the initial request. Poll the endpoint indicated in the Azure-AsyncOperation header for operation status */
  delete(
    options?: LrosaDsDeleteAsyncRelativeRetry400Parameters
  ): Promise<
    | LrosaDsDeleteAsyncRelativeRetry400202Response
    | LrosaDsDeleteAsyncRelativeRetry400defaultResponse
  >;
}
export interface LrosaDsPostNonRetry400 {
  /** Long running post request, service returns a 400 with no error body */
  post(
    options?: LrosaDsPostNonRetry400Parameters
  ): Promise<
    LrosaDsPostNonRetry400202Response | LrosaDsPostNonRetry400defaultResponse
  >;
}
export interface LrosaDsPost202NonRetry400 {
  /** Long running post request, service returns a 202 with a location header */
  post(
    options?: LrosaDsPost202NonRetry400Parameters
  ): Promise<
    | LrosaDsPost202NonRetry400202Response
    | LrosaDsPost202NonRetry400defaultResponse
  >;
}
export interface LrosaDsPostAsyncRelativeRetry400 {
  /** Long running post request, service returns a 202 to the initial request. Poll the endpoint indicated in the Azure-AsyncOperation header for operation status */
  post(
    options?: LrosaDsPostAsyncRelativeRetry400Parameters
  ): Promise<
    | LrosaDsPostAsyncRelativeRetry400202Response
    | LrosaDsPostAsyncRelativeRetry400defaultResponse
  >;
}
export interface LrosaDsPutError201NoProvisioningStatePayload {
  /** Long running put request, service returns a 201 to the initial request with no payload */
  put(
    options?: LrosaDsPutError201NoProvisioningStatePayloadParameters
  ): Promise<
    | LrosaDsPutError201NoProvisioningStatePayload200Response
    | LrosaDsPutError201NoProvisioningStatePayload201Response
    | LrosaDsPutError201NoProvisioningStatePayloaddefaultResponse
  >;
}
export interface LrosaDsPutAsyncRelativeRetryNoStatus {
  /** Long running put request, service returns a 200 to the initial request, with an entity that contains ProvisioningState='Creating'. Poll the endpoint indicated in the Azure-AsyncOperation header for operation status */
  put(
    options?: LrosaDsPutAsyncRelativeRetryNoStatusParameters
  ): Promise<
    | LrosaDsPutAsyncRelativeRetryNoStatus200Response
    | LrosaDsPutAsyncRelativeRetryNoStatusdefaultResponse
  >;
}
export interface LrosaDsPutAsyncRelativeRetryNoStatusPayload {
  /** Long running put request, service returns a 200 to the initial request, with an entity that contains ProvisioningState='Creating'. Poll the endpoint indicated in the Azure-AsyncOperation header for operation status */
  put(
    options?: LrosaDsPutAsyncRelativeRetryNoStatusPayloadParameters
  ): Promise<
    | LrosaDsPutAsyncRelativeRetryNoStatusPayload200Response
    | LrosaDsPutAsyncRelativeRetryNoStatusPayloaddefaultResponse
  >;
}
export interface LrosaDsDelete204Succeeded {
  /** Long running delete request, service returns a 204 to the initial request, indicating success. */
  delete(
    options?: LrosaDsDelete204SucceededParameters
  ): Promise<
    | LrosaDsDelete204Succeeded204Response
    | LrosaDsDelete204SucceededdefaultResponse
  >;
}
export interface LrosaDsDeleteAsyncRelativeRetryNoStatus {
  /** Long running delete request, service returns a 202 to the initial request. Poll the endpoint indicated in the Azure-AsyncOperation header for operation status */
  delete(
    options?: LrosaDsDeleteAsyncRelativeRetryNoStatusParameters
  ): Promise<
    | LrosaDsDeleteAsyncRelativeRetryNoStatus202Response
    | LrosaDsDeleteAsyncRelativeRetryNoStatusdefaultResponse
  >;
}
export interface LrosaDsPost202NoLocation {
  /** Long running post request, service returns a 202 to the initial request, without a location header. */
  post(
    options?: LrosaDsPost202NoLocationParameters
  ): Promise<
    | LrosaDsPost202NoLocation202Response
    | LrosaDsPost202NoLocationdefaultResponse
  >;
}
export interface LrosaDsPostAsyncRelativeRetryNoPayload {
  /** Long running post request, service returns a 202 to the initial request, with an entity that contains ProvisioningState='Creating'. Poll the endpoint indicated in the Azure-AsyncOperation header for operation status */
  post(
    options?: LrosaDsPostAsyncRelativeRetryNoPayloadParameters
  ): Promise<
    | LrosaDsPostAsyncRelativeRetryNoPayload202Response
    | LrosaDsPostAsyncRelativeRetryNoPayloaddefaultResponse
  >;
}
export interface LrosaDsPut200InvalidJson {
  /** Long running put request, service returns a 200 to the initial request, with an entity that is not a valid json */
  put(
    options?: LrosaDsPut200InvalidJsonParameters
  ): Promise<
    | LrosaDsPut200InvalidJson200Response
    | LrosaDsPut200InvalidJson204Response
    | LrosaDsPut200InvalidJsondefaultResponse
  >;
}
export interface LrosaDsPutAsyncRelativeRetryInvalidHeader {
  /** Long running put request, service returns a 200 to the initial request, with an entity that contains ProvisioningState='Creating'. The endpoint indicated in the Azure-AsyncOperation header is invalid. */
  put(
    options?: LrosaDsPutAsyncRelativeRetryInvalidHeaderParameters
  ): Promise<
    | LrosaDsPutAsyncRelativeRetryInvalidHeader200Response
    | LrosaDsPutAsyncRelativeRetryInvalidHeaderdefaultResponse
  >;
}
export interface LrosaDsPutAsyncRelativeRetryInvalidJsonPolling {
  /** Long running put request, service returns a 200 to the initial request, with an entity that contains ProvisioningState='Creating'. Poll the endpoint indicated in the Azure-AsyncOperation header for operation status */
  put(
    options?: LrosaDsPutAsyncRelativeRetryInvalidJsonPollingParameters
  ): Promise<
    | LrosaDsPutAsyncRelativeRetryInvalidJsonPolling200Response
    | LrosaDsPutAsyncRelativeRetryInvalidJsonPollingdefaultResponse
  >;
}
export interface LrosaDsDelete202RetryInvalidHeader {
  /** Long running delete request, service returns a 202 to the initial request receiving a response with an invalid 'Location' and 'Retry-After' headers */
  delete(
    options?: LrosaDsDelete202RetryInvalidHeaderParameters
  ): Promise<
    | LrosaDsDelete202RetryInvalidHeader202Response
    | LrosaDsDelete202RetryInvalidHeaderdefaultResponse
  >;
}
export interface LrosaDsDeleteAsyncRelativeRetryInvalidHeader {
  /** Long running delete request, service returns a 202 to the initial request. The endpoint indicated in the Azure-AsyncOperation header is invalid */
  delete(
    options?: LrosaDsDeleteAsyncRelativeRetryInvalidHeaderParameters
  ): Promise<
    | LrosaDsDeleteAsyncRelativeRetryInvalidHeader202Response
    | LrosaDsDeleteAsyncRelativeRetryInvalidHeaderdefaultResponse
  >;
}
export interface LrosaDsDeleteAsyncRelativeRetryInvalidJsonPolling {
  /** Long running delete request, service returns a 202 to the initial request. Poll the endpoint indicated in the Azure-AsyncOperation header for operation status */
  delete(
    options?: LrosaDsDeleteAsyncRelativeRetryInvalidJsonPollingParameters
  ): Promise<
    | LrosaDsDeleteAsyncRelativeRetryInvalidJsonPolling202Response
    | LrosaDsDeleteAsyncRelativeRetryInvalidJsonPollingdefaultResponse
  >;
}
export interface LrosaDsPost202RetryInvalidHeader {
  /** Long running post request, service returns a 202 to the initial request, with invalid 'Location' and 'Retry-After' headers. */
  post(
    options?: LrosaDsPost202RetryInvalidHeaderParameters
  ): Promise<
    | LrosaDsPost202RetryInvalidHeader202Response
    | LrosaDsPost202RetryInvalidHeaderdefaultResponse
  >;
}
export interface LrosaDsPostAsyncRelativeRetryInvalidHeader {
  /** Long running post request, service returns a 202 to the initial request, with an entity that contains ProvisioningState='Creating'. The endpoint indicated in the Azure-AsyncOperation header is invalid. */
  post(
    options?: LrosaDsPostAsyncRelativeRetryInvalidHeaderParameters
  ): Promise<
    | LrosaDsPostAsyncRelativeRetryInvalidHeader202Response
    | LrosaDsPostAsyncRelativeRetryInvalidHeaderdefaultResponse
  >;
}
export interface LrosaDsPostAsyncRelativeRetryInvalidJsonPolling {
  /** Long running post request, service returns a 202 to the initial request, with an entity that contains ProvisioningState='Creating'. Poll the endpoint indicated in the Azure-AsyncOperation header for operation status */
  post(
    options?: LrosaDsPostAsyncRelativeRetryInvalidJsonPollingParameters
  ): Promise<
    | LrosaDsPostAsyncRelativeRetryInvalidJsonPolling202Response
    | LrosaDsPostAsyncRelativeRetryInvalidJsonPollingdefaultResponse
  >;
}
export interface LROsCustomHeaderPutAsyncRetrySucceeded {
  /** x-ms-client-request-id = 9C4D50EE-2D56-4CD3-8152-34347DC9F2B0 is required message header for all requests. Long running put request, service returns a 200 to the initial request, with an entity that contains ProvisioningState='Creating'. Poll the endpoint indicated in the Azure-AsyncOperation header for operation status */
  put(
    options?: LROsCustomHeaderPutAsyncRetrySucceededParameters
  ): Promise<
    | LROsCustomHeaderPutAsyncRetrySucceeded200Response
    | LROsCustomHeaderPutAsyncRetrySucceededdefaultResponse
  >;
}
export interface LROsCustomHeaderPut201CreatingSucceeded200 {
  /** x-ms-client-request-id = 9C4D50EE-2D56-4CD3-8152-34347DC9F2B0 is required message header for all requests. Long running put request, service returns a 201 to the initial request, with an entity that contains ProvisioningState='Creating'. Polls return this value until the last poll returns a '200' with ProvisioningState='Succeeded' */
  put(
    options?: LROsCustomHeaderPut201CreatingSucceeded200Parameters
  ): Promise<
    | LROsCustomHeaderPut201CreatingSucceeded200200Response
    | LROsCustomHeaderPut201CreatingSucceeded200201Response
    | LROsCustomHeaderPut201CreatingSucceeded200defaultResponse
  >;
}
export interface LROsCustomHeaderPost202Retry200 {
  /** x-ms-client-request-id = 9C4D50EE-2D56-4CD3-8152-34347DC9F2B0 is required message header for all requests. Long running post request, service returns a 202 to the initial request, with 'Location' and 'Retry-After' headers, Polls return a 200 with a response body after success */
  post(
    options?: LROsCustomHeaderPost202Retry200Parameters
  ): Promise<
    | LROsCustomHeaderPost202Retry200202Response
    | LROsCustomHeaderPost202Retry200defaultResponse
  >;
}
export interface LROsCustomHeaderPostAsyncRetrySucceeded {
  /** x-ms-client-request-id = 9C4D50EE-2D56-4CD3-8152-34347DC9F2B0 is required message header for all requests. Long running post request, service returns a 202 to the initial request, with an entity that contains ProvisioningState='Creating'. Poll the endpoint indicated in the Azure-AsyncOperation header for operation status */
  post(
    options?: LROsCustomHeaderPostAsyncRetrySucceededParameters
  ): Promise<
    | LROsCustomHeaderPostAsyncRetrySucceeded202Response
    | LROsCustomHeaderPostAsyncRetrySucceededdefaultResponse
  >;
}
/**
 * Route lookup table for the LRO test client. Each call signature maps a
 * literal request path to the interface describing the HTTP verbs available
 * on that path; used via the client's `path("/...")` accessor (see
 * LRORestClientRestClient).
 */
export interface Routes {
  /** Resource for '/lro/put/200/succeeded' has methods for the following verbs: put */
  (path: "/lro/put/200/succeeded"): LROsPut200Succeeded;
  /** Resource for '/lro/patch/200/succeeded/ignoreheaders' has methods for the following verbs: patch */
  (
    path: "/lro/patch/200/succeeded/ignoreheaders"
  ): LROsPatch200SucceededIgnoreHeaders;
  /** Resource for '/lro/put/201/succeeded' has methods for the following verbs: put */
  (path: "/lro/put/201/succeeded"): LROsPut201Succeeded;
  /** Resource for '/lro/list' has methods for the following verbs: post */
  (path: "/lro/list"): LROsPost202List;
  /** Resource for '/lro/put/200/succeeded/nostate' has methods for the following verbs: put */
  (path: "/lro/put/200/succeeded/nostate"): LROsPut200SucceededNoState;
  /** Resource for '/lro/put/202/retry/200' has methods for the following verbs: put */
  (path: "/lro/put/202/retry/200"): LROsPut202Retry200;
  /** Resource for '/lro/put/201/creating/succeeded/200' has methods for the following verbs: put */
  (path: "/lro/put/201/creating/succeeded/200"): LROsPut201CreatingSucceeded200;
  /** Resource for '/lro/put/200/updating/succeeded/200' has methods for the following verbs: put */
  (path: "/lro/put/200/updating/succeeded/200"): LROsPut200UpdatingSucceeded204;
  /** Resource for '/lro/put/201/created/failed/200' has methods for the following verbs: put */
  (path: "/lro/put/201/created/failed/200"): LROsPut201CreatingFailed200;
  /** Resource for '/lro/put/200/accepted/canceled/200' has methods for the following verbs: put */
  (path: "/lro/put/200/accepted/canceled/200"): LROsPut200Acceptedcanceled200;
  /** Resource for '/lro/put/noheader/202/200' has methods for the following verbs: put */
  (path: "/lro/put/noheader/202/200"): LROsPutNoHeaderInRetry;
  /** Resource for '/lro/putasync/retry/succeeded' has methods for the following verbs: put */
  (path: "/lro/putasync/retry/succeeded"): LROsPutAsyncRetrySucceeded;
  /** Resource for '/lro/putasync/noretry/succeeded' has methods for the following verbs: put */
  (path: "/lro/putasync/noretry/succeeded"): LROsPutAsyncNoRetrySucceeded;
  /** Resource for '/lro/putasync/retry/failed' has methods for the following verbs: put */
  (path: "/lro/putasync/retry/failed"): LROsPutAsyncRetryFailed;
  /** Resource for '/lro/putasync/noretry/canceled' has methods for the following verbs: put */
  (path: "/lro/putasync/noretry/canceled"): LROsPutAsyncNoRetrycanceled;
  /** Resource for '/lro/putasync/noheader/201/200' has methods for the following verbs: put */
  (path: "/lro/putasync/noheader/201/200"): LROsPutAsyncNoHeaderInRetry;
  /** Resource for '/lro/putnonresource/202/200' has methods for the following verbs: put */
  (path: "/lro/putnonresource/202/200"): LROsPutNonResource;
  /** Resource for '/lro/putnonresourceasync/202/200' has methods for the following verbs: put */
  (path: "/lro/putnonresourceasync/202/200"): LROsPutAsyncNonResource;
  /** Resource for '/lro/putsubresource/202/200' has methods for the following verbs: put */
  (path: "/lro/putsubresource/202/200"): LROsPutSubResource;
  /** Resource for '/lro/putsubresourceasync/202/200' has methods for the following verbs: put */
  (path: "/lro/putsubresourceasync/202/200"): LROsPutAsyncSubResource;
  /** Resource for '/lro/delete/provisioning/202/accepted/200/succeeded' has methods for the following verbs: delete */
  (
    path: "/lro/delete/provisioning/202/accepted/200/succeeded"
  ): LROsDeleteProvisioning202Accepted200Succeeded;
  /** Resource for '/lro/delete/provisioning/202/deleting/200/failed' has methods for the following verbs: delete */
  (
    path: "/lro/delete/provisioning/202/deleting/200/failed"
  ): LROsDeleteProvisioning202DeletingFailed200;
  /** Resource for '/lro/delete/provisioning/202/deleting/200/canceled' has methods for the following verbs: delete */
  (
    path: "/lro/delete/provisioning/202/deleting/200/canceled"
  ): LROsDeleteProvisioning202Deletingcanceled200;
  /** Resource for '/lro/delete/204/succeeded' has methods for the following verbs: delete */
  (path: "/lro/delete/204/succeeded"): LROsDelete204Succeeded;
  /** Resource for '/lro/delete/202/retry/200' has methods for the following verbs: delete */
  (path: "/lro/delete/202/retry/200"): LROsDelete202Retry200;
  /** Resource for '/lro/delete/202/noretry/204' has methods for the following verbs: delete */
  (path: "/lro/delete/202/noretry/204"): LROsDelete202NoRetry204;
  /** Resource for '/lro/delete/noheader' has methods for the following verbs: delete */
  (path: "/lro/delete/noheader"): LROsDeleteNoHeaderInRetry;
  /** Resource for '/lro/deleteasync/noheader/202/204' has methods for the following verbs: delete */
  (path: "/lro/deleteasync/noheader/202/204"): LROsDeleteAsyncNoHeaderInRetry;
  /** Resource for '/lro/deleteasync/retry/succeeded' has methods for the following verbs: delete */
  (path: "/lro/deleteasync/retry/succeeded"): LROsDeleteAsyncRetrySucceeded;
  /** Resource for '/lro/deleteasync/noretry/succeeded' has methods for the following verbs: delete */
  (path: "/lro/deleteasync/noretry/succeeded"): LROsDeleteAsyncNoRetrySucceeded;
  /** Resource for '/lro/deleteasync/retry/failed' has methods for the following verbs: delete */
  (path: "/lro/deleteasync/retry/failed"): LROsDeleteAsyncRetryFailed;
  /** Resource for '/lro/deleteasync/retry/canceled' has methods for the following verbs: delete */
  (path: "/lro/deleteasync/retry/canceled"): LROsDeleteAsyncRetrycanceled;
  /** Resource for '/lro/post/payload/200' has methods for the following verbs: post */
  (path: "/lro/post/payload/200"): LROsPost200WithPayload;
  /** Resource for '/lro/post/202/retry/200' has methods for the following verbs: post */
  (path: "/lro/post/202/retry/200"): LROsPost202Retry200;
  /** Resource for '/lro/post/202/noretry/204' has methods for the following verbs: post */
  (path: "/lro/post/202/noretry/204"): LROsPost202NoRetry204;
  /** Resource for '/lro/LROPostDoubleHeadersFinalLocationGet' has methods for the following verbs: post */
  (
    path: "/lro/LROPostDoubleHeadersFinalLocationGet"
  ): LROsPostDoubleHeadersFinalLocationGet;
  /** Resource for '/lro/LROPostDoubleHeadersFinalAzureHeaderGet' has methods for the following verbs: post */
  (
    path: "/lro/LROPostDoubleHeadersFinalAzureHeaderGet"
  ): LROsPostDoubleHeadersFinalAzureHeaderGet;
  /** Resource for '/lro/LROPostDoubleHeadersFinalAzureHeaderGetDefault' has methods for the following verbs: post */
  (
    path: "/lro/LROPostDoubleHeadersFinalAzureHeaderGetDefault"
  ): LROsPostDoubleHeadersFinalAzureHeaderGetDefault;
  /** Resource for '/lro/postasync/retry/succeeded' has methods for the following verbs: post */
  (path: "/lro/postasync/retry/succeeded"): LROsPostAsyncRetrySucceeded;
  /** Resource for '/lro/postasync/noretry/succeeded' has methods for the following verbs: post */
  (path: "/lro/postasync/noretry/succeeded"): LROsPostAsyncNoRetrySucceeded;
  /** Resource for '/lro/postasync/retry/failed' has methods for the following verbs: post */
  (path: "/lro/postasync/retry/failed"): LROsPostAsyncRetryFailed;
  /** Resource for '/lro/postasync/retry/canceled' has methods for the following verbs: post */
  (path: "/lro/postasync/retry/canceled"): LROsPostAsyncRetrycanceled;
  /** Resource for '/lro/retryerror/put/201/creating/succeeded/200' has methods for the following verbs: put */
  (
    path: "/lro/retryerror/put/201/creating/succeeded/200"
  ): LRORetrysPut201CreatingSucceeded200;
  /** Resource for '/lro/retryerror/putasync/retry/succeeded' has methods for the following verbs: put */
  (
    path: "/lro/retryerror/putasync/retry/succeeded"
  ): LRORetrysPutAsyncRelativeRetrySucceeded;
  /** Resource for '/lro/retryerror/delete/provisioning/202/accepted/200/succeeded' has methods for the following verbs: delete */
  (
    path: "/lro/retryerror/delete/provisioning/202/accepted/200/succeeded"
  ): LRORetrysDeleteProvisioning202Accepted200Succeeded;
  /** Resource for '/lro/retryerror/delete/202/retry/200' has methods for the following verbs: delete */
  (path: "/lro/retryerror/delete/202/retry/200"): LRORetrysDelete202Retry200;
  /** Resource for '/lro/retryerror/deleteasync/retry/succeeded' has methods for the following verbs: delete */
  (
    path: "/lro/retryerror/deleteasync/retry/succeeded"
  ): LRORetrysDeleteAsyncRelativeRetrySucceeded;
  /** Resource for '/lro/retryerror/post/202/retry/200' has methods for the following verbs: post */
  (path: "/lro/retryerror/post/202/retry/200"): LRORetrysPost202Retry200;
  /** Resource for '/lro/retryerror/postasync/retry/succeeded' has methods for the following verbs: post */
  (
    path: "/lro/retryerror/postasync/retry/succeeded"
  ): LRORetrysPostAsyncRelativeRetrySucceeded;
  /** Resource for '/lro/nonretryerror/put/400' has methods for the following verbs: put */
  (path: "/lro/nonretryerror/put/400"): LrosaDsPutNonRetry400;
  /** Resource for '/lro/nonretryerror/put/201/creating/400' has methods for the following verbs: put */
  (
    path: "/lro/nonretryerror/put/201/creating/400"
  ): LrosaDsPutNonRetry201Creating400;
  /** Resource for '/lro/nonretryerror/put/201/creating/400/invalidjson' has methods for the following verbs: put */
  (
    path: "/lro/nonretryerror/put/201/creating/400/invalidjson"
  ): LrosaDsPutNonRetry201Creating400InvalidJson;
  /** Resource for '/lro/nonretryerror/putasync/retry/400' has methods for the following verbs: put */
  (
    path: "/lro/nonretryerror/putasync/retry/400"
  ): LrosaDsPutAsyncRelativeRetry400;
  /** Resource for '/lro/nonretryerror/delete/400' has methods for the following verbs: delete */
  (path: "/lro/nonretryerror/delete/400"): LrosaDsDeleteNonRetry400;
  /** Resource for '/lro/nonretryerror/delete/202/retry/400' has methods for the following verbs: delete */
  (
    path: "/lro/nonretryerror/delete/202/retry/400"
  ): LrosaDsDelete202NonRetry400;
  /** Resource for '/lro/nonretryerror/deleteasync/retry/400' has methods for the following verbs: delete */
  (
    path: "/lro/nonretryerror/deleteasync/retry/400"
  ): LrosaDsDeleteAsyncRelativeRetry400;
  /** Resource for '/lro/nonretryerror/post/400' has methods for the following verbs: post */
  (path: "/lro/nonretryerror/post/400"): LrosaDsPostNonRetry400;
  /** Resource for '/lro/nonretryerror/post/202/retry/400' has methods for the following verbs: post */
  (path: "/lro/nonretryerror/post/202/retry/400"): LrosaDsPost202NonRetry400;
  /** Resource for '/lro/nonretryerror/postasync/retry/400' has methods for the following verbs: post */
  (
    path: "/lro/nonretryerror/postasync/retry/400"
  ): LrosaDsPostAsyncRelativeRetry400;
  /** Resource for '/lro/error/put/201/noprovisioningstatepayload' has methods for the following verbs: put */
  (
    path: "/lro/error/put/201/noprovisioningstatepayload"
  ): LrosaDsPutError201NoProvisioningStatePayload;
  /** Resource for '/lro/error/putasync/retry/nostatus' has methods for the following verbs: put */
  (
    path: "/lro/error/putasync/retry/nostatus"
  ): LrosaDsPutAsyncRelativeRetryNoStatus;
  /** Resource for '/lro/error/putasync/retry/nostatuspayload' has methods for the following verbs: put */
  (
    path: "/lro/error/putasync/retry/nostatuspayload"
  ): LrosaDsPutAsyncRelativeRetryNoStatusPayload;
  /** Resource for '/lro/error/delete/204/nolocation' has methods for the following verbs: delete */
  (path: "/lro/error/delete/204/nolocation"): LrosaDsDelete204Succeeded;
  /** Resource for '/lro/error/deleteasync/retry/nostatus' has methods for the following verbs: delete */
  (
    path: "/lro/error/deleteasync/retry/nostatus"
  ): LrosaDsDeleteAsyncRelativeRetryNoStatus;
  /** Resource for '/lro/error/post/202/nolocation' has methods for the following verbs: post */
  (path: "/lro/error/post/202/nolocation"): LrosaDsPost202NoLocation;
  /** Resource for '/lro/error/postasync/retry/nopayload' has methods for the following verbs: post */
  (
    path: "/lro/error/postasync/retry/nopayload"
  ): LrosaDsPostAsyncRelativeRetryNoPayload;
  /** Resource for '/lro/error/put/200/invalidjson' has methods for the following verbs: put */
  (path: "/lro/error/put/200/invalidjson"): LrosaDsPut200InvalidJson;
  /** Resource for '/lro/error/putasync/retry/invalidheader' has methods for the following verbs: put */
  (
    path: "/lro/error/putasync/retry/invalidheader"
  ): LrosaDsPutAsyncRelativeRetryInvalidHeader;
  /** Resource for '/lro/error/putasync/retry/invalidjsonpolling' has methods for the following verbs: put */
  (
    path: "/lro/error/putasync/retry/invalidjsonpolling"
  ): LrosaDsPutAsyncRelativeRetryInvalidJsonPolling;
  /** Resource for '/lro/error/delete/202/retry/invalidheader' has methods for the following verbs: delete */
  (
    path: "/lro/error/delete/202/retry/invalidheader"
  ): LrosaDsDelete202RetryInvalidHeader;
  /** Resource for '/lro/error/deleteasync/retry/invalidheader' has methods for the following verbs: delete */
  (
    path: "/lro/error/deleteasync/retry/invalidheader"
  ): LrosaDsDeleteAsyncRelativeRetryInvalidHeader;
  /** Resource for '/lro/error/deleteasync/retry/invalidjsonpolling' has methods for the following verbs: delete */
  (
    path: "/lro/error/deleteasync/retry/invalidjsonpolling"
  ): LrosaDsDeleteAsyncRelativeRetryInvalidJsonPolling;
  /** Resource for '/lro/error/post/202/retry/invalidheader' has methods for the following verbs: post */
  (
    path: "/lro/error/post/202/retry/invalidheader"
  ): LrosaDsPost202RetryInvalidHeader;
  /** Resource for '/lro/error/postasync/retry/invalidheader' has methods for the following verbs: post */
  (
    path: "/lro/error/postasync/retry/invalidheader"
  ): LrosaDsPostAsyncRelativeRetryInvalidHeader;
  /** Resource for '/lro/error/postasync/retry/invalidjsonpolling' has methods for the following verbs: post */
  (
    path: "/lro/error/postasync/retry/invalidjsonpolling"
  ): LrosaDsPostAsyncRelativeRetryInvalidJsonPolling;
  /** Resource for '/lro/customheader/putasync/retry/succeeded' has methods for the following verbs: put */
  (
    path: "/lro/customheader/putasync/retry/succeeded"
  ): LROsCustomHeaderPutAsyncRetrySucceeded;
  /** Resource for '/lro/customheader/put/201/creating/succeeded/200' has methods for the following verbs: put */
  (
    path: "/lro/customheader/put/201/creating/succeeded/200"
  ): LROsCustomHeaderPut201CreatingSucceeded200;
  /** Resource for '/lro/customheader/post/202/retry/200' has methods for the following verbs: post */
  (
    path: "/lro/customheader/post/202/retry/200"
  ): LROsCustomHeaderPost202Retry200;
  /** Resource for '/lro/customheader/postasync/retry/succeeded' has methods for the following verbs: post */
  (
    path: "/lro/customheader/postasync/retry/succeeded"
  ): LROsCustomHeaderPostAsyncRetrySucceeded;
}
/** The generic REST `Client` augmented with the typed `path` router above. */
export type LRORestClientRestClient = Client & {
  path: Routes;
};
export default function LRORestClient(
options: ClientOptions = {}
): LRORestClientRestClient {
const baseUrl = options.baseUrl ?? "http://localhost:3000";
const client = getClient(baseUrl, options) as LRORestClientRestClient;
return client;
} | the_stack |
import { PathAccessRole } from '@textile/hub';
export interface CreateFolderRequest {
  /**
   * Path in the bucket at which to create the empty folder
   */
  path: string;
  /**
   * Storage bucket in which to create the empty folder
   */
  bucket: string;
}
export interface ListDirectoryRequest {
  /**
   * Path in the bucket to fetch directories from
   */
  path: string;
  /**
   * Storage bucket to fetch directory entries from
   */
  bucket: string;
  /**
   * Set recursive to true if you would like all children of folder entries
   * to be recursively fetched.
   */
  recursive?: boolean;
}
/**
 * Represents a member on a shared file
 */
export interface FileMember {
  // Public key identifying the member
  publicKey:string;
  // Optional address of the member — NOTE(review): relationship to publicKey not visible here; confirm
  address?:string;
  // Access role the member holds on the shared path (from @textile/hub)
  role: PathAccessRole;
}
/**
 * Represents an item stored in a storages directory
 */
export interface DirectoryEntry {
  name: string;
  path: string;
  // Content hash of the entry — presumably an IPFS CID; confirm with backend
  ipfsHash: string;
  isDir: boolean;
  sizeInBytes: number;
  created: string;
  updated: string;
  fileExtension: string;
  isLocallyAvailable: boolean;
  backupCount: number;
  // Members this entry is shared with (see FileMember)
  members: FileMember[];
  isBackupInProgress: boolean;
  isRestoreInProgress: boolean;
  uuid: string;
  // Child entries — typically populated for directories on recursive listings
  items?: DirectoryEntry[];
  bucket:string;
  dbId: string;
}
/** Response for a directory listing: the entries found at the requested path. */
export interface ListDirectoryResponse {
  items: DirectoryEntry[];
}
export interface OpenFileRequest {
  // Path of the file within the bucket
  path: string;
  // Storage bucket containing the file
  bucket: string;
  /**
   * progress callback if provided will be called with bytes read from
   * remote while opening the file.
   *
   */
  progress?: (bytesRead?: number) => void;
}
export interface OpenUuidFileRequest {
  // Unique identifier of the file to open (see DirectoryEntry.uuid)
  uuid: string;
  /**
   * progress callback if provided will be called with bytes read from
   * remote while opening the file.
   *
   */
  progress?: (bytesRead?: number) => void;
}
export interface MakeFilePublicRequest {
  path: string;
  bucket: string;
  /**
   * DbId where the file is located; optional, but required for instances where the file is a shared file.
   *
   */
  dbId?: string;
  /**
   * Specifies whether the file should be publicly accessible.
   *
   */
  allowAccess: boolean;
}
export interface OpenFileResponse {
  // Async stream of the file's bytes
  stream: AsyncIterableIterator<Uint8Array>;
  /**
   * consumeStream aggregates the stream data and returns the compounded bytes array.
   *
   * Note that if the `stream` has already been consumed/used once, consumeStream would
   * return an empty bytes array.
   */
  consumeStream: () => Promise<Uint8Array>;
  // Mime type recorded at upload time (see AddItemFile.mimeType); may be undefined
  mimeType: string | undefined;
}
export interface OpenUuidFileResponse {
  // Async stream of the file's bytes
  stream: AsyncIterableIterator<Uint8Array>;
  /**
   * consumeStream aggregates the stream data and returns the compounded bytes array.
   *
   * Note that if the `stream` has already been consumed/used once, consumeStream would
   * return an empty bytes array.
   */
  consumeStream: () => Promise<Uint8Array>;
  // Mime type recorded at upload time (see AddItemFile.mimeType); may be undefined
  mimeType: string | undefined;
  /**
   * Directory Entry representing the file this stream points to.
   *
   */
  entry: DirectoryEntry;
}
// Accepted payload forms for a file's contents when uploading.
export type AddItemDataType = ReadableStream<Uint8Array> | ArrayBuffer | string | Blob;
export interface AddItemFile {
  /**
   * path in the bucket where the file should be uploaded.
   * filename would be determined by the last segment in the path
   * so path folder/a_file.txt would have the name `a_file.txt`
   *
   */
  path: string;
  /**
   * MimeType of the file being added.
   * This value can be retrieved when opening the file later on.
   *
   */
  mimeType: string;
  data: AddItemDataType;
  /**
   * progress callback if provided will be called with bytes written to
   * remote while uploading the file.
   *
   */
  progress?: (bytesRead?: number) => void;
}
export interface AddItemsRequest {
  bucket: string;
  files: AddItemFile[];
}
export interface AddItemsStatus {
  path: string;
  status: 'success' | 'error';
  /**
   * Directory entry of uploaded file.
   *
   * Only present if status is 'success'.
   *
   */
  entry?: DirectoryEntry;
  // Presumably only set when status is 'error' (mirrors `entry`) — confirm
  error?: Error;
}
export interface AddItemsResultSummary {
  bucket: string;
  files: AddItemsStatus[];
}
export type AddItemsEventData = AddItemsStatus | AddItemsResultSummary;
export type AddItemsEventType = 'data' | 'error' | 'done';
export type AddItemsListener = (data: AddItemsEventData) => void;
export interface AddItemsResponse {
  on: (type: AddItemsEventType, listener: AddItemsListener) => void;
  /**
   * this function should only be used to listen for the `'done'` event, since the listener would only be called once;
   * otherwise you could end up leaking listener functions (unless you explicitly call the `off()` function).
   */
  once: (type: AddItemsEventType, listener: AddItemsListener) => void;
  off: (type: AddItemsEventType, listener: AddItemsListener) => void;
}
export interface MovePathsStatus {
  sourcePath: string;
  destPath: string;
  status: 'success' | 'error';
  error?: Error;
}
export interface MovePathsResultSummary {
  // Number of paths processed — NOTE(review): whether failures are counted is not visible here
  count: number;
}
export type MovePathsEventData = MovePathsStatus | MovePathsResultSummary;
export type MovePathsEventType = 'data' | 'error' | 'done';
export type MovePathsListener = (data: MovePathsEventData) => void;
export interface MovePathsResponse {
  on: (type: MovePathsEventType, listener: MovePathsListener) => void;
  /**
   * this function should only be used to listen for the `'done'` event, since the listener would only be called once;
   * otherwise you could end up leaking listener functions (unless you explicitly call the `off()` function).
   */
  once: (type: MovePathsEventType, listener: MovePathsListener) => void;
  off: (type: MovePathsEventType, listener: MovePathsListener) => void;
}
/**
 * SharedWithMeFiles Represents a file created for the user
 *
 */
export interface SharedWithMeFiles {
  entry: DirectoryEntry;
  /**
   * sharedBy is the public key of the owner of the files
   *
   */
  sharedBy: string;
}
export interface GetFilesSharedWithMeResponse {
  files: SharedWithMeFiles[];
  // Pagination cursor for the next page — presumably undefined when exhausted; confirm
  nextOffset?: string;
}
export interface AcceptInvitationResponse {
  files: SharedWithMeFiles[];
}
export interface GetFilesSharedByMeResponse {
  files: SharedWithMeFiles[];
  nextOffset?: string;
}
export interface GetRecentlySharedWithResponse {
  members: FileMember[];
  nextOffset?: string;
}
// Event emitted when a subscribed bucket changes.
export interface TxlSubscribeBucketEvent {
  bucketName: string;
  status: 'success' | 'error';
  error?: Error;
}
export type TxlSubscribeEventData = TxlSubscribeBucketEvent;
export type TxlSubscribeEventType = 'data' | 'error' | 'done';
export type TxlSubscribeListener = (data: TxlSubscribeEventData) => void;
export interface TxlSubscribeResponse {
  on: (type: TxlSubscribeEventType, listener: TxlSubscribeListener) => void;
  /**
   * this function should only be used to listen for the `'done'` event, since the listener would only be called once;
   * otherwise you could end up leaking listener functions (unless you explicitly call the `off()` function).
   */
  once: (type: TxlSubscribeEventType, listener: TxlSubscribeListener) => void;
  off: (type: TxlSubscribeEventType, listener: TxlSubscribeListener) => void;
}
// Event emitted when a notification arrives on the subscription.
export interface NotificationSubscribeEvent {
  notification: Notification;
  status: 'success' | 'error';
  error?: Error;
}
export type NotificationSubscribeEventData = NotificationSubscribeEvent;
export type NotificationSubscribeEventType = 'data' | 'error' | 'done';
export type NotificationSubscribeListener = (data: NotificationSubscribeEventData) => void;
export interface NotificationSubscribeResponse {
  on: (type: NotificationSubscribeEventType, listener: NotificationSubscribeListener) => void;
  /**
   * this function should only be used to listen for the `'done'` event, since the listener would only be called once;
   * otherwise you could end up leaking listener functions (unless you explicitly call the `off()` function).
   */
  once: (type: NotificationSubscribeEventType, listener: NotificationSubscribeListener) => void;
  off: (type: NotificationSubscribeEventType, listener: NotificationSubscribeListener) => void;
}
/**
 * FullPath represents full path information to a file.
 * `dbId` is optional and only required for when re-sharing files in another db.
 */
export interface FullPath {
  path: string;
  bucket: string;
  // Key of the bucket — presumably the underlying Textile bucket key; confirm
  bucketKey?: string;
  dbId?: string;
  uuid?: string;
}
/**
 * InvitationStatus represents the different statuses a file invitation could have
 */
export enum InvitationStatus {
  PENDING = 0,
  ACCEPTED,
  REJECTED,
}
/**
 * Invitation represents a file invitation
 * `invitationID` is the same as the underlying message ID from Textile
 */
export interface Invitation {
  inviterPublicKey: string;
  inviteePublicKey: string;
  invitationID?: string;
  status: InvitationStatus;
  // Full paths of the items this invitation covers
  itemPaths: FullPath[];
  // Keys associated with the shared paths — presumably encryption keys; confirm ordering vs itemPaths
  keys: string[];
}
/**
 * Data object to represent public key of a user to share information with
 *
 */
export interface SharePublicKeyInput {
  /**
   * A unique id provided by the client to identify this user.
   * For example, it can be the users username or email.
   *
   */
  id: string;
  /**
   * pk should be a multibase or hex encoded version of the public key to share.
   * It is also optional and can be left undefined. When undefined a temp key is generated
   * for the id.
   *
   */
  pk?: string;
}
export interface ShareViaPublicKeyRequest {
  /**
   * Public keys (multibase or hex encoded — see SharePublicKeyInput) of users
   * to share the specified files with.
   *
   */
  publicKeys: SharePublicKeyInput[];
  paths: FullPath[];
}
export enum ShareKeyType {
  Temp = 'temp',
  Existing = 'existing',
}
/**
 * Data object to represent public key of a user to share information with
 *
 */
export interface SharePublicKeyOutput {
  /**
   * This is the same unique id provided by the client on the SharePublicKeyInput
   *
   */
  id: string;
  /**
   * Multibase base32 encoded public key of user.
   *
   */
  pk: string;
  /**
   * Type is an enum that is ShareKeyType.Temp or ShareKeyType.Existing
   *
   * 'temp' is when the input doesn't provide a valid 'pk'
   * 'existing' is when the input had a `pk` set.
   *
   * It's useful for the user of the sdk to determine what type of action to be performed.
   */
  type: ShareKeyType;
  /**
   * Temporary access key for temp key types. To be used by user to access the invite
   *
   */
  tempKey?: string;
}
// NOTE(review): the suppression below looks stale — the interface now has a member.
// eslint-disable-next-line @typescript-eslint/no-empty-interface
export interface ShareViaPublicKeyResponse {
  publicKeys: SharePublicKeyOutput[];
}
export enum NotificationType {
  UNKNOWN = 0,
  INVITATION = 1,
  USAGEALERT = 2,
  INVITATION_REPLY = 3,
  REVOKED_INVITATION = 4,
}
export interface Notification {
  id: string;
  from: string;
  to: string;
  // Raw message payload — presumably encrypted, given decryptedBody below; confirm
  body: Uint8Array;
  decryptedBody: Uint8Array;
  type: NotificationType;
  // Creation timestamp — assumed epoch-based; confirm units (s/ms/ns)
  createdAt: number;
  // Set once the notification has been read — same units as createdAt
  readAt?: number;
  // Populated for invitation-related notification types — confirm
  relatedObject?: Invitation;
}
export interface GetNotificationsResponse {
  notifications: Notification[];
  nextOffset: string;
  lastSeenAt: number;
}
import {
cmp as cmp_,
onConsentChange as onConsentChange_,
} from '@guardian/consent-management-platform';
import type { Callback } from '@guardian/consent-management-platform/dist/types';
import type { TCFv2ConsentState } from '@guardian/consent-management-platform/dist/types/tcfv2';
import { setCookie, storage } from '@guardian/libs';
import { getReferrer as getReferrer_ } from '../../../../lib/detect';
import { getCountryCode as getCountryCode_ } from '../../../../lib/geolocation';
import { getPrivacyFramework as getPrivacyFramework_ } from '../../../../lib/getPrivacyFramework';
import { getSynchronousParticipations as getSynchronousParticipations_ } from '../experiments/ab';
import { isUserLoggedIn as isUserLoggedIn_ } from '../identity/api';
import { _, getPageTargeting } from './build-page-targeting';
import { commercialFeatures } from './commercial-features';
// Typed handles to the auto-mocked module functions (see the jest.mock calls
// below); the casts keep mockReturnValue/mockImplementation type-safe.
const getSynchronousParticipations =
	getSynchronousParticipations_ as jest.MockedFunction<
		typeof getSynchronousParticipations_
	>;
const getReferrer = getReferrer_ as jest.MockedFunction<typeof getReferrer_>;
const isUserLoggedIn = isUserLoggedIn_ as jest.MockedFunction<
	typeof isUserLoggedIn_
>;
const getCountryCode = getCountryCode_ as jest.MockedFunction<
	typeof getCountryCode_
>;
const getPrivacyFramework = getPrivacyFramework_ as jest.MockedFunction<
	typeof getPrivacyFramework_
>;
const cmp = {
	hasInitialised: cmp_.hasInitialised as jest.MockedFunction<
		typeof cmp_.hasInitialised
	>,
	willShowPrivacyMessageSync:
		cmp_.willShowPrivacyMessageSync as jest.MockedFunction<
			typeof cmp_.willShowPrivacyMessageSync
		>,
};
const onConsentChange = onConsentChange_ as jest.MockedFunction<
	typeof onConsentChange_
>;
// Generic function shape used by the lodash pass-through mocks below.
type UnknownFunc = (...args: unknown[]) => unknown;
jest.mock('../../../../lib/config');
// Environment/browser helpers are stubbed so each test controls their output.
jest.mock('../../../../lib/detect', () => ({
	getReferrer: jest.fn(),
	hasPushStateSupport: jest.fn(),
}));
jest.mock('../../../../lib/geolocation', () => ({
	getCountryCode: jest.fn(),
}));
jest.mock('../../../../lib/getPrivacyFramework', () => ({
	getPrivacyFramework: jest.fn(),
}));
jest.mock('../identity/api', () => ({
	isUserLoggedIn: jest.fn(),
}));
jest.mock('../experiments/ab', () => ({
	getSynchronousParticipations: jest.fn(),
}));
// Replace lodash `once`/`memoize` with the identity function so memoised
// values don't leak between tests.
jest.mock('lodash-es/once', () => (fn: UnknownFunc) => fn);
jest.mock('lodash-es/memoize', () => (fn: UnknownFunc) => fn);
jest.mock('./commercial-features', () => ({
	commercialFeatures() {
		// do nothing;
	},
}));
jest.mock('@guardian/consent-management-platform', () => ({
	onConsentChange: jest.fn(),
	cmp: {
		hasInitialised: jest.fn(),
		willShowPrivacyMessageSync: jest.fn(),
	},
}));
/**
 * Overrides `window.innerWidth`/`innerHeight` so viewport-dependent targeting
 * (e.g. the breakpoint `bp` param) can be exercised deterministically.
 *
 * `configurable: true` is required: this helper runs in every `beforeEach`
 * (see the suite setup), and properties defined via `Object.defineProperties`
 * default to non-configurable/non-writable, so a later call with a different
 * size would otherwise throw "TypeError: Cannot redefine property".
 */
const mockViewport = (width: number, height: number): void => {
	Object.defineProperties(window, {
		innerWidth: {
			value: width,
			configurable: true,
		},
		innerHeight: {
			value: height,
			configurable: true,
		},
	});
};
// CCPA
// Simulates a CCPA state where the user has NOT opted out of data selling.
const ccpaWithConsentMock = (callback: Callback): void =>
	callback({ ccpa: { doNotSell: false } });
// Simulates a CCPA state where the user HAS opted out of data selling.
const ccpaWithoutConsentMock = (callback: Callback): void =>
	callback({ ccpa: { doNotSell: true } });
// AUS
// Simulates an AUS state permitting personalised advertising.
// (`: void` annotation added for consistency with the CCPA mocks above.)
const ausWithConsentMock = (callback: Callback): void =>
	callback({ aus: { personalisedAdvertising: true } });
// Simulates an AUS state refusing personalised advertising.
const ausWithoutConsentMock = (callback: Callback): void =>
	callback({ aus: { personalisedAdvertising: false } });
// TCFv2
// Baseline TCFv2 consent state shared by the mocks below; each mock spreads
// this and overrides `consents`/`eventStatus` for its scenario.
const defaultState: TCFv2ConsentState = {
	consents: { 1: false },
	eventStatus: 'tcloaded',
	vendorConsents: { abc: false },
	addtlConsent: 'xyz',
	gdprApplies: true,
	tcString: 'YAAA',
};
// Purposes 1 and 2 granted after explicit user action.
const tcfv2WithConsentMock = (callback: Callback): void =>
	callback({
		tcfv2: {
			...defaultState,
			consents: { '1': true, '2': true },
			eventStatus: 'useractioncomplete',
		},
	});
// No purposes granted; the CMP UI is still showing.
const tcfv2WithoutConsentMock = (callback: Callback): void =>
	callback({
		tcfv2: { ...defaultState, consents: {}, eventStatus: 'cmpuishown' },
	});
// No TCFv2 state present at all.
const tcfv2NullConsentMock = (callback: Callback): void =>
	callback({ tcfv2: undefined });
// Purpose 1 refused but purpose 2 granted after explicit user action.
const tcfv2MixedConsentMock = (callback: Callback): void =>
	callback({
		tcfv2: {
			...defaultState,
			consents: { '1': false, '2': true },
			eventStatus: 'useractioncomplete',
		},
	});
describe('Build Page Targeting', () => {
beforeEach(() => {
window.guardian.config.page = {
authorIds: 'profile/gabrielle-chan',
blogIds: 'a/blog',
contentType: 'Video',
edition: 'US',
keywordIds:
'uk-news/prince-charles-letters,uk/uk,uk/prince-charles',
pageId: 'football/series/footballweekly',
publication: 'The Observer',
seriesId: 'film/series/filmweekly',
sponsorshipType: 'advertisement-features',
tones: 'News',
videoDuration: 63,
sharedAdTargeting: {
bl: ['blog'],
br: 'p',
co: ['gabrielle-chan'],
ct: 'video',
edition: 'us',
k: ['prince-charles-letters', 'uk/uk', 'prince-charles'],
ob: 't',
p: 'ng',
se: ['filmweekly'],
su: ['5'],
tn: ['news'],
url: '/football/series/footballweekly',
},
isSensitive: false,
// isHosted: true,
// isDev: true,
// isFront: false,
// ajaxUrl: '/dummy/',
// hasPageSkin: false,
// assetsPath: '/dummy/',
// section: 'unknown',
// pbIndexSites: [],
// adUnit: 'none',
} as unknown as PageConfig;
window.guardian.config.ophan = { pageViewId: 'presetOphanPageViewId' };
commercialFeatures.adFree = false;
setCookie({ name: 'adtest', value: 'ng101' });
// Reset mocking to default values.
_.resetPageTargeting();
onConsentChange.mockImplementation(tcfv2NullConsentMock);
getReferrer.mockReturnValue('');
mockViewport(0, 0);
isUserLoggedIn.mockReturnValue(true);
getSynchronousParticipations.mockReturnValue({
MtMaster: {
variant: 'variantName',
},
});
storage.local.setRaw('gu.alreadyVisited', String(0));
getCountryCode.mockReturnValue('US');
getPrivacyFramework.mockReturnValue({ ccpa: true });
jest.spyOn(global.Math, 'random').mockReturnValue(0.5);
expect.hasAssertions();
});
afterEach(() => {
jest.spyOn(global.Math, 'random').mockRestore();
jest.resetAllMocks();
});
it('should exist', () => {
expect(getPageTargeting).toBeDefined();
});
it('should build correct page targeting', () => {
const pageTargeting = getPageTargeting();
expect(pageTargeting.sens).toBe('f');
expect(pageTargeting.edition).toBe('us');
expect(pageTargeting.ct).toBe('video');
expect(pageTargeting.p).toBe('ng');
expect(pageTargeting.su).toEqual(['5']);
expect(pageTargeting.bp).toBe('mobile');
expect(pageTargeting.at).toBe('ng101');
expect(pageTargeting.si).toEqual('t');
expect(pageTargeting.co).toEqual(['gabrielle-chan']);
expect(pageTargeting.bl).toEqual(['blog']);
expect(pageTargeting.tn).toEqual(['news']);
expect(pageTargeting.vl).toEqual('90');
expect(pageTargeting.pv).toEqual('presetOphanPageViewId');
expect(pageTargeting.pa).toEqual('f');
expect(pageTargeting.cc).toEqual('US');
expect(pageTargeting.rp).toEqual('dotcom-platform');
});
it('should set correct personalized ad (pa) param', () => {
onConsentChange.mockImplementation(tcfv2WithConsentMock);
expect(getPageTargeting().pa).toBe('t');
_.resetPageTargeting();
onConsentChange.mockImplementation(tcfv2WithoutConsentMock);
expect(getPageTargeting().pa).toBe('f');
_.resetPageTargeting();
onConsentChange.mockImplementation(tcfv2NullConsentMock);
expect(getPageTargeting().pa).toBe('f');
_.resetPageTargeting();
onConsentChange.mockImplementation(tcfv2MixedConsentMock);
expect(getPageTargeting().pa).toBe('f');
_.resetPageTargeting();
onConsentChange.mockImplementation(ccpaWithConsentMock);
expect(getPageTargeting().pa).toBe('t');
_.resetPageTargeting();
onConsentChange.mockImplementation(ccpaWithoutConsentMock);
expect(getPageTargeting().pa).toBe('f');
});
it('Should correctly set the RDP flag (rdp) param', () => {
_.resetPageTargeting();
onConsentChange.mockImplementation(tcfv2WithoutConsentMock);
expect(getPageTargeting().rdp).toBe('na');
_.resetPageTargeting();
onConsentChange.mockImplementation(tcfv2NullConsentMock);
expect(getPageTargeting().rdp).toBe('na');
_.resetPageTargeting();
onConsentChange.mockImplementation(ccpaWithConsentMock);
expect(getPageTargeting().rdp).toBe('f');
_.resetPageTargeting();
onConsentChange.mockImplementation(ccpaWithoutConsentMock);
expect(getPageTargeting().rdp).toBe('t');
});
it('Should correctly set the TCFv2 (consent_tcfv2, cmp_interaction) params', () => {
_.resetPageTargeting();
getPrivacyFramework.mockReturnValue({ tcfv2: true });
onConsentChange.mockImplementation(tcfv2WithConsentMock);
expect(getPageTargeting().consent_tcfv2).toBe('t');
expect(getPageTargeting().cmp_interaction).toBe('useractioncomplete');
_.resetPageTargeting();
onConsentChange.mockImplementation(tcfv2WithoutConsentMock);
expect(getPageTargeting().consent_tcfv2).toBe('f');
expect(getPageTargeting().cmp_interaction).toBe('cmpuishown');
_.resetPageTargeting();
onConsentChange.mockImplementation(tcfv2MixedConsentMock);
expect(getPageTargeting().consent_tcfv2).toBe('f');
expect(getPageTargeting().cmp_interaction).toBe('useractioncomplete');
});
it('should set correct edition param', () => {
expect(getPageTargeting().edition).toBe('us');
});
it('should set correct se param', () => {
expect(getPageTargeting().se).toEqual(['filmweekly']);
});
it('should set correct k param', () => {
expect(getPageTargeting().k).toEqual([
'prince-charles-letters',
'uk/uk',
'prince-charles',
]);
});
it('should set correct ab param', () => {
expect(getPageTargeting().ab).toEqual(['MtMaster-variantName']);
});
it('should set Observer flag for Observer content', () => {
expect(getPageTargeting().ob).toEqual('t');
});
it('should set correct branding param for paid content', () => {
expect(getPageTargeting().br).toEqual('p');
});
it('should not contain an ad-free targeting value', () => {
expect(getPageTargeting().af).toBeUndefined();
});
it('should remove empty values', () => {
window.guardian.config.page = {} as PageConfig;
window.guardian.config.ophan = { pageViewId: '123456' };
expect(getPageTargeting()).toEqual({
ab: ['MtMaster-variantName'],
amtgrp: '7', // Because Math.random() is fixed to 0.5
at: 'ng101',
bp: 'mobile',
cc: 'US',
cmp_interaction: 'na',
consent_tcfv2: 'na',
dcre: 'f',
fr: '0',
inskin: 'f',
pa: 'f',
pv: '123456',
rdp: 'na',
rp: 'dotcom-platform',
sens: 'f',
si: 't',
skinsize: 's',
});
});
describe('Breakpoint targeting', () => {
it('should set correct breakpoint targeting for a mobile device', () => {
mockViewport(320, 0);
expect(getPageTargeting().bp).toEqual('mobile');
});
it('should set correct breakpoint targeting for a medium mobile device', () => {
mockViewport(375, 0);
expect(getPageTargeting().bp).toEqual('mobile');
});
it('should set correct breakpoint targeting for a mobile device in landscape mode', () => {
mockViewport(480, 0);
expect(getPageTargeting().bp).toEqual('mobile');
});
it('should set correct breakpoint targeting for a phablet device', () => {
mockViewport(660, 0);
expect(getPageTargeting().bp).toEqual('tablet');
});
it('should set correct breakpoint targeting for a tablet device', () => {
mockViewport(740, 0);
expect(getPageTargeting().bp).toEqual('tablet');
});
it('should set correct breakpoint targeting for a desktop device', () => {
mockViewport(980, 0);
expect(getPageTargeting().bp).toEqual('desktop');
});
it('should set correct breakpoint targeting for a leftCol device', () => {
mockViewport(1140, 0);
expect(getPageTargeting().bp).toEqual('desktop');
});
it('should set correct breakpoint targeting for a wide device', () => {
mockViewport(1300, 0);
expect(getPageTargeting().bp).toEqual('desktop');
});
it('should set appNexusPageTargeting as flatten string', () => {
mockViewport(1024, 0);
getPageTargeting();
expect(window.guardian.config.page.appNexusPageTargeting).toEqual(
'sens=f,pt1=/football/series/footballweekly,pt2=us,pt3=video,pt4=ng,pt5=prince-charles-letters,pt5=uk/uk,pt5=prince-charles,pt6=5,pt7=desktop,pt9=presetOphanPageViewId|gabrielle-chan|news',
);
});
});
describe('Build Page Targeting (ad-free)', () => {
it('should set the ad-free param to t when enabled', () => {
commercialFeatures.adFree = true;
expect(getPageTargeting().af).toBe('t');
});
});
describe('Already visited frequency', () => {
it('can pass a value of five or less', () => {
storage.local.setRaw('gu.alreadyVisited', String(5));
expect(getPageTargeting().fr).toEqual('5');
});
it('between five and thirty, includes it in a bucket in the form "x-y"', () => {
storage.local.setRaw('gu.alreadyVisited', String(18));
expect(getPageTargeting().fr).toEqual('16-19');
});
it('over thirty, includes it in the bucket "30plus"', () => {
storage.local.setRaw('gu.alreadyVisited', String(300));
expect(getPageTargeting().fr).toEqual('30plus');
});
it('passes a value of 0 if the value is not stored', () => {
storage.local.remove('gu.alreadyVisited');
expect(getPageTargeting().fr).toEqual('0');
});
it('passes a value of 0 if the number is invalid', () => {
storage.local.setRaw('gu.alreadyVisited', 'not-a-number');
expect(getPageTargeting().fr).toEqual('0');
});
});
describe('Referrer', () => {
it('should set ref to Facebook', () => {
getReferrer.mockReturnValue(
'https://www.facebook.com/feel-the-force',
);
expect(getPageTargeting().ref).toEqual('facebook');
});
it('should set ref to Twitter', () => {
getReferrer.mockReturnValue(
'https://www.t.co/you-must-unlearn-what-you-have-learned',
);
expect(getPageTargeting().ref).toEqual('twitter');
});
it('should set ref to reddit', () => {
getReferrer.mockReturnValue(
'https://www.reddit.com/its-not-my-fault',
);
expect(getPageTargeting().ref).toEqual('reddit');
});
it('should set ref to google', () => {
getReferrer.mockReturnValue(
'https://www.google.com/i-find-your-lack-of-faith-distrubing',
);
expect(getPageTargeting().ref).toEqual('google');
});
it('should set ref empty string if referrer does not match', () => {
getReferrer.mockReturnValue('https://theguardian.com');
expect(getPageTargeting().ref).toEqual(undefined);
});
});
describe('URL Keywords', () => {
it('should return correct keywords from pageId', () => {
expect(getPageTargeting().urlkw).toEqual(['footballweekly']);
});
it('should extract multiple url keywords correctly', () => {
window.guardian.config.page.pageId =
'stage/2016/jul/26/harry-potter-cursed-child-review-palace-theatre-london';
expect(getPageTargeting().urlkw).toEqual([
'harry',
'potter',
'cursed',
'child',
'review',
'palace',
'theatre',
'london',
]);
});
it('should get correct keywords when trailing slash is present', () => {
window.guardian.config.page.pageId =
'stage/2016/jul/26/harry-potter-cursed-child-review-palace-theatre-london/';
expect(getPageTargeting().urlkw).toEqual([
'harry',
'potter',
'cursed',
'child',
'review',
'palace',
'theatre',
'london',
]);
});
});
describe('inskin targeting', () => {
it('should not allow inskin if cmp has not initialised', () => {
cmp.hasInitialised.mockReturnValue(false);
cmp.willShowPrivacyMessageSync.mockReturnValue(false);
mockViewport(1920, 1080);
expect(getPageTargeting().inskin).toBe('f');
});
it('should not allow inskin if cmp will show a banner', () => {
cmp.hasInitialised.mockReturnValue(true);
cmp.willShowPrivacyMessageSync.mockReturnValue(true);
mockViewport(1920, 1080);
expect(getPageTargeting().inskin).toBe('f');
});
});
describe('skinsize targetting', () => {
it.each([
['s', 1280],
['s', 1440],
['s', 1559],
['l', 1560],
['l', 1561],
['l', 1920],
['l', 2560],
])("should return '%s' if viewport width is %s", (expected, width) => {
cmp.hasInitialised.mockReturnValue(true);
cmp.willShowPrivacyMessageSync.mockReturnValue(false);
mockViewport(width, 800);
expect(getPageTargeting().skinsize).toBe(expected);
});
it("should return 's' if vp does not have a width", () => {
mockViewport(0, 0);
expect(getPageTargeting().skinsize).toBe('s');
});
});
describe('ad manager group value', () => {
const STORAGE_KEY = 'gu.adManagerGroup';
it('if present in localstorage, use value from storage', () => {
onConsentChange.mockImplementation(tcfv2WithConsentMock);
storage.local.setRaw(STORAGE_KEY, '10');
expect(getPageTargeting().amtgrp).toEqual('10');
storage.local.remove(STORAGE_KEY);
});
it.each([
[ccpaWithConsentMock, '9'],
[ccpaWithoutConsentMock, '9'],
[ausWithConsentMock, '9'],
[ausWithoutConsentMock, '9'],
[tcfv2WithConsentMock, '9'],
[tcfv2WithoutConsentMock, undefined],
[tcfv2MixedConsentMock, undefined],
[tcfv2MixedConsentMock, undefined],
])('Framework %p => amtgrp is %s', (framewok, value) => {
onConsentChange.mockImplementation(framewok);
storage.local.setRaw(STORAGE_KEY, '9');
expect(getPageTargeting().amtgrp).toEqual(value);
storage.local.remove(STORAGE_KEY);
});
it('if not present in localstorage, generate a random group 1-12, store in localstorage', () => {
onConsentChange.mockImplementation(tcfv2WithConsentMock);
// restore Math.random for this test so we can assert the group value range is 1-12
jest.spyOn(global.Math, 'random').mockRestore();
const valueGenerated = getPageTargeting().amtgrp;
expect(valueGenerated).toBeDefined();
expect(Number(valueGenerated)).toBeGreaterThanOrEqual(1);
expect(Number(valueGenerated)).toBeLessThanOrEqual(12);
const valueFromStorage = storage.local.getRaw(STORAGE_KEY);
expect(valueFromStorage).toEqual(valueGenerated);
});
});
}); | the_stack |
import * as React from 'react';
import { AbstractDataSource } from './AbstractDataSource';
import { DataItem, DataItemKind, DataSourceActionResult, MediaItem, SearchQuery, SearchResult } from '../types';
import { EventEmitter } from '../common/EventEmitter';
import type { EditorRegistry } from '../editors/EditorRegistry';
import { isMediaItem, isNoteItem } from '../utils';
import { LogService } from '../common/LogService';
import { DevtoolsContextType } from '../components/devtools/DevToolsContextProvider';
import { InternalTag } from './InternalTag';
// Module-scoped logger; every DataInterface operation logs through this tag.
const logger = LogService.getLogger('DataInterface');
/** Kind of mutation reported via `DataInterface.onChangeItems`. */
export enum ItemChangeEventReason {
  /** A new data item was created. */
  Created = 'created',
  /** The note body (content document) of an item was rewritten. */
  ChangedNoteContents = 'changed_contents',
  /** The item was deleted. */
  Removed = 'removed',
  /** Item metadata (tags, children, timestamps, ...) changed. */
  Changed = 'changed',
}
/** Event payload emitted on `DataInterface.onChangeItems` for every item mutation. */
export interface ItemChangeEvent {
  /** Id of the affected data item. */
  id: string;
  /** What happened to the item. */
  reason: ItemChangeEventReason;
  // TODO: just send item data with it, its usually available anyways and saves tons of reads
}
/** Event payload emitted on `DataInterface.onAddFiles` when a media file is added. */
export interface FileAddEvent {
  /** Id of the newly created media item. */
  id: string;
  /** The created media item itself. */
  item: MediaItem;
  /** Whether the currently focused editor should insert the file immediately. */
  insertIntoActiveEditor: boolean;
}
export class DataInterface implements AbstractDataSource {
private cache: { [key: string]: any } = {};
private cachedKeys: string[] = [];
private cachedKeysIt = 0;
private dirty = false;
private persistInterval?: number;
public onChangeItems = new EventEmitter<ItemChangeEvent[]>('DataInterface:onChangeItems');
public onAddFiles = new EventEmitter<FileAddEvent[]>('DataInterface:onAddFiles');
constructor(
public dataSource: AbstractDataSource,
private editors: EditorRegistry,
private cacheLength: number = 50,
private devtools?: DevtoolsContextType
) {
if (devtools) {
this.onChangeItems.on(() => devtools.increaseCounter('DI onChangeItems'));
this.onAddFiles.on(() => devtools.increaseCounter('DI onAddFiles'));
}
}
public async load() {
logger.log('load', [], { dataInterface: this });
await this.dataSource.load();
if (!(await this.dataSource.getStructure('tags'))) {
await this.dataSource.storeStructure('tags', {});
}
this.persistInterval = (setInterval(() => this.persist(), 10000) as unknown) as number;
}
public async reload() {
logger.log('reload', [], { dataInterface: this });
await this.dataSource.reload();
}
public async unload() {
logger.log('unload', [], { dataInterface: this });
await this.dataSource.unload();
if (this.persistInterval) {
clearInterval(this.persistInterval);
}
}
private async tryCache<T>(id: string, orFetch: () => Promise<T>) {
this.devtools?.increaseCounter('DI try cache');
const cacheItem = this.cache[id];
if (cacheItem) {
this.devtools?.increaseCounter('DI cache hit');
return cacheItem;
} else {
this.devtools?.increaseCounter('DI cache miss');
const item = await orFetch();
this.cache[id] = item;
this.cachedKeys[this.cachedKeysIt] = id;
this.cachedKeysIt++;
this.cachedKeysIt = this.cachedKeysIt % this.cacheLength;
if (this.cachedKeys.length >= this.cacheLength) {
this.devtools?.increaseCounter('DI cache purge');
delete this.cache[this.cachedKeysIt];
}
return item;
}
}
private updateCache(id: string, newValue: any) {
this.cache[id] = newValue;
}
private makeDirty() {
this.dirty = true;
}
public async getDataItem<K extends DataItemKind>(id: string): Promise<DataItem<K>> {
this.devtools?.increaseCounter('DI getDataItem');
const item = await this.tryCache(id, () => this.dataSource.getDataItem(id));
logger.log('Getting item with Id', [id], { item });
return item;
}
public async getNoteItemContent<C extends object>(id: string): Promise<C> {
this.devtools?.increaseCounter('DI getNoteItemContent');
return await this.dataSource.getNoteItemContent<C>(id);
}
public async writeNoteItemContent<C extends object>(id: string, content: C): Promise<DataSourceActionResult> {
this.devtools?.increaseCounter('DI writeNoteItemContent');
const result = await this.dataSource.writeNoteItemContent<C>(id, content);
logger.out('Writing contents to file for ', [id], { content });
this.onChangeItems.emit([{ id, reason: ItemChangeEventReason.ChangedNoteContents }]);
const currentItemContent = await this.getDataItem(id);
await this.changeItem(id, { ...currentItemContent, lastChange: new Date().getTime() });
return result;
}
public async createDataItem<K extends DataItemKind>(
item: Omit<DataItem<K>, 'id'> & { id?: string }
): Promise<DataItem<K>> {
this.devtools?.increaseCounter('DI createDataItem');
const result = await this.dataSource.createDataItem<K>(item);
await this.initializeNoteContent(result);
this.onChangeItems.emit([{ id: result.id, reason: ItemChangeEventReason.Created }]);
this.makeDirty();
return result;
}
public async createDataItemUnderParent<K extends DataItemKind>(
item: Omit<DataItem<K>, 'id'>,
parentId: string
): Promise<DataItem<K>> {
const parent = await this.dataSource.getDataItem(parentId);
if (!parent) {
throw Error(`Can't create item within parent ${parentId}, parent does not exist.`);
}
const itemResult = await this.dataSource.createDataItem<K>(item);
await this.initializeNoteContent(itemResult);
const overwriteParent = { ...parent, childIds: [...parent.childIds, itemResult.id] };
await this.dataSource.changeItem(parentId, overwriteParent);
this.updateCache(parentId, overwriteParent);
this.onChangeItems.emit([{ id: itemResult.id, reason: ItemChangeEventReason.Created }]);
this.onChangeItems.emit([{ id: parentId, reason: ItemChangeEventReason.Changed }]);
return itemResult;
}
public async addDataItemToParent(itemId: string, parentId: string): Promise<DataSourceActionResult> {
return await this.changeItem(parentId, old => ({ childIds: [...old.childIds, itemId] }));
}
public async removeDataItemFromParent(itemId: string, parentId: string): Promise<DataSourceActionResult> {
return await this.changeItem(parentId, old => ({ childIds: old.childIds.filter(id => id !== itemId) }));
}
public async removeItem(id: string, recursive?: boolean): Promise<DataSourceActionResult> {
this.devtools?.increaseCounter('DI removeItem');
const data = await this.dataSource.getDataItem(id);
if (!data) {
throw Error(`Cannot remove dataitem ${id}, it does not exist.`);
}
const result = await this.dataSource.removeItem(id);
this.updateCache(id, undefined);
const parents = await this.getParentsOf(id);
for (const parent of parents) {
await this.changeItem(parent.id, {
...parent,
childIds: parent.childIds.filter(childId => childId !== id),
});
}
if (isMediaItem(data)) {
await this.dataSource.removeMediaItemContent(data);
}
this.onChangeItems.emit([{ id, reason: ItemChangeEventReason.Removed }]);
this.makeDirty();
if (recursive) {
for (const childId of data.childIds) {
await this.removeItem(childId, true);
}
}
return result;
}
public async moveItem(id: string, originalParentId: string, targetParentId: string, targetIndex: number) {
// TODO move changeItem logic in here (we dont need tag change logic here) and send onChange event immediately to fix drag-and-drop herpiness in sidebar
this.devtools?.increaseCounter('DI moveItem');
// const item = await this.getDataItem(id);
const originalParent = await this.getDataItem(originalParentId);
await this.changeItem(originalParentId, {
...originalParent,
childIds: originalParent.childIds.filter(childId => id !== childId),
});
const targetParent = await this.getDataItem(targetParentId);
await this.changeItem(targetParentId, {
...targetParent,
childIds: [
...targetParent.childIds.filter((childId, index) => index < targetIndex),
id,
...targetParent.childIds.filter((childId, index) => index >= targetIndex),
],
});
const item = await this.getDataItem(id);
if (item.tags.includes(InternalTag.Draft)) {
await this.changeItem(id, {
tags: item.tags.filter(t => t !== InternalTag.Draft && t !== InternalTag.Trash),
});
}
}
public async changeItem<K extends DataItemKind>(
id: string,
overwriteWith: Partial<DataItem<K>> | ((old: DataItem<K>) => Partial<DataItem<K>>)
): Promise<DataSourceActionResult> {
this.devtools?.increaseCounter('DI changeItem');
const old = (await this.dataSource.getDataItem(id)) as DataItem<K>;
if (!old) {
throw Error(`Dataitem with id ${id} does not exist.`);
}
const overwriteItem = typeof overwriteWith === 'function' ? { ...old, ...overwriteWith(old) } : overwriteWith;
if (overwriteItem.tags && old.tags.sort().toString() !== overwriteItem.tags.sort().toString()) {
// TODO factor into its own method, use constant for 'tags'
const removedTags = old.tags.filter(tag => !overwriteItem.tags?.includes(tag));
const addedTags = overwriteItem.tags.filter(tag => !old.tags.includes(tag));
const tagsStructure = await this.getStructure('tags');
for (const removedTag of removedTags) {
if (tagsStructure[removedTag]) {
tagsStructure[removedTag].count--;
if (tagsStructure[removedTag].count === 0) {
delete tagsStructure[removedTag];
}
}
}
for (const addedTag of addedTags) {
if (tagsStructure[addedTag]) {
tagsStructure[addedTag].count++;
} else {
tagsStructure[addedTag] = {
count: 1,
};
}
}
await this.storeStructure('tags', tagsStructure);
}
const completeOverwriteItem = { ...old, ...overwriteItem };
logger.log('Updating item', [id], { old, update: overwriteItem, newItem: completeOverwriteItem });
const result = await this.dataSource.changeItem(id, completeOverwriteItem);
this.updateCache(id, completeOverwriteItem);
this.onChangeItems.emit([{ id, reason: ItemChangeEventReason.Changed }]);
this.makeDirty();
return result;
}
public async search(search: SearchQuery): Promise<SearchResult> {
this.devtools?.increaseCounter('DI search');
if (Object.keys(search).filter(key => (search as any)[key] !== undefined).length === 0) {
return {
results: [],
nextPageAvailable: false,
};
}
logger.log('Performing search', [], { search });
return await this.dataSource.search(search);
}
public async loadMediaItemContent(id: string): Promise<Buffer | Blob> {
return await this.dataSource.loadMediaItemContent(id);
}
public async loadMediaItemContentAsPath(id: string): Promise<string> {
return await this.dataSource.loadMediaItemContentAsPath(id);
}
public async loadMediaItemContentThumbnailAsPath(id: string): Promise<string | undefined> {
return await this.dataSource.loadMediaItemContentThumbnailAsPath(id);
}
public async storeMediaItemContent(
id: string,
localPath: string,
thumbnail: { width?: number; height?: number } | undefined
): Promise<DataSourceActionResult> {
return await this.dataSource.storeMediaItemContent(id, localPath, thumbnail);
}
public removeMediaItemContent(item: MediaItem): Promise<DataSourceActionResult> {
throw Error(
'Do not call DataInterface.removeMediaItemContent() directly, DataInterface.removeItem() will delete media data automatically.'
);
}
public async persist(): Promise<DataSourceActionResult> {
this.devtools?.increaseCounter('DI persist');
if (this.dirty) {
logger.log('Persisting', [], { source: this.dataSource });
this.dirty = false;
return await this.dataSource.persist();
} else {
logger.log('Skipping Persist, not dirty', [], { source: this.dataSource });
}
}
public async getParentsOf<K extends DataItemKind>(childId: string): Promise<DataItem<K>[]> {
this.devtools?.increaseCounter('DI getParentsOf');
return await this.dataSource.getParentsOf(childId);
}
private async initializeNoteContent(item: DataItem) {
if (isNoteItem(item)) {
const editor = this.editors.getEditorWithId(item.noteType);
if (!editor) {
throw Error(`Cannot initialize note content, editor ${item.noteType} is unknown.`);
}
await this.dataSource.writeNoteItemContent(item.id, editor.initializeContent());
}
}
public async getStructure<K extends any = any>(id: string): Promise<K> {
this.devtools?.increaseCounter('DI getStructure');
this.devtools?.increaseCounter('DI getStructure:' + id);
const structure = await this.dataSource.getStructure(id);
logger.log('getStructure', [id], { structure });
return structure; // TODO cache?
}
public async storeStructure<K extends any = any>(id: string, structure: K): Promise<DataSourceActionResult> {
this.devtools?.increaseCounter('DI storeStructure');
logger.log('storeStructure', [id], { structure });
this.makeDirty();
return await this.dataSource.storeStructure(id, structure);
}
public async getAvailableTags(): Promise<Array<{ value: string }>> {
this.devtools?.increaseCounter('DI getAvailableTags');
return Object.entries(await this.getStructure('tags')).map(([value, data]: any) => ({ value, ...data }));
}
// TODO bulk operations such as changeItems, removeItems, ...
} | the_stack |
import { useContentGqlHandler } from "../utils/useContentGqlHandler";
import { mocks as changeRequestMock, richTextMock } from "./mocks/changeRequest";
import { createSetupForContentReview } from "../utils/helpers";
// Verifies that a content review's `totalComments` / `latestCommentId` fields
// stay in sync as comments and change requests are created and deleted.
// NOTE: the `until(...)` polling calls are order-dependent — each waits for an
// eventually-consistent projection to catch up before the next assertion.
describe(`Total comments count test`, () => {
    const options = {
        path: "manage/en-US"
    };

    const gqlHandler = useContentGqlHandler({
        ...options
    });
    const {
        createChangeRequestMutation,
        createContentReviewMutation,
        listContentReviewsQuery,
        listChangeRequestsQuery,
        createCommentMutation,
        listCommentsQuery,
        deleteCommentMutation,
        deleteChangeRequestMutation,
        until
    } = gqlHandler;

    // Helper: starts a content review for the given page and returns the entry.
    const createContentReview = async (page: any) => {
        const [createContentReviewResponse] = await createContentReviewMutation({
            data: {
                content: {
                    id: page.id,
                    type: "page"
                }
            }
        });
        return createContentReviewResponse.data.apw.createContentReview.data;
    };

    // Shared matchers for the review's steps and content in list responses.
    const expectedSteps = expect.arrayContaining([
        {
            id: expect.any(String),
            status: expect.any(String),
            pendingChangeRequests: 1,
            signOffProvidedOn: null,
            signOffProvidedBy: null
        }
    ]);

    const expectedContent = {
        id: expect.any(String),
        type: expect.any(String),
        version: expect.any(Number),
        settings: null,
        publishedBy: null,
        publishedOn: null,
        scheduledBy: null,
        scheduledOn: null
    };

    test(`should able to update "totalComments" count in a content review`, async () => {
        const { page } = await createSetupForContentReview(gqlHandler);

        const contentReview = await createContentReview(page);
        const [step1] = contentReview.steps;

        await until(
            () => listContentReviewsQuery({}).then(([data]) => data),
            (response: any) => {
                const list = response.data.apw.listContentReviews.data;
                return list.length === 1;
            },
            {
                name: `Wait for "ContentReview" entry to be available in list query`
            }
        );

        /*
         * Create a new change request entry for step 1.
         */
        const [createChangeRequestResponse] = await createChangeRequestMutation({
            data: changeRequestMock.createChangeRequestInput({
                step: `${contentReview.id}#${step1.id}`
            })
        });
        const changeRequested = createChangeRequestResponse.data.apw.createChangeRequest.data;

        await until(
            () => listChangeRequestsQuery({}).then(([data]) => data),
            (response: any) => {
                const list = response.data.apw.listChangeRequests.data;
                return list.length === 1;
            },
            {
                name: `Wait for "ChangeRequest" entry to be available in list query`
            }
        );

        await until(
            () => listContentReviewsQuery({}).then(([data]) => data),
            (response: any) => {
                const [entry] = response.data.apw.listContentReviews.data;
                return (
                    entry.steps.find((step: any) => step.id === step1.id).pendingChangeRequests ===
                    1
                );
            },
            {
                name: "Wait for updated entry to be available in list query"
            }
        );

        /*
         * Add two comments on to the change request.
         */
        const createdComments = [];
        for (let i = 0; i < 2; i++) {
            const [createCommentResponse] = await createCommentMutation({
                data: {
                    body: richTextMock,
                    changeRequest: changeRequested.id
                }
            });
            const comment = createCommentResponse.data.apw.createComment.data;
            createdComments.push(comment);
        }

        await until(
            () =>
                listCommentsQuery({ changeRequest: { id: changeRequested.id } }).then(
                    ([data]) => data
                ),
            (response: any) => {
                const list = response.data.apw.listComments.data;
                return list.length === 2;
            },
            {
                name: `Wait for "Comments" entry to be available in list query`
            }
        );

        await until(
            () => listContentReviewsQuery({}).then(([data]) => data),
            (response: any) => {
                const [entry] = response.data.apw.listContentReviews.data;
                return entry.totalComments === 2;
            },
            {
                name: `Wait for updated "totalComments" count to be available in list query`
            }
        );

        const [comment1, comment2] = createdComments;

        /**
         * Should have 2 as totalComments count.
         */
        let [listContentReviewsResponse] = await listContentReviewsQuery({});
        expect(listContentReviewsResponse).toEqual({
            data: {
                apw: {
                    listContentReviews: {
                        data: [
                            {
                                id: expect.any(String),
                                createdOn: expect.stringMatching(/^20/),
                                savedOn: expect.stringMatching(/^20/),
                                createdBy: {
                                    id: expect.any(String),
                                    displayName: expect.any(String),
                                    type: "admin"
                                },
                                status: "underReview",
                                title: expect.any(String),
                                content: expect.objectContaining(expectedContent),
                                steps: expectedSteps,
                                totalComments: 2,
                                activeStep: {
                                    title: expect.any(String)
                                },
                                latestCommentId: comment2.id,
                                reviewers: expect.arrayContaining([expect.any(String)])
                            }
                        ],
                        meta: {
                            hasMoreItems: false,
                            cursor: null,
                            totalCount: 1
                        },
                        error: null
                    }
                }
            }
        });

        /**
         * Let's delete the second comment.
         */
        const [deleteCommentResponse] = await deleteCommentMutation({ id: comment2.id });
        expect(deleteCommentResponse).toEqual({
            data: {
                apw: {
                    deleteComment: {
                        data: true,
                        error: null
                    }
                }
            }
        });

        await until(
            () =>
                listCommentsQuery({ changeRequest: { id: changeRequested.id } }).then(
                    ([data]) => data
                ),
            (response: any) => {
                const list = response.data.apw.listComments.data;
                return list.length === 1;
            },
            {
                name: `Wait for delete comment operation reflect in list query`
            }
        );

        /**
         * Should have 1 as totalComments count.
         */
        [listContentReviewsResponse] = await listContentReviewsQuery({});
        expect(listContentReviewsResponse).toEqual({
            data: {
                apw: {
                    listContentReviews: {
                        data: [
                            {
                                id: expect.any(String),
                                createdOn: expect.stringMatching(/^20/),
                                savedOn: expect.stringMatching(/^20/),
                                createdBy: {
                                    id: expect.any(String),
                                    displayName: expect.any(String),
                                    type: "admin"
                                },
                                status: "underReview",
                                title: expect.any(String),
                                content: expect.objectContaining(expectedContent),
                                steps: expectedSteps,
                                totalComments: 1,
                                activeStep: {
                                    title: expect.any(String)
                                },
                                latestCommentId: comment1.id,
                                reviewers: expect.arrayContaining([expect.any(String)])
                            }
                        ],
                        meta: {
                            hasMoreItems: false,
                            cursor: null,
                            totalCount: 1
                        },
                        error: null
                    }
                }
            }
        });
    });

    // Same setup as above, but the change request itself is deleted, which must
    // cascade-delete its comments and reset the counters on the review.
    test(`should able to update "totalComments" count in a content review after "change request" delete`, async () => {
        const { page } = await createSetupForContentReview(gqlHandler);

        const contentReview = await createContentReview(page);
        const [step1] = contentReview.steps;

        await until(
            () => listContentReviewsQuery({}).then(([data]) => data),
            (response: any) => {
                const list = response.data.apw.listContentReviews.data;
                return list.length === 1;
            },
            {
                name: `Wait for "ContentReview" entry to be available in list query`
            }
        );

        /*
         * Create a new change request entry for step 1.
         */
        const [createChangeRequestResponse] = await createChangeRequestMutation({
            data: changeRequestMock.createChangeRequestInput({
                step: `${contentReview.id}#${step1.id}`
            })
        });
        const changeRequested = createChangeRequestResponse.data.apw.createChangeRequest.data;

        await until(
            () => listChangeRequestsQuery({}).then(([data]) => data),
            (response: any) => {
                const list = response.data.apw.listChangeRequests.data;
                return list.length === 1;
            },
            {
                name: `Wait for "ChangeRequest" entry to be available in list query`
            }
        );

        await until(
            () => listContentReviewsQuery({}).then(([data]) => data),
            (response: any) => {
                const [entry] = response.data.apw.listContentReviews.data;
                return (
                    entry.steps.find((step: any) => step.id === step1.id).pendingChangeRequests ===
                    1
                );
            },
            {
                name: "Wait for updated entry to be available in list query"
            }
        );

        /*
         * Add two comments on to the change request.
         */
        const createdComments = [];
        for (let i = 0; i < 2; i++) {
            const [createCommentResponse] = await createCommentMutation({
                data: {
                    body: richTextMock,
                    changeRequest: changeRequested.id
                }
            });
            const comment = createCommentResponse.data.apw.createComment.data;
            createdComments.push(comment);
        }

        await until(
            () =>
                listCommentsQuery({ changeRequest: { id: changeRequested.id } }).then(
                    ([data]) => data
                ),
            (response: any) => {
                const list = response.data.apw.listComments.data;
                return list.length === 2;
            },
            {
                name: `Wait for "Comments" entry to be available in list query`
            }
        );

        await until(
            () => listContentReviewsQuery({}).then(([data]) => data),
            (response: any) => {
                const [entry] = response.data.apw.listContentReviews.data;
                return entry.totalComments === 2;
            },
            {
                name: `Wait for updated "totalComments" count to be available in list query`
            }
        );

        const [, comment2] = createdComments;

        /**
         * Should have 2 as totalComments count.
         */
        let [listContentReviewsResponse] = await listContentReviewsQuery({});
        expect(listContentReviewsResponse).toEqual({
            data: {
                apw: {
                    listContentReviews: {
                        data: [
                            {
                                id: expect.any(String),
                                createdOn: expect.stringMatching(/^20/),
                                savedOn: expect.stringMatching(/^20/),
                                createdBy: {
                                    id: expect.any(String),
                                    displayName: expect.any(String),
                                    type: "admin"
                                },
                                status: "underReview",
                                title: expect.any(String),
                                content: expect.objectContaining(expectedContent),
                                steps: expectedSteps,
                                totalComments: 2,
                                activeStep: {
                                    title: expect.any(String)
                                },
                                latestCommentId: comment2.id,
                                reviewers: expect.arrayContaining([expect.any(String)])
                            }
                        ],
                        meta: {
                            hasMoreItems: false,
                            cursor: null,
                            totalCount: 1
                        },
                        error: null
                    }
                }
            }
        });

        /**
         * Let's delete the change request itself which in-turn should delete all the associated comments.
         */
        const [deleteChangeRequestResponse] = await deleteChangeRequestMutation({
            id: changeRequested.id
        });
        expect(deleteChangeRequestResponse).toEqual({
            data: {
                apw: {
                    deleteChangeRequest: {
                        data: true,
                        error: null
                    }
                }
            }
        });

        await until(
            () =>
                listCommentsQuery({ changeRequest: { id: changeRequested.id } }).then(
                    ([data]) => data
                ),
            (response: any) => {
                const list = response.data.apw.listComments.data;
                return list.length === 0;
            },
            {
                name: `Wait for delete comment operation reflect in list query`
            }
        );

        /**
         * Should have 0 as totalComments count.
         */
        [listContentReviewsResponse] = await listContentReviewsQuery({});
        expect(listContentReviewsResponse).toEqual({
            data: {
                apw: {
                    listContentReviews: {
                        data: [
                            {
                                id: expect.any(String),
                                createdOn: expect.stringMatching(/^20/),
                                savedOn: expect.stringMatching(/^20/),
                                createdBy: {
                                    id: expect.any(String),
                                    displayName: expect.any(String),
                                    type: "admin"
                                },
                                status: "underReview",
                                title: expect.any(String),
                                content: expect.objectContaining(expectedContent),
                                steps: expect.arrayContaining([
                                    {
                                        id: expect.any(String),
                                        status: expect.any(String),
                                        pendingChangeRequests: 0,
                                        signOffProvidedOn: null,
                                        signOffProvidedBy: null
                                    }
                                ]),
                                totalComments: 0,
                                activeStep: {
                                    title: expect.any(String)
                                },
                                latestCommentId: null,
                                reviewers: expect.arrayContaining([expect.any(String)])
                            }
                        ],
                        meta: {
                            hasMoreItems: false,
                            cursor: null,
                            totalCount: 1
                        },
                        error: null
                    }
                }
            }
        });
    });
});
import {setup} from '#testHelpers'
// A single primary-button press and release fires the canonical
// pointerdown -> mousedown -> pointerup -> mouseup -> click sequence
// and exactly one `click` event.
test('click element', async () => {
const {element, getClickEventsSnapshot, getEvents, user} = setup('<div />')
await user.pointer({keys: '[MouseLeft]', target: element})
expect(getClickEventsSnapshot()).toMatchInlineSnapshot(`
pointerdown - pointerId=1; pointerType=mouse; isPrimary=true
mousedown - button=0; buttons=1; detail=1
pointerup - pointerId=1; pointerType=mouse; isPrimary=true
mouseup - button=0; buttons=0; detail=1
click - button=0; buttons=0; detail=1
`)
expect(getEvents('click')).toHaveLength(1)
})
// Pressing (and holding — note the `>` suffix) the secondary button fires
// `contextmenu` right after `mousedown`; no `click` is produced.
test('secondary button triggers contextmenu', async () => {
const {element, getClickEventsSnapshot, getEvents, user} = setup('<div />')
await user.pointer({keys: '[MouseRight>]', target: element})
expect(getClickEventsSnapshot()).toMatchInlineSnapshot(`
pointerdown - pointerId=1; pointerType=mouse; isPrimary=true
mousedown - button=2; buttons=2; detail=1
contextmenu - button=2; buttons=2; detail=1
`)
expect(getEvents('contextmenu')).toHaveLength(1)
})
// Two consecutive primary clicks within one pointer() call produce a
// `dblclick`; the `detail` field carries the running click count
// (1 for the first click, 2 for the second and the dblclick).
test('double click', async () => {
const {element, getClickEventsSnapshot, getEvents, user} =
setup(`<div></div>`)
await user.pointer({keys: '[MouseLeft][MouseLeft]', target: element})
expect(getClickEventsSnapshot()).toMatchInlineSnapshot(`
pointerdown - pointerId=1; pointerType=mouse; isPrimary=true
mousedown - button=0; buttons=1; detail=1
pointerup - pointerId=1; pointerType=mouse; isPrimary=true
mouseup - button=0; buttons=0; detail=1
click - button=0; buttons=0; detail=1
pointerdown - pointerId=1; pointerType=mouse; isPrimary=true
mousedown - button=0; buttons=1; detail=2
pointerup - pointerId=1; pointerType=mouse; isPrimary=true
mouseup - button=0; buttons=0; detail=2
click - button=0; buttons=0; detail=2
dblclick - button=0; buttons=0; detail=2
`)
expect(getEvents('dblclick')).toHaveLength(1)
expect(getEvents('click')).toHaveLength(2)
// detail reflects the click count
expect(getEvents('mousedown')[1]).toHaveProperty('detail', 2)
expect(getEvents('dblclick')[0]).toHaveProperty('detail', 2)
})
// Two clicks issued by separate pointer() invocations each report
// detail=1 and no `dblclick` is fired.
test('two clicks', async () => {
const {element, getClickEventsSnapshot, getEvents, user} =
setup(`<div></div>`)
await user.pointer({
keys: '[MouseLeft]',
target: element,
})
await user.pointer({keys: '[MouseLeft]'})
expect(getClickEventsSnapshot()).toMatchInlineSnapshot(`
pointerdown - pointerId=1; pointerType=mouse; isPrimary=true
mousedown - button=0; buttons=1; detail=1
pointerup - pointerId=1; pointerType=mouse; isPrimary=true
mouseup - button=0; buttons=0; detail=1
click - button=0; buttons=0; detail=1
pointerdown - pointerId=1; pointerType=mouse; isPrimary=true
mousedown - button=0; buttons=1; detail=1
pointerup - pointerId=1; pointerType=mouse; isPrimary=true
mouseup - button=0; buttons=0; detail=1
click - button=0; buttons=0; detail=1
`)
expect(getEvents('dblclick')).toHaveLength(0)
expect(getEvents('click')).toHaveLength(2)
expect(getEvents('mousedown')[1]).toHaveProperty('detail', 1)
})
// Pressing a different button while the primary button is held resets the
// click counter (the right-button mousedown reports detail=1), but the
// still-held primary button keeps its old count for mouseup/click/dblclick.
test('other keys reset click counter, but keyup/click still uses the old count', async () => {
const {element, getClickEventsSnapshot, getEvents, user} =
setup(`<div></div>`)
await user.pointer({
keys: '[MouseLeft][MouseLeft>][MouseRight][MouseLeft]',
target: element,
})
expect(getClickEventsSnapshot()).toMatchInlineSnapshot(`
pointerdown - pointerId=1; pointerType=mouse; isPrimary=true
mousedown - button=0; buttons=1; detail=1
pointerup - pointerId=1; pointerType=mouse; isPrimary=true
mouseup - button=0; buttons=0; detail=1
click - button=0; buttons=0; detail=1
pointerdown - pointerId=1; pointerType=mouse; isPrimary=true
mousedown - button=0; buttons=1; detail=2
mousedown - button=2; buttons=3; detail=1
contextmenu - button=2; buttons=3; detail=1
mouseup - button=2; buttons=1; detail=1
pointerup - pointerId=1; pointerType=mouse; isPrimary=true
mouseup - button=0; buttons=0; detail=2
click - button=0; buttons=0; detail=2
dblclick - button=0; buttons=0; detail=2
pointerdown - pointerId=1; pointerType=mouse; isPrimary=true
mousedown - button=0; buttons=1; detail=1
pointerup - pointerId=1; pointerType=mouse; isPrimary=true
mouseup - button=0; buttons=0; detail=1
click - button=0; buttons=0; detail=1
`)
// Releasing the held primary button still uses the pre-reset count (2) …
expect(getEvents('mouseup')[2]).toHaveProperty('detail', 2)
// … while the next fresh press starts over at 1.
expect(getEvents('mousedown')[3]).toHaveProperty('detail', 1)
})
// A tap on a touch device fires the pointer events first, then the
// compatibility mouse events and the `click`. Touch contacts get a fresh
// pointerId (the mouse reserves pointerId=1).
test('click per touch device', async () => {
const {element, getClickEventsSnapshot, getEvents, user} =
setup(`<div></div>`)
await user.pointer({keys: '[TouchA]', target: element})
expect(getClickEventsSnapshot()).toMatchInlineSnapshot(`
pointerover - pointerId=2; pointerType=touch; isPrimary=undefined
pointerenter - pointerId=2; pointerType=touch; isPrimary=undefined
pointerdown - pointerId=2; pointerType=touch; isPrimary=true
pointerup - pointerId=2; pointerType=touch; isPrimary=true
pointerout - pointerId=2; pointerType=touch; isPrimary=undefined
pointerleave - pointerId=2; pointerType=touch; isPrimary=undefined
mouseover - button=0; buttons=0; detail=0
mouseenter - button=0; buttons=0; detail=0
mousemove - button=0; buttons=0; detail=0
mousedown - button=0; buttons=0; detail=1
mouseup - button=0; buttons=0; detail=1
click - button=0; buttons=0; detail=1
`)
// mouse is pointerId=1, every other pointer gets a new id
expect(getEvents('click')).toHaveLength(1)
expect(getEvents('click')[0]).toHaveProperty('pointerId', 2)
// A double tap produces a `dblclick`; each tap receives its own fresh
// pointerId, while the derived `dblclick` itself carries no pointerId.
test('double click per touch device', async () => {
const {element, getClickEventsSnapshot, getEvents, user} =
setup(`<div></div>`)
await user.pointer({keys: '[TouchA][TouchA]', target: element})
expect(getClickEventsSnapshot()).toMatchInlineSnapshot(`
pointerover - pointerId=2; pointerType=touch; isPrimary=undefined
pointerenter - pointerId=2; pointerType=touch; isPrimary=undefined
pointerdown - pointerId=2; pointerType=touch; isPrimary=true
pointerup - pointerId=2; pointerType=touch; isPrimary=true
pointerout - pointerId=2; pointerType=touch; isPrimary=undefined
pointerleave - pointerId=2; pointerType=touch; isPrimary=undefined
mouseover - button=0; buttons=0; detail=0
mouseenter - button=0; buttons=0; detail=0
mousemove - button=0; buttons=0; detail=0
mousedown - button=0; buttons=0; detail=1
mouseup - button=0; buttons=0; detail=1
click - button=0; buttons=0; detail=1
pointerover - pointerId=3; pointerType=touch; isPrimary=undefined
pointerenter - pointerId=3; pointerType=touch; isPrimary=undefined
pointerdown - pointerId=3; pointerType=touch; isPrimary=true
pointerup - pointerId=3; pointerType=touch; isPrimary=true
pointerout - pointerId=3; pointerType=touch; isPrimary=undefined
pointerleave - pointerId=3; pointerType=touch; isPrimary=undefined
mousemove - button=0; buttons=0; detail=0
mousedown - button=0; buttons=0; detail=2
mouseup - button=0; buttons=0; detail=2
click - button=0; buttons=0; detail=2
dblclick - button=0; buttons=0; detail=2
`)
// mouse is pointerId=1, every other pointer gets a new id
expect(getEvents('click')).toHaveLength(2)
expect(getEvents('click')[0]).toHaveProperty('pointerId', 2)
expect(getEvents('click')[1]).toHaveProperty('pointerId', 3)
expect(getEvents('dblclick')).toHaveLength(1)
expect(getEvents('dblclick')[0]).toHaveProperty('pointerId', undefined)
})
// When a second touch point joins before the first is lifted, the
// interaction is a multi-touch gesture and no click is fired.
test('multi touch does not click', async () => {
  const {element: target, getEvents: events, user: session} = setup(`<div></div>`)

  await session.pointer({keys: '[TouchA>][TouchB][/TouchA]', target})

  expect(events('click')).toHaveLength(0)
})
describe('label', () => {
  // Clicking a label forwards activation to its control, so both the
  // label and the control receive a `click` event (2 in total).
  test('click associated control per label', async () => {
    const {element: label, getEvents, user} = setup(
      `<label for="in">foo</label><input id="in"/>`,
    )

    await user.pointer({keys: '[MouseLeft]', target: label})

    expect(getEvents('click')).toHaveLength(2)
  })

  test('click nested control per label', async () => {
    const {element: label, getEvents, user} = setup(`<label><input/></label>`)

    await user.pointer({keys: '[MouseLeft]', target: label})

    expect(getEvents('click')).toHaveLength(2)
  })

  test('click nested select per label', async () => {
    const {element: label, getEvents, user} = setup(`<label><select/></label>`)

    await user.pointer({keys: '[MouseLeft]', target: label})

    expect(getEvents('click')).toHaveLength(2)
  })
})
describe('check/uncheck control per click', () => {
  // A checkbox toggles its checked state on every primary-button click.
  test('clicking changes checkbox', async () => {
    const {element: checkbox, user} = setup('<input type="checkbox" />')

    await user.pointer({keys: '[MouseLeft]', target: checkbox})
    expect(checkbox).toBeChecked()

    await user.pointer({keys: '[MouseLeft]', target: checkbox})
    expect(checkbox).not.toBeChecked()
  })

  // Radio buttons sharing a `name` are mutually exclusive: checking one
  // unchecks the other.
  test('clicking changes radio button', async () => {
    const {elements, user} = setup(`
<input type="radio" name="foo"/>
<input type="radio" name="foo"/>
`)
    const [radioA, radioB] = elements

    await user.pointer({keys: '[MouseLeft]', target: radioA})
    expect(radioA).toBeChecked()

    await user.pointer({keys: '[MouseLeft]', target: radioB})
    expect(radioA).not.toBeChecked()
  })

  // Clicking an associated label toggles the checkable input it points to.
  test('clicking label changes checkable input', async () => {
    const {elements, user} = setup(
      `<input type="checkbox" id="a"/><label for="a"></label>`,
    )
    const [input, label] = elements

    await user.pointer({keys: '[MouseLeft]', target: label})
    expect(input).toBeChecked()

    await user.pointer({keys: '[MouseLeft]', target: label})
    expect(input).not.toBeChecked()
  })
})
describe('submit form per click', () => {
  // A <button> inside a form defaults to type="submit" and submits it.
  test('submits a form when clicking on a <button>', async () => {
    const {element: form, eventWasFired, user} = setup(
      `<form><button></button></form>`,
    )

    await user.pointer({keys: '[MouseLeft]', target: form.children[0]})

    expect(eventWasFired('submit')).toBe(true)
  })

  // An explicit type="button" must not trigger form submission.
  test('does not submit a form when clicking on a <button type="button">', async () => {
    const {element: form, eventWasFired, user} = setup(
      `<form><button type="button"></button></form>`,
    )

    await user.pointer({keys: '[MouseLeft]', target: form.children[0]})

    expect(eventWasFired('submit')).toBe(false)
  })
})
test('secondary mouse button fires `contextmenu` instead of `click`', async () => {
const {element, getEvents, clearEventCalls, user} = setup(`<button/>`)
await user.pointer({keys: '[MouseLeft]', target: element})
expect(getEvents('click')).toHaveLength(1)
expect(getEvents('contextmenu')).toHaveLength(0)
clearEventCalls()
await user.pointer({keys: '[MouseRight]', target: element})
expect(getEvents('contextmenu')).toHaveLength(1)
expect(getEvents('click')).toHaveLength(0)
}) | the_stack |
import {isChromeOS} from 'chrome://resources/js/cr.m.js';
import {loadTimeData} from 'chrome://resources/js/load_time_data.m.js';
import {DragManager, DragManagerDelegate, PLACEHOLDER_GROUP_ID, PLACEHOLDER_TAB_ID} from 'chrome://tab-strip.top-chrome/drag_manager.js';
import {TabElement} from 'chrome://tab-strip.top-chrome/tab.js';
import {TabGroupElement} from 'chrome://tab-strip.top-chrome/tab_group.js';
import {TabsApiProxyImpl} from 'chrome://tab-strip.top-chrome/tabs_api_proxy.js';
import {assertEquals, assertFalse, assertTrue} from 'chrome://webui-test/chai_assert.js';
import {createTab, TestTabsApiProxy} from './test_tabs_api_proxy.js';
/**
 * Minimal DragManagerDelegate backed by a plain custom element: tab and
 * tab-group elements are represented directly as its DOM children.
 */
class MockDelegate extends HTMLElement implements DragManagerDelegate {
  /** Returns the position of `tabElement` among all tabs in DOM order. */
  getIndexOfTab(tabElement: TabElement) {
    return Array.from(this.querySelectorAll('tabstrip-tab'))
        .indexOf(tabElement);
  }

  /**
   * Re-inserts `element` at `index`, nesting it inside the group element
   * identified by `groupId` when one is given.
   */
  placeTabElement(
      element: TabElement, index: number, _pinned: boolean, groupId?: string) {
    element.remove();
    // Quote the attribute value: an unquoted selector value must be a valid
    // CSS identifier, which is not guaranteed for arbitrary group ids.
    const parent =
        groupId ? this.querySelector(`[data-group-id="${groupId}"]`) : this;
    parent!.insertBefore(element, this.children[index]!);
  }

  /** Re-inserts the group element at `index` among this element's children. */
  placeTabGroupElement(element: TabGroupElement, index: number) {
    element.remove();
    this.insertBefore(element, this.children[index]!);
  }

  /** Drags are never suppressed in these tests. */
  shouldPreventDrag() {
    return false;
  }
}
customElements.define('mock-delegate', MockDelegate);
// DataTransfer stand-in that records the arguments of setDragImage() and
// makes dropEffect/effectAllowed writable so tests can set and inspect them.
class MockDataTransfer extends DataTransfer {
private dropEffect_: 'link'|'none'|'copy'|'move' = 'none';
private effectAllowed_: 'none'|'copy'|'copyLink'|'copyMove'|'link'|'linkMove'|
'move'|'all'|'uninitialized' = 'none';
// Captured arguments of the most recent setDragImage() call.
dragImageData: {image?: Element, offsetX?: number, offsetY?: number};
constructor() {
super();
this.dragImageData = {
image: undefined,
offsetX: undefined,
offsetY: undefined,
};
}
get dropEffect() {
return this.dropEffect_;
}
set dropEffect(effect) {
this.dropEffect_ = effect;
}
get effectAllowed() {
return this.effectAllowed_;
}
set effectAllowed(effect) {
this.effectAllowed_ = effect;
}
// Records the drag image instead of rendering anything.
setDragImage(image: Element, offsetX: number, offsetY: number) {
this.dragImageData.image = image;
this.dragImageData.offsetX = offsetX;
this.dragImageData.offsetY = offsetY;
}
}
suite('DragManager', () => {
let delegate: MockDelegate;
let dragManager: DragManager;
let testTabsApiProxy: TestTabsApiProxy;
// Two plain, ungrouped tabs used to populate the strip before each test.
const tabs = [
createTab({
active: true,
id: 0,
index: 0,
title: 'Tab 1',
}),
createTab({
id: 1,
index: 1,
title: 'Tab 2',
}),
];
// Drag data-type strings; registered with loadTimeData in setup().
const strings = {
tabGroupIdDataType: 'application/group-id',
tabIdDataType: 'application/tab-id',
};
// Wraps `tabElement` in a freshly created <tabstrip-tab-group> carrying
// `groupId`, updating both the DOM and the tab's data model.
function groupTab(tabElement: TabElement, groupId: string): TabGroupElement {
  const group = document.createElement('tabstrip-tab-group');
  group.setAttribute('data-group-id', groupId);
  // Swap the tab out for the group, then re-parent the tab into it.
  delegate.replaceChild(group, tabElement);
  tabElement.tab = {...tabElement.tab, groupId};
  group.appendChild(tabElement);
  return group;
}
// Fresh fixtures per test: a strip with the two sample tabs and an
// observing DragManager.
setup(() => {
loadTimeData.overrideValues(strings);
testTabsApiProxy = new TestTabsApiProxy();
TabsApiProxyImpl.setInstance(testTabsApiProxy);
delegate = new MockDelegate();
tabs.forEach(tab => {
const tabElement = document.createElement('tabstrip-tab');
tabElement.tab = tab;
delegate.appendChild(tabElement);
});
dragManager = new DragManager(delegate);
dragManager.startObserving();
// Remove the default body margin before attaching the strip.
document.body.style.margin = '0';
document.body.appendChild(delegate);
});
// Verifies that dragstart installs the element's drag image and computes
// setDragImage offsets that map the pointer position onto the drag-image
// center element (with extra scaling applied on ChromeOS).
test('DragStartSetsDragImage', () => {
const draggedElement = delegate.children[0] as TabElement | TabGroupElement;
const dragImage = draggedElement.getDragImage();
const dragImageCenter = draggedElement.getDragImageCenter();
// Mock the dimensions and position of the element and the drag image.
const draggedElementRect = {top: 20, left: 30, width: 200, height: 150};
draggedElement.getBoundingClientRect = () => draggedElementRect as DOMRect;
const dragImageRect = {top: 20, left: 30, width: 200, height: 150};
dragImage.getBoundingClientRect = () => dragImageRect as DOMRect;
const dragImageCenterRect = {top: 25, left: 25, width: 100, height: 120};
dragImageCenter.getBoundingClientRect = () =>
dragImageCenterRect as DOMRect;
const eventClientX = 100;
const eventClientY = 50;
const mockDataTransfer = new MockDataTransfer();
const dragStartEvent = new DragEvent('dragstart', {
bubbles: true,
composed: true,
clientX: eventClientX,
clientY: eventClientY,
dataTransfer: mockDataTransfer,
});
draggedElement.dispatchEvent(dragStartEvent);
assertEquals(dragStartEvent.dataTransfer!.effectAllowed, 'move');
assertEquals(
mockDataTransfer.dragImageData.image, draggedElement.getDragImage());
// Where inside the dragged element the pointer went down, as fractions.
const eventXPercentage =
(eventClientX - draggedElementRect.left) / draggedElementRect.width;
const eventYPercentage =
(eventClientY - draggedElementRect.top) / draggedElementRect.height;
// Offset should account for any margins or padding between the
// dragImageCenter and the dragImage.
let dragImageCenterLeftMargin =
dragImageCenterRect.left - dragImageRect.left;
let dragImageCenterTopMargin = dragImageCenterRect.top - dragImageRect.top;
if (isChromeOS) {
// Dimensions are scaled on ChromeOS so the margins and paddings are also
// scaled.
dragImageCenterLeftMargin *= 1.2;
dragImageCenterTopMargin *= 1.2;
}
// Offset should map event's coordinates to within the dimensions of the
// dragImageCenter.
const eventXWithinDragImageCenter =
eventXPercentage * dragImageCenterRect.width;
const eventYWithinDragImageCenter =
eventYPercentage * dragImageCenterRect.height;
let expectedOffsetX =
dragImageCenterLeftMargin + eventXWithinDragImageCenter;
let expectedOffsetY =
dragImageCenterTopMargin + eventYWithinDragImageCenter;
if (isChromeOS) {
// NOTE(review): presumably mirrors a fixed vertical drag-image offset
// applied by the production code on ChromeOS — confirm in drag_manager.js.
expectedOffsetY -= 25;
}
assertEquals(expectedOffsetX, mockDataTransfer.dragImageData.offsetX);
assertEquals(expectedOffsetY, mockDataTransfer.dragImageData.offsetY);
});
// Dragging a tab over another swaps them in the DOM immediately; the drop
// then commits the move through the tabs API.
test('DragOverMovesTabs', async () => {
const draggedIndex = 0;
const dragOverIndex = 1;
const draggedTab = delegate.children[draggedIndex]!;
const dragOverTab = delegate.children[dragOverIndex]!;
const mockDataTransfer = new MockDataTransfer();
// Dispatch a dragstart event to start the drag process.
const dragStartEvent = new DragEvent('dragstart', {
bubbles: true,
composed: true,
clientX: 100,
clientY: 150,
dataTransfer: mockDataTransfer,
});
draggedTab.dispatchEvent(dragStartEvent);
// Move the draggedTab over the 2nd tab.
const dragOverEvent = new DragEvent('dragover', {
bubbles: true,
composed: true,
dataTransfer: mockDataTransfer,
});
dragOverTab.dispatchEvent(dragOverEvent);
assertEquals(dragOverEvent.dataTransfer!.dropEffect, 'move');
// Dragover tab and dragged tab have now switched places in the DOM.
assertEquals(draggedTab, delegate.children[dragOverIndex]);
assertEquals(dragOverTab, delegate.children[draggedIndex]);
draggedTab.dispatchEvent(new DragEvent('drop', {bubbles: true}));
const [tabId, newIndex] = await testTabsApiProxy.whenCalled('moveTab');
assertEquals(tabId, tabs[draggedIndex]!.id);
assertEquals(newIndex, dragOverIndex);
});
// Dropping a tab onto a group reparents it into the group element and
// commits the membership change via the groupTab API call.
test('DragTabOverTabGroup', async () => {
const tabElements = delegate.children as HTMLCollectionOf<TabElement>;
// Group the first tab.
const dragOverTabGroup = groupTab(tabElements[0]!, 'group0');
// Start dragging the second tab.
const draggedTab = tabElements[1]!;
const mockDataTransfer = new MockDataTransfer();
const dragStartEvent = new DragEvent('dragstart', {
bubbles: true,
composed: true,
clientX: 100,
clientY: 150,
dataTransfer: mockDataTransfer,
});
draggedTab.dispatchEvent(dragStartEvent);
// Drag the second tab over the newly created tab group.
const dragOverEvent = new DragEvent('dragover', {
bubbles: true,
composed: true,
dataTransfer: mockDataTransfer,
});
dragOverTabGroup.dispatchEvent(dragOverEvent);
// Tab is now in the group within the DOM.
assertEquals(dragOverTabGroup, draggedTab.parentElement);
draggedTab.dispatchEvent(new DragEvent('drop', {bubbles: true}));
const [tabId, groupId] = await testTabsApiProxy.whenCalled('groupTab');
assertEquals(draggedTab.tab.id, tabId);
assertEquals('group0', groupId);
});
// Dragging a grouped tab onto the strip background pulls it out of the
// group in the DOM and commits the change via ungroupTab.
test('DragTabOutOfTabGroup', async () => {
// Group the first tab.
const draggedTab = (delegate.children as HTMLCollectionOf<TabElement>)[0]!;
groupTab(draggedTab, 'group0');
// Start dragging the first tab.
const mockDataTransfer = new MockDataTransfer();
const dragStartEvent = new DragEvent('dragstart', {
bubbles: true,
composed: true,
clientX: 100,
clientY: 150,
dataTransfer: mockDataTransfer,
});
draggedTab.dispatchEvent(dragStartEvent);
// Drag the first tab out.
const dragOverEvent = new DragEvent('dragover', {
bubbles: true,
composed: true,
dataTransfer: mockDataTransfer,
});
delegate.dispatchEvent(dragOverEvent);
// The tab is now outside of the group in the DOM.
assertEquals(delegate, draggedTab.parentElement);
draggedTab.dispatchEvent(new DragEvent('drop', {bubbles: true}));
const [tabId] = await testTabsApiProxy.whenCalled('ungroupTab');
assertEquals(draggedTab.tab.id, tabId);
});
// A whole group — dragged via its drag handle — swaps with a tab on
// dragover; the drop commits the move via moveGroup.
test('DragGroupOverTab', async () => {
const tabElements = delegate.children as HTMLCollectionOf<TabElement>;
// Start dragging the group.
const draggedGroupIndex = 0;
const draggedGroup = groupTab(tabElements[draggedGroupIndex]!, 'group0');
const mockDataTransfer = new MockDataTransfer();
const dragStartEvent = new DragEvent('dragstart', {
bubbles: true,
composed: true,
clientX: 100,
clientY: 150,
dataTransfer: mockDataTransfer,
});
draggedGroup.shadowRoot!.getElementById('dragHandle')!.dispatchEvent(
dragStartEvent);
// Drag the group over the second tab.
const dragOverIndex = 1;
const dragOverTab = tabElements[dragOverIndex]!;
const dragOverEvent = new DragEvent('dragover', {
bubbles: true,
composed: true,
dataTransfer: mockDataTransfer,
});
dragOverTab.dispatchEvent(dragOverEvent);
// Group and tab have now switched places.
assertEquals(draggedGroup, delegate.children[dragOverIndex]);
assertEquals(dragOverTab, delegate.children[draggedGroupIndex]);
draggedGroup.dispatchEvent(new DragEvent('drop', {bubbles: true}));
const [groupId, index] = await testTabsApiProxy.whenCalled('moveGroup');
assertEquals('group0', groupId);
assertEquals(1, index);
});
// Two separate groups swap positions on dragover; the drop commits the
// dragged group's move via moveGroup.
test('DragGroupOverGroup', async () => {
const tabElements = delegate.children as HTMLCollectionOf<TabElement>;
// Group the first tab and second tab separately.
const draggedIndex = 0;
const draggedGroup = groupTab(tabElements[draggedIndex]!, 'group0');
const dragOverIndex = 1;
const dragOverGroup = groupTab(tabElements[dragOverIndex]!, 'group1');
// Start dragging the first group.
const mockDataTransfer = new MockDataTransfer();
const dragStartEvent = new DragEvent('dragstart', {
bubbles: true,
composed: true,
clientX: 100,
clientY: 150,
dataTransfer: mockDataTransfer,
});
draggedGroup.shadowRoot!.getElementById('dragHandle')!.dispatchEvent(
dragStartEvent);
// Drag the group over the second tab.
const dragOverEvent = new DragEvent('dragover', {
bubbles: true,
composed: true,
dataTransfer: mockDataTransfer,
});
dragOverGroup.dispatchEvent(dragOverEvent);
// Groups have now switched places.
assertEquals(draggedGroup, delegate.children[dragOverIndex]);
assertEquals(dragOverGroup, delegate.children[draggedIndex]);
draggedGroup.dispatchEvent(new DragEvent('drop', {bubbles: true}));
const [groupId, index] = await testTabsApiProxy.whenCalled('moveGroup');
assertEquals('group0', groupId);
assertEquals(1, index);
});
// A tab dragged in from another window: dragenter creates a placeholder
// tab, dragover positions it, and the drop adopts the external tab id and
// commits the move through moveTab.
test('DragExternalTabOverTab', async () => {
const externalTabId = 1000;
const mockDataTransfer = new MockDataTransfer();
mockDataTransfer.setData(strings.tabIdDataType, `${externalTabId}`);
const dragEnterEvent = new DragEvent('dragenter', {
bubbles: true,
composed: true,
dataTransfer: mockDataTransfer,
});
delegate.dispatchEvent(dragEnterEvent);
// Test that a placeholder tab was created.
const placeholderTabElement = delegate.lastElementChild as TabElement;
assertEquals(PLACEHOLDER_TAB_ID, placeholderTabElement.tab.id);
const dragOverIndex = 0;
const dragOverTab = delegate.children[dragOverIndex]!;
const dragOverEvent = new DragEvent('dragover', {
bubbles: true,
composed: true,
dataTransfer: mockDataTransfer,
});
dragOverTab.dispatchEvent(dragOverEvent);
assertEquals(placeholderTabElement, delegate.children[dragOverIndex]);
const dropEvent = new DragEvent('drop', {
bubbles: true,
composed: true,
dataTransfer: mockDataTransfer,
});
dragOverTab.dispatchEvent(dropEvent);
// The placeholder now represents the real external tab.
assertEquals(externalTabId, placeholderTabElement.tab.id);
const [tabId, index] = await testTabsApiProxy.whenCalled('moveTab');
assertEquals(externalTabId, tabId);
assertEquals(dragOverIndex, index);
});
// An external tab dragged over a group: its placeholder is reparented into
// the group, and the drop groups the external tab via the groupTab API.
test('DragExternalTabOverTabGroup', async () => {
const externalTabId = 1000;
const mockDataTransfer = new MockDataTransfer();
mockDataTransfer.setData(strings.tabIdDataType, `${externalTabId}`);
const dragEnterEvent = new DragEvent('dragenter', {
bubbles: true,
composed: true,
dataTransfer: mockDataTransfer,
});
delegate.dispatchEvent(dragEnterEvent);
const placeholderTabElement = delegate.lastElementChild!;
const draggedGroup = groupTab(delegate.children[0] as TabElement, 'group0');
const dragOverEvent = new DragEvent('dragover', {
bubbles: true,
composed: true,
dataTransfer: mockDataTransfer,
});
draggedGroup.dispatchEvent(dragOverEvent);
assertEquals(draggedGroup, placeholderTabElement.parentElement);
const dropEvent = new DragEvent('drop', {
bubbles: true,
composed: true,
dataTransfer: mockDataTransfer,
});
draggedGroup.dispatchEvent(dropEvent);
const [tabId, groupId] = await testTabsApiProxy.whenCalled('groupTab');
assertEquals(externalTabId, tabId);
assertEquals('group0', groupId);
});
test('DragExternalTabGroupOverTab', async () => {
const externalGroupId = 'external-group';
const mockDataTransfer = new MockDataTransfer();
mockDataTransfer.setData(strings.tabGroupIdDataType, `${externalGroupId}`);
const dragEnterEvent = new DragEvent('dragenter', {
bubbles: true,
composed: true,
dataTransfer: mockDataTransfer,
});
delegate.dispatchEvent(dragEnterEvent);
// Test that a placeholder group was created.
const placeholderGroupElement = delegate.lastElementChild as TabElement;
assertEquals(
PLACEHOLDER_GROUP_ID, placeholderGroupElement.dataset['groupId']);
function dragOverTabAt(dragOverIndex: number) {
const dragOverTab = delegate.children[dragOverIndex]!;
const dragOverEvent = new DragEvent('dragover', {
bubbles: true,
composed: true,
dataTransfer: mockDataTransfer,
});
dragOverTab.dispatchEvent(dragOverEvent);
assertEquals(placeholderGroupElement, delegate.children[dragOverIndex]);
}
// Test moving forwards and backwards in the tab strip.
dragOverTabAt(0);
dragOverTabAt(1);
dragOverTabAt(2);
dragOverTabAt(0);
const dropEvent = new DragEvent('drop', {
bubbles: true,
composed: true,
dataTransfer: mockDataTransfer,
});
placeholderGroupElement.dispatchEvent(dropEvent);
assertEquals(externalGroupId, placeholderGroupElement.dataset['groupId']);
const [groupId, index] = await testTabsApiProxy.whenCalled('moveGroup');
assertEquals(externalGroupId, groupId);
assertEquals(0, index);
});
// An external group dragged over an existing group takes that group's
// position; the drop commits the move at that index via moveGroup.
test('DragExternalTabGroupOverTabGroup', async () => {
const externalGroupId = 'external-group';
const mockDataTransfer = new MockDataTransfer();
mockDataTransfer.setData(strings.tabGroupIdDataType, `${externalGroupId}`);
const dragEnterEvent = new DragEvent('dragenter', {
bubbles: true,
composed: true,
dataTransfer: mockDataTransfer,
});
delegate.dispatchEvent(dragEnterEvent);
const placeholderGroupElement = delegate.lastElementChild!;
const dragOverGroupIndex = 0;
const dragOverGroup =
groupTab(delegate.children[dragOverGroupIndex] as TabElement, 'group0');
const dragOverEvent = new DragEvent('dragover', {
bubbles: true,
composed: true,
dataTransfer: mockDataTransfer,
});
dragOverGroup.dispatchEvent(dragOverEvent);
assertEquals(
placeholderGroupElement, delegate.children[dragOverGroupIndex]);
const dropEvent = new DragEvent('drop', {
bubbles: true,
composed: true,
dataTransfer: mockDataTransfer,
});
placeholderGroupElement.dispatchEvent(dropEvent);
const [groupId, index] = await testTabsApiProxy.whenCalled('moveGroup');
assertEquals(externalGroupId, groupId);
assertEquals(dragOverGroupIndex, index);
});
test('CancelDragResetsPosition', () => {
const draggedIndex = 0;
const draggedTab = delegate.children[draggedIndex]!;
const mockDataTransfer = new MockDataTransfer();
// Dispatch a dragstart event to start the drag process.
const dragStartEvent = new DragEvent('dragstart', {
bubbles: true,
composed: true,
clientX: 100,
clientY: 150,
dataTransfer: mockDataTransfer,
});
draggedTab.dispatchEvent(dragStartEvent);
// Move the draggedTab over the 2nd tab.
const dragOverEvent = new DragEvent('dragover', {
bubbles: true,
composed: true,
dataTransfer: mockDataTransfer,
});
delegate.children[1]!.dispatchEvent(dragOverEvent);
draggedTab.dispatchEvent(new DragEvent('dragend', {bubbles: true}));
assertEquals(draggedTab, delegate.children[draggedIndex]);
});
// Leaving the strip while dragging an external tab removes the placeholder
// tab that dragenter created.
test('DragLeaveRemovesExternalTab', () => {
const externalTabId = 1000;
const mockDataTransfer = new MockDataTransfer();
mockDataTransfer.setData(strings.tabIdDataType, `${externalTabId}`);
const dragEnterEvent = new DragEvent('dragenter', {
bubbles: true,
composed: true,
dataTransfer: mockDataTransfer,
});
delegate.dispatchEvent(dragEnterEvent);
assertTrue(
!!delegate.querySelector(`[data-tab-id="${PLACEHOLDER_TAB_ID}"]`));
const dragLeaveEvent = new DragEvent('dragleave', {
bubbles: true,
composed: true,
dataTransfer: mockDataTransfer,
});
delegate.dispatchEvent(dragLeaveEvent);
assertFalse(
!!delegate.querySelector(`[data-tab-id="${PLACEHOLDER_TAB_ID}"]`));
});
// A tab flagged as an invalid dragover target must not be swapped with the
// dragged tab.
test('DragOverInvalidDragOverTarget', () => {
const draggedIndex = 0;
const dragOverIndex = 1;
const draggedTab = delegate.children[draggedIndex]!;
const dragOverTab = delegate.children[dragOverIndex] as TabElement;
const mockDataTransfer = new MockDataTransfer();
// Dispatch a dragstart event to start the drag process.
const dragStartEvent = new DragEvent('dragstart', {
bubbles: true,
composed: true,
clientX: 100,
clientY: 150,
dataTransfer: mockDataTransfer,
});
draggedTab.dispatchEvent(dragStartEvent);
// Mark the dragOverIndex tab to be an invalid dragover target.
dragOverTab.isValidDragOverTarget = false;
const dragOverEvent = new DragEvent('dragover', {
bubbles: true,
composed: true,
dataTransfer: mockDataTransfer,
});
dragOverTab.dispatchEvent(dragOverEvent);
// Dragover tab and dragged tab remain in their initial positions.
assertEquals(draggedTab, delegate.children[draggedIndex]);
assertEquals(dragOverTab, delegate.children[dragOverIndex]);
});
// dragleave marks the dragged element as dragged out of the strip;
// dragging back over the strip clears the flag again.
test('DragLeaveUpdatesElementsAsDraggedOut', () => {
let isDraggedOut = false;
// Mock a tab's setDraggedOut method to ensure it is called.
const draggedTab = delegate.children[0] as TabElement;
draggedTab.setDraggedOut = (isDraggedOutParam) => {
isDraggedOut = isDraggedOutParam;
};
const dataTransfer = new MockDataTransfer();
draggedTab.dispatchEvent(new DragEvent('dragstart', {
bubbles: true,
composed: true,
clientX: 100,
clientY: 150,
dataTransfer,
}));
delegate.dispatchEvent(new DragEvent('dragleave', {dataTransfer}));
assertTrue(isDraggedOut);
delegate.dispatchEvent(new DragEvent('dragover', {dataTransfer}));
assertFalse(isDraggedOut);
});
// Ending a drag that actually moved the tab must not open the tab context
// menu.
// NOTE(review): the callback is `async` but contains no awaits.
test('DragendAfterMovingDoesNotShowContextMenu', async () => {
const draggedTab = delegate.children[0]!;
const dragOverTab = delegate.children[1]!;
const dragDetails = {
bubbles: true,
composed: true,
clientX: 100,
clientY: 150,
dataTransfer: new MockDataTransfer(),
};
draggedTab.dispatchEvent(new DragEvent('dragstart', dragDetails));
// Move far enough sideways that the drag counts as a move.
dragOverTab.dispatchEvent(new DragEvent(
'dragover', Object.assign({}, dragDetails, {clientX: 200})));
draggedTab.dispatchEvent(new DragEvent('dragend', dragDetails));
assertEquals(0, testTabsApiProxy.getCallCount('showTabContextMenu'));
});
// Dropping an external-tab placeholder without ever moving it must not
// open the tab context menu.
test('DropPlaceholderWithoutMovingDoesNotShowContextMenu', () => {
const externalTabId = 1000;
const mockDataTransfer = new MockDataTransfer();
mockDataTransfer.setData(strings.tabIdDataType, `${externalTabId}`);
const dragEnterEvent = new DragEvent('dragenter', {
bubbles: true,
composed: true,
dataTransfer: mockDataTransfer,
});
delegate.dispatchEvent(dragEnterEvent);
delegate.dispatchEvent(new DragEvent('drop', {
bubbles: true,
composed: true,
dataTransfer: mockDataTransfer,
}));
assertEquals(0, testTabsApiProxy.getCallCount('showTabContextMenu'));
});
// dropEffect 'move' at dragend means the tab was accepted elsewhere, so it
// stays rendered as dragged out of this strip.
test('DragEndWithDropEffectMoveDoesNotRemoveDraggedOutAttribute', () => {
const draggedTab = delegate.children[0] as TabElement;
const dataTransfer = new MockDataTransfer();
draggedTab.dispatchEvent(new DragEvent('dragstart', {
bubbles: true,
composed: true,
clientX: 100,
clientY: 150,
dataTransfer,
}));
delegate.dispatchEvent(new DragEvent('dragleave', {dataTransfer}));
assertTrue(draggedTab.isDraggedOut());
dataTransfer.dropEffect = 'move';
delegate.dispatchEvent(new DragEvent('dragend', {dataTransfer}));
assertTrue(draggedTab.isDraggedOut());
});
test('DragEndWithDropEffectNoneRemovesDraggedOutAttribute', () => {
const draggedTab = delegate.children[0] as TabElement;
const dataTransfer = new MockDataTransfer();
draggedTab.dispatchEvent(new DragEvent('dragstart', {
bubbles: true,
composed: true,
clientX: 100,
clientY: 150,
dataTransfer,
}));
delegate.dispatchEvent(new DragEvent('dragleave', {dataTransfer}));
assertTrue(draggedTab.isDraggedOut());
dataTransfer.dropEffect = 'none';
delegate.dispatchEvent(new DragEvent('dragend', {dataTransfer}));
assertFalse(draggedTab.isDraggedOut());
});
test('DragIsPrevented', async () => {
// Mock the delegate to return true for shouldPreventDrag.
delegate.shouldPreventDrag = () => true;
const draggedTab = delegate.children[0]!;
let isDefaultPrevented = false;
delegate.addEventListener('dragstart', e => {
isDefaultPrevented = e.defaultPrevented;
});
const dataTransfer = new MockDataTransfer();
draggedTab.dispatchEvent(new DragEvent('dragstart', {
bubbles: true,
cancelable: true,
composed: true,
clientX: 100,
clientY: 150,
dataTransfer,
}));
assertTrue(isDefaultPrevented);
});
}); | the_stack |
import {
IResultRoomEvents,
ISearchRequestBody,
ISearchResponse,
ISearchResult,
ISearchResults,
SearchOrderBy,
} from "matrix-js-sdk/src/@types/search";
import { IRoomEventFilter } from "matrix-js-sdk/src/filter";
import { EventType } from "matrix-js-sdk/src/@types/event";
import { SearchResult } from "matrix-js-sdk/src/models/search-result";
import { ISearchArgs } from "./indexing/BaseEventIndexManager";
import EventIndexPeg from "./indexing/EventIndexPeg";
import { MatrixClientPeg } from "./MatrixClientPeg";
// Page size used for every search source (server and local index) and for
// each pagination request.
const SEARCH_LIMIT = 10;
/**
 * Run a server-side /search request for the given term.
 *
 * @param term The term to search for.
 * @param roomId Optional room ID; when provided, the search is scoped to
 *     that room via the request filter.
 * @returns The raw server response together with the request body that was
 *     sent, so callers can reuse the body for pagination.
 */
async function serverSideSearch(
    term: string,
    // `roomId?: string` rather than `roomId: string = undefined`, which is
    // rejected under strictNullChecks.
    roomId?: string,
): Promise<{ response: ISearchResponse, query: ISearchRequestBody }> {
    const client = MatrixClientPeg.get();

    const filter: IRoomEventFilter = {
        limit: SEARCH_LIMIT,
    };

    if (roomId !== undefined) filter.rooms = [roomId];

    const body: ISearchRequestBody = {
        search_categories: {
            room_events: {
                search_term: term,
                filter: filter,
                order_by: SearchOrderBy.Recent,
                event_context: {
                    before_limit: 1,
                    after_limit: 1,
                    include_profile: true,
                },
            },
        },
    };

    const response = await client.search({ body: body });

    return { response, query: body };
}
/**
 * Run a server-side search and hand the response to the js-sdk for
 * processing into displayable results.
 *
 * @param term The term to search for.
 * @param roomId Optional room to scope the search to.
 */
async function serverSideSearchProcess(term: string, roomId?: string): Promise<ISearchResults> {
    const client = MatrixClientPeg.get();
    const result = await serverSideSearch(term, roomId);

    // The js-sdk method backPaginateRoomEventsSearch() uses _query internally
    // so we're reusing the concept here since we want to delegate the
    // pagination back to backPaginateRoomEventsSearch() in some cases.
    const searchResults: ISearchResults = {
        _query: result.query,
        results: [],
        highlights: [],
    };

    return client.processRoomEventsSearch(searchResults, result.response);
}
/**
 * Comparator ordering search results newest-first by origin_server_ts.
 * Returns -1/1/0 like a standard Array.prototype.sort comparator.
 */
function compareEvents(a: ISearchResult, b: ISearchResult): number {
    const delta = b.result.origin_server_ts - a.result.origin_server_ts;
    // Normalize to the exact -1/0/1 contract of the original implementation.
    return delta > 0 ? 1 : delta < 0 ? -1 : 0;
}
/**
 * Search across all rooms by combining a server-side search (covers
 * unencrypted rooms) with a local Seshat search (covers encrypted rooms).
 *
 * @param searchTerm The term to search for.
 * @returns Processed, combined search results ready for display.
 */
async function combinedSearch(searchTerm: string): Promise<ISearchResults> {
    const client = MatrixClientPeg.get();

    // Run the server-side and the local search in parallel and wait for both.
    // (Previously the code awaited Promise.all and then re-awaited each
    // promise individually; destructuring once is equivalent and simpler.)
    const [serverSideResult, localResult] = await Promise.all([
        serverSideSearch(searchTerm),
        localSearch(searchTerm),
    ]);

    const serverQuery = serverSideResult.query;
    const serverResponse = serverSideResult.response;

    const localQuery = localResult.query;
    const localResponse = localResult.response;

    // Store our queries for later on so we can support pagination.
    //
    // We're reusing _query here again to not introduce separate code paths and
    // concepts for our different pagination methods. We're storing the
    // server-side next batch separately since the query is the json body of
    // the request and next_batch needs to be a query parameter.
    //
    // We can't put it in the final result that _processRoomEventsSearch()
    // returns since that one can be either a server-side one, a local one or a
    // fake one to fetch the remaining cached events. See the docs for
    // combineEvents() for an explanation why we need to cache events.
    const emptyResult: ISeshatSearchResults = {
        seshatQuery: localQuery,
        _query: serverQuery,
        serverSideNextBatch: serverResponse.search_categories.room_events.next_batch,
        cachedEvents: [],
        oldestEventFrom: "server",
        results: [],
        highlights: [],
    };

    // Combine our results.
    const combinedResult = combineResponses(emptyResult, localResponse, serverResponse.search_categories.room_events);

    // Let the client process the combined result.
    const response: ISearchResponse = {
        search_categories: {
            room_events: combinedResult,
        },
    };

    const result = client.processRoomEventsSearch(emptyResult, response);

    // Restore our encryption info so we can properly re-verify the events.
    restoreEncryptionInfo(result.results);

    return result;
}
/**
 * Search the local Seshat event index.
 *
 * @param searchTerm The term to search for.
 * @param roomId Optional room to scope the search to.
 * @param processResult Unused; kept for backwards compatibility with
 *     existing callers.
 * @returns The raw index response and the query args (with next_batch set)
 *     for use by pagination.
 */
async function localSearch(
    searchTerm: string,
    // Optional parameter instead of a `= undefined` default, which is
    // invalid under strictNullChecks for a plain `string` type.
    roomId?: string,
    processResult = true,
): Promise<{ response: IResultRoomEvents, query: ISearchArgs }> {
    const eventIndex = EventIndexPeg.get();

    const searchArgs: ISearchArgs = {
        search_term: searchTerm,
        before_limit: 1,
        after_limit: 1,
        limit: SEARCH_LIMIT,
        order_by_recency: true,
        // Directly forward the (possibly undefined) room scope; this replaces
        // the previous "set to undefined, then conditionally overwrite" dance.
        room_id: roomId,
    };

    const localResult = await eventIndex.search(searchArgs);
    // Remember the pagination token so the next page can be fetched with the
    // same args object.
    searchArgs.next_batch = localResult.next_batch;

    return {
        response: localResult,
        query: searchArgs,
    };
}
/**
 * Search-result state carried across combined (Seshat + server) searches so
 * that pagination can resume from either source.
 */
export interface ISeshatSearchResults extends ISearchResults {
    // Query args used against the local Seshat index; holds its next_batch.
    seshatQuery?: ISearchArgs;
    // Events already fetched but not yet surfaced to the UI; see the
    // combineEvents() block comment for why events are cached.
    cachedEvents?: ISearchResult[];
    // Which source produced the oldest surfaced event; the next page is
    // fetched from that source.
    oldestEventFrom?: "local" | "server";
    // Server-side next_batch token, stored separately from _query because it
    // is a query parameter rather than part of the request body.
    serverSideNextBatch?: string;
}
/**
 * Search the local event index and process the response for display.
 *
 * @param searchTerm The term to search for; an empty term short-circuits to
 *     an empty result without touching the index.
 * @param roomId Optional room to scope the search to.
 */
async function localSearchProcess(searchTerm: string, roomId?: string): Promise<ISeshatSearchResults> {
    // Typed declaration rather than an `as` assertion so the compiler
    // actually checks the literal's shape.
    const emptyResult: ISeshatSearchResults = {
        results: [],
        highlights: [],
    };

    if (searchTerm === "") return emptyResult;

    const result = await localSearch(searchTerm, roomId);

    // Keep the query around so localPagination() can fetch further pages.
    emptyResult.seshatQuery = result.query;

    const response: ISearchResponse = {
        search_categories: {
            room_events: result.response,
        },
    };

    const processedResult = MatrixClientPeg.get().processRoomEventsSearch(emptyResult, response);
    // Restore our encryption info so we can properly re-verify the events.
    restoreEncryptionInfo(processedResult.results);

    return processedResult;
}
/**
 * Fetch the next page of a purely local (Seshat) search, using the query
 * stored on the previous result, and process it for display.
 */
async function localPagination(searchResult: ISeshatSearchResults): Promise<ISeshatSearchResults> {
    const eventIndex = EventIndexPeg.get();

    // Reuse the stored query; its next_batch token points at the next page.
    const searchArgs = searchResult.seshatQuery;

    const localResult = await eventIndex.search(searchArgs);
    // Advance the token so a subsequent pagination call fetches the page
    // after this one.
    searchResult.seshatQuery.next_batch = localResult.next_batch;

    // We only need to restore the encryption state for the new results, so
    // remember how many of them we got.
    const newResultCount = localResult.results.length;

    const response = {
        search_categories: {
            room_events: localResult,
        },
    };

    const result = MatrixClientPeg.get().processRoomEventsSearch(searchResult, response);
    // Restore our encryption info so we can properly re-verify the events.
    const newSlice = result.results.slice(Math.max(result.results.length - newResultCount, 0));
    restoreEncryptionInfo(newSlice);

    // This request is finished; clear the in-flight marker so callers may
    // paginate again.
    searchResult.pendingRequest = null;

    return result;
}
/**
 * Compare the oldest (last) events of two result lists.
 *
 * @returns -1 when the first list's oldest event is at least as old as the
 *     second list's, 1 otherwise, and 0 when either list is empty/unreadable
 *     (deliberate best-effort: any access failure is swallowed).
 */
function compareOldestEvents(firstResults: ISearchResult[], secondResults: ISearchResult[]): number {
    try {
        const firstOldestTs = firstResults[firstResults.length - 1].result.origin_server_ts;
        const secondOldestTs = secondResults[secondResults.length - 1].result.origin_server_ts;
        return firstOldestTs <= secondOldestTs ? -1 : 1;
    } catch {
        return 0;
    }
}
/**
 * Merge two event lists newest-first, surface the first SEARCH_LIMIT events
 * in the response and stash the remainder on the previous search result.
 */
function combineEventSources(
    previousSearchResult: ISeshatSearchResults,
    response: IResultRoomEvents,
    a: ISearchResult[],
    b: ISearchResult[],
): void {
    // Merge both sources into a single newest-first list.
    const merged = [...a, ...b].sort(compareEvents);
    // The newest SEARCH_LIMIT events go out now; the rest is cached for the
    // next pagination request (see the combineEvents() block comment).
    response.results = merged.slice(0, SEARCH_LIMIT);
    previousSearchResult.cachedEvents = merged.slice(SEARCH_LIMIT);
}
/**
* Combine the events from our event sources into a sorted result
*
* This method will first be called from the combinedSearch() method. In this
* case we will fetch SEARCH_LIMIT events from the server and the local index.
*
* The method will put the SEARCH_LIMIT newest events from the server and the
* local index in the results part of the response, the rest will be put in the
* cachedEvents field of the previousSearchResult (in this case an empty search
* result).
*
* Every subsequent call will be made from the combinedPagination() method, in
* this case we will combine the cachedEvents and the next SEARCH_LIMIT events
* from either the server or the local index.
*
* Since we have two event sources and we need to sort the results by date we
* need keep on looking for the oldest event. We are implementing a variation of
* a sliding window.
*
* The event sources are here represented as two sorted lists where the smallest
* number represents the newest event. The two lists need to be merged in a way
* that preserves the sorted property so they can be shown as one search result.
* We first fetch SEARCH_LIMIT events from both sources.
*
* If we set SEARCH_LIMIT to 3:
*
* Server events [01, 02, 04, 06, 07, 08, 11, 13]
* |01, 02, 04|
* Local events [03, 05, 09, 10, 12, 14, 15, 16]
* |03, 05, 09|
*
* We note that the oldest event is from the local index, and we combine the
* results:
*
* Server window [01, 02, 04]
* Local window [03, 05, 09]
*
* Combined events [01, 02, 03, 04, 05, 09]
*
* We split the combined result in the part that we want to present and a part
* that will be cached.
*
* Presented events [01, 02, 03]
* Cached events [04, 05, 09]
*
* We slide the window for the server since the oldest event is from the local
* index.
*
* Server events [01, 02, 04, 06, 07, 08, 11, 13]
* |06, 07, 08|
* Local events [03, 05, 09, 10, 12, 14, 15, 16]
* |XX, XX, XX|
* Cached events [04, 05, 09]
*
* We note that the oldest event is from the server and we combine the new
* server events with the cached ones.
*
* Cached events [04, 05, 09]
* Server events [06, 07, 08]
*
* Combined events [04, 05, 06, 07, 08, 09]
*
* We split again.
*
* Presented events [04, 05, 06]
* Cached events [07, 08, 09]
*
* We slide the local window, the oldest event is on the server.
*
* Server events [01, 02, 04, 06, 07, 08, 11, 13]
* |XX, XX, XX|
* Local events [03, 05, 09, 10, 12, 14, 15, 16]
* |10, 12, 14|
*
* Cached events [07, 08, 09]
* Local events [10, 12, 14]
* Combined events [07, 08, 09, 10, 12, 14]
*
* Presented events [07, 08, 09]
* Cached events [10, 12, 14]
*
* Next up we slide the server window again.
*
* Server events [01, 02, 04, 06, 07, 08, 11, 13]
* |11, 13|
* Local events [03, 05, 09, 10, 12, 14, 15, 16]
* |XX, XX, XX|
*
* Cached events [10, 12, 14]
* Server events [11, 13]
* Combined events [10, 11, 12, 13, 14]
*
* Presented events [10, 11, 12]
* Cached events [13, 14]
*
* We have one source exhausted, we fetch the rest of our events from the other
* source and combine it with our cached events.
*
*
* @param {object} previousSearchResult A search result from a previous search
* call.
* @param {object} localEvents An unprocessed search result from the event
* index.
* @param {object} serverEvents An unprocessed search result from the server.
*
* @return {object} A response object that combines the events from the
* different event sources.
*
*/
// (Full sliding-window explanation is in the block comment directly above.)
// Optional parameters replace the previous `= undefined` defaults, which are
// invalid for plain object types under strictNullChecks.
function combineEvents(
    previousSearchResult: ISeshatSearchResults,
    localEvents?: IResultRoomEvents,
    serverEvents?: IResultRoomEvents,
): IResultRoomEvents {
    const response = {} as IResultRoomEvents;

    const cachedEvents = previousSearchResult.cachedEvents;
    let oldestEventFrom = previousSearchResult.oldestEventFrom;
    response.highlights = previousSearchResult.highlights;

    if (localEvents && serverEvents && serverEvents.results) {
        // This is a first search call, combine the events from the server and
        // the local index. Note where our oldest event came from, we shall
        // fetch the next batch of events from the other source.
        if (compareOldestEvents(localEvents.results, serverEvents.results) < 0) {
            oldestEventFrom = "local";
        }

        combineEventSources(previousSearchResult, response, localEvents.results, serverEvents.results);
        response.highlights = localEvents.highlights.concat(serverEvents.highlights);
    } else if (localEvents) {
        // This is a pagination call fetching more events from the local index,
        // meaning that our oldest event was on the server.
        // Change the source of the oldest event if our local event is older
        // than the cached one.
        if (compareOldestEvents(localEvents.results, cachedEvents) < 0) {
            oldestEventFrom = "local";
        }
        combineEventSources(previousSearchResult, response, localEvents.results, cachedEvents);
    } else if (serverEvents && serverEvents.results) {
        // This is a pagination call fetching more events from the server,
        // meaning that our oldest event was in the local index.
        // Change the source of the oldest event if our server event is older
        // than the cached one.
        if (compareOldestEvents(serverEvents.results, cachedEvents) < 0) {
            oldestEventFrom = "server";
        }
        combineEventSources(previousSearchResult, response, serverEvents.results, cachedEvents);
    } else {
        // This is a pagination call where we exhausted both of our event
        // sources, let's push the remaining cached events.
        response.results = cachedEvents;
        previousSearchResult.cachedEvents = [];
    }

    previousSearchResult.oldestEventFrom = oldestEventFrom;

    return response;
}
/**
* Combine the local and server search responses
*
* @param {object} previousSearchResult A search result from a previous search
* call.
* @param {object} localEvents An unprocessed search result from the event
* index.
* @param {object} serverEvents An unprocessed search result from the server.
*
* @return {object} A response object that combines the events from the
* different event sources.
*/
/**
 * Combine the local and server search responses
 *
 * @param previousSearchResult A search result from a previous search call.
 * @param localEvents An unprocessed search result from the event index.
 * @param serverEvents An unprocessed search result from the server.
 *
 * @returns A response object that combines the events from the different
 *     event sources.
 */
function combineResponses(
    previousSearchResult: ISeshatSearchResults,
    localEvents?: IResultRoomEvents,
    serverEvents?: IResultRoomEvents,
): IResultRoomEvents {
    // Combine our events first.
    const response = combineEvents(previousSearchResult, localEvents, serverEvents);

    // Our first search will contain counts from both sources, subsequent
    // pagination requests will fetch responses only from one of the sources, so
    // reuse the first count when we're paginating.
    if (previousSearchResult.count) {
        response.count = previousSearchResult.count;
    } else {
        // Guarded with ?? so a missing source or count cannot poison the
        // total with NaN (previously this summed possibly-undefined values).
        response.count = (localEvents?.count ?? 0) + (serverEvents?.count ?? 0);
    }

    // Update our next batch tokens for the given search sources.
    if (localEvents) {
        previousSearchResult.seshatQuery.next_batch = localEvents.next_batch;
    }
    if (serverEvents) {
        previousSearchResult.serverSideNextBatch = serverEvents.next_batch;
    }

    // Set the response next batch token to one of the tokens from the sources,
    // this makes sure that if we exhaust one of the sources we continue with
    // the other one.
    if (previousSearchResult.seshatQuery.next_batch) {
        response.next_batch = previousSearchResult.seshatQuery.next_batch;
    } else if (previousSearchResult.serverSideNextBatch) {
        response.next_batch = previousSearchResult.serverSideNextBatch;
    }

    // We collected all search results from the server as well as from Seshat,
    // we still have some events cached that we'll want to display on the next
    // pagination request.
    //
    // Provide a fake next batch token for that case.
    if (!response.next_batch && previousSearchResult.cachedEvents.length > 0) {
        response.next_batch = "cached";
    }

    return response;
}
/**
 * Decryption metadata Seshat stores alongside an indexed event, used by
 * restoreEncryptionInfo() to re-mark the event as encrypted after a search.
 */
interface IEncryptedSeshatEvent {
    curve25519Key: string;
    ed25519Key: string;
    algorithm: string;
    forwardingCurve25519KeyChain: string[];
}
/**
 * Re-apply encryption metadata to events that Seshat stored in decrypted
 * form, so the client can properly re-verify them. The Seshat-specific
 * fields are stripped from the raw event afterwards.
 */
function restoreEncryptionInfo(searchResultSlice: SearchResult[] = []): void {
    for (const searchResult of searchResultSlice) {
        for (const mxEv of searchResult.context.getTimeline()) {
            const ev = mxEv.event as IEncryptedSeshatEvent;
            // Only events carrying Seshat decryption metadata need fixing up.
            if (!ev.curve25519Key) continue;

            mxEv.makeEncrypted(
                EventType.RoomMessageEncrypted,
                { algorithm: ev.algorithm },
                ev.curve25519Key,
                ev.ed25519Key,
            );
            // @ts-ignore
            mxEv.forwardingCurve25519KeyChain = ev.forwardingCurve25519KeyChain;

            delete ev.curve25519Key;
            delete ev.ed25519Key;
            delete ev.algorithm;
            delete ev.forwardingCurve25519KeyChain;
        }
    }
}
/**
 * Fetch the next page of a combined (server + Seshat) search.
 *
 * Fetches from whichever source produced the oldest surfaced event — or from
 * the only source that still has a pagination token — then merges the new
 * events with the cached ones. See the combineEvents() block comment above.
 */
async function combinedPagination(searchResult: ISeshatSearchResults): Promise<ISeshatSearchResults> {
    const eventIndex = EventIndexPeg.get();
    const client = MatrixClientPeg.get();

    const searchArgs = searchResult.seshatQuery;
    const oldestEventFrom = searchResult.oldestEventFrom;

    let localResult: IResultRoomEvents;
    let serverSideResult: ISearchResponse;

    // Fetch events from the local index if we have a token for it and if it's
    // the local indexes turn or the server has exhausted its results.
    if (searchArgs.next_batch && (!searchResult.serverSideNextBatch || oldestEventFrom === "server")) {
        localResult = await eventIndex.search(searchArgs);
    }

    // Fetch events from the server if we have a token for it and if it's the
    // local indexes turn or the local index has exhausted its results.
    if (searchResult.serverSideNextBatch && (oldestEventFrom === "local" || !searchArgs.next_batch)) {
        // next_batch is a query parameter, not part of the body — hence kept
        // separately on the search result (see combinedSearch()).
        const body = { body: searchResult._query, next_batch: searchResult.serverSideNextBatch };
        serverSideResult = await client.search(body);
    }

    let serverEvents: IResultRoomEvents;

    if (serverSideResult) {
        serverEvents = serverSideResult.search_categories.room_events;
    }

    // Combine our events.
    const combinedResult = combineResponses(searchResult, localResult, serverEvents);

    const response = {
        search_categories: {
            room_events: combinedResult,
        },
    };

    // Remember how many results were already present so only the freshly
    // added tail needs its encryption info restored.
    const oldResultCount = searchResult.results ? searchResult.results.length : 0;

    // Let the client process the combined result.
    const result = client.processRoomEventsSearch(searchResult, response);

    // Restore our encryption info so we can properly re-verify the events.
    const newResultCount = result.results.length - oldResultCount;
    const newSlice = result.results.slice(Math.max(result.results.length - newResultCount, 0));
    restoreEncryptionInfo(newSlice);

    // Request finished; allow further pagination calls.
    searchResult.pendingRequest = null;

    return result;
}
/**
 * Pick the correct search backend when a local event index is available:
 * local search for a single encrypted room, server-side search for a single
 * unencrypted room, and a combined search across all rooms otherwise.
 *
 * @param term The term to search for.
 * @param roomId Optional room to scope the search to.
 */
function eventIndexSearch(term: string, roomId?: string): Promise<ISearchResults> {
    let searchPromise: Promise<ISearchResults>;

    if (roomId !== undefined) {
        if (MatrixClientPeg.get().isRoomEncrypted(roomId)) {
            // The search is for a single encrypted room, use our local
            // search method.
            searchPromise = localSearchProcess(term, roomId);
        } else {
            // The search is for a single non-encrypted room, use the
            // server-side search.
            searchPromise = serverSideSearchProcess(term, roomId);
        }
    } else {
        // Search across all rooms, combine a server side search and a
        // local search.
        searchPromise = combinedSearch(term);
    }

    return searchPromise;
}
/**
 * Paginate a search that may involve the local event index.
 *
 * The stored queries tell us which kind of search this was: no Seshat query
 * means a plain server-side search; no server query means a purely local
 * (encrypted-room) search; both present means an all-rooms combined search.
 */
function eventIndexSearchPagination(searchResult: ISeshatSearchResults): Promise<ISeshatSearchResults> {
    const client = MatrixClientPeg.get();

    const { seshatQuery, _query: serverQuery } = searchResult;

    if (!seshatQuery) {
        // Non-encrypted-room search: normal server-side pagination.
        return client.backPaginateRoomEventsSearch(searchResult);
    }

    // Either a purely local pagination (encrypted room) or, when the server
    // query is also present, a combined pagination across both sources.
    const promise = serverQuery
        ? combinedPagination(searchResult)
        : localPagination(searchResult);
    searchResult.pendingRequest = promise;
    return promise;
}
export function searchPagination(searchResult: ISearchResults): Promise<ISearchResults> {
const eventIndex = EventIndexPeg.get();
const client = MatrixClientPeg.get();
if (searchResult.pendingRequest) return searchResult.pendingRequest;
if (eventIndex === null) return client.backPaginateRoomEventsSearch(searchResult);
else return eventIndexSearchPagination(searchResult);
}
export default function eventSearch(term: string, roomId: string = undefined): Promise<ISearchResults> {
const eventIndex = EventIndexPeg.get();
if (eventIndex === null) return serverSideSearchProcess(term, roomId);
else return eventIndexSearch(term, roomId);
} | the_stack |
import passport, { Profile } from 'passport'
import { createAndSetToken } from './vulcan-lib/apollo-server/authentication';
import { Strategy as CustomStrategy } from 'passport-custom'
import { getUser } from './vulcan-lib/apollo-server/context';
import { Users } from '../lib/collections/users/collection';
import { getCookieFromReq } from './utils/httpUtil';
import { Strategy as GoogleOAuthStrategy, Profile as GoogleProfile, VerifyCallback as GoogleVerifyCallback } from 'passport-google-oauth20';
import { Strategy as FacebookOAuthStrategy, Profile as FacebookProfile } from 'passport-facebook';
import { Strategy as GithubOAuthStrategy, Profile as GithubProfile } from 'passport-github2';
import { Strategy as Auth0Strategy, Profile as Auth0Profile, ExtraVerificationParams, AuthenticateOptions } from 'passport-auth0';
import { VerifyCallback } from 'passport-oauth2'
import { DatabaseServerSetting } from './databaseSettings';
import { createMutator, updateMutator } from './vulcan-lib/mutators';
import { combineUrls, getSiteUrl, slugify, Utils } from '../lib/vulcan-lib/utils';
import pick from 'lodash/pick';
import { forumTypeSetting } from '../lib/instanceSettings';
import { userFromAuth0Profile } from './authentication/auth0Accounts';
import { captureException } from '@sentry/core';
import moment from 'moment';
/**
 * Passport declares an empty interface User in the Express namespace. We modify
 * it once here, and then all Passport user typings will use it.
 *
 * See: https://github.com/DefinitelyTyped/DefinitelyTyped/commit/91c229dbdb653dbf0da91992f525905893cbeb91#r34805708
 *
 * It appears that passport is the only user of Express.User, so this choice
 * only affects the shape of user objects in this file.
 */
declare global {
    // @types/passport made the decision to use the Express namespace. We're
    // constrained to follow it
    // eslint-disable-next-line @typescript-eslint/no-namespace
    namespace Express {
        // Express.User (e.g. req.user) is our full database user document.
        interface User extends DbUser {
        }
    }
}
// OAuth client credentials and related secrets, read from database-backed
// server settings. A null value disables the corresponding provider.
const googleClientIdSetting = new DatabaseServerSetting<string | null>('oAuth.google.clientId', null)
const googleOAuthSecretSetting = new DatabaseServerSetting<string | null>('oAuth.google.secret', null)
const auth0ClientIdSetting = new DatabaseServerSetting<string | null>('oAuth.auth0.appId', null)
const auth0OAuthSecretSetting = new DatabaseServerSetting<string | null>('oAuth.auth0.secret', null)
const auth0DomainSetting = new DatabaseServerSetting<string | null>('oAuth.auth0.domain', null)
const facebookClientIdSetting = new DatabaseServerSetting<string | null>('oAuth.facebook.appId', null)
const facebookOAuthSecretSetting = new DatabaseServerSetting<string | null>('oAuth.facebook.secret', null)
const githubClientIdSetting = new DatabaseServerSetting<string | null>('oAuth.github.clientId', null)
const githubOAuthSecretSetting = new DatabaseServerSetting<string | null>('oAuth.github.secret', null)
// Session secret for express-session; when null, session-based features
// (e.g. returnTo redirects) are disabled.
export const expressSessionSecretSetting = new DatabaseServerSetting<string | null>('expressSessionSecret', null)

// Extracts the provider-specific unique id from an OAuth profile.
type IdFromProfile<P extends Profile> = (profile: P) => string | number
// Builds the fields for a new DbUser from an OAuth profile.
type UserDataFromProfile<P extends Profile> = (profile: P) => Promise<Partial<DbUser>>
/**
 * Attach an OAuth profile to an existing user document, under e.g.
 * `services.google`. Validation is skipped because ordinary (validated)
 * updates are not supposed to touch the services field.
 */
async function mergeAccount(profilePath: string, user: DbUser, profile: Profile) {
    const updatedFields = {[profilePath]: profile} as any;
    const mutationResult = await updateMutator({
        collection: Users,
        documentId: user._id,
        set: updatedFields,
        validate: false,
    });
    return mutationResult;
}
/**
 * Given the provider-appropriate ways to get user info from a profile, create
 * a function that handles successful logins from that provider.
 *
 * The returned verify callback:
 * 1. Looks up an existing user by the provider-specific profile id.
 * 2. Failing that, tries to merge the OAuth login into an existing account
 *    with the same (provider-verified) email address.
 * 3. Failing that, creates a new account from the profile.
 * Any path resolving to an existing user rejects banned users.
 */
function createOAuthUserHandler<P extends Profile>(profilePath: string, getIdFromProfile: IdFromProfile<P>, getUserDataFromProfile: UserDataFromProfile<P>) {
    return async (_accessToken: string, _refreshToken: string, profile: P, done: VerifyCallback) => {
        try {
            const profileId = getIdFromProfile(profile)
            // Probably impossible
            if (!profileId) {
                throw new Error('OAuth profile does not have a profile ID')
            }
            let user = await Users.findOne({[`${profilePath}.id`]: profileId})
            if (!user) {
                const email = profile.emails?.[0]?.value
                // Don't enforce having an email. Facebook OAuth accounts don't necessarily
                // have an email address associated (or visible to us).
                //
                // If an email *is* provided, the OAuth provider verified it, and we should
                // be able to trust that.
                if (email) {
                    // Collation here means we're using the case-insensitive index
                    const matchingUsers = await Users.find({'emails.address': email}, {collation: {locale: 'en', strength: 2}}).fetch()
                    if (matchingUsers.length > 1) {
                        throw new Error(`Multiple users found with email ${email}, please contact support`)
                    }
                    // Renamed from `user` — the previous name shadowed the
                    // outer `user` binding above.
                    const matchingUser = matchingUsers[0]
                    if (matchingUser) {
                        const { data: userUpdated } = await mergeAccount(profilePath, matchingUser, profile)
                        if (matchingUser.banned && new Date(matchingUser.banned) > new Date()) {
                            return done(new Error("banned"))
                        }
                        return done(null, userUpdated)
                    }
                }
                const { data: userCreated } = await createMutator({
                    collection: Users,
                    document: await getUserDataFromProfile(profile),
                    validate: false,
                    currentUser: null
                })
                return done(null, userCreated)
            }
            user = await syncOAuthUser(user, profile)
            if (user.banned && new Date(user.banned) > new Date()) {
                return done(new Error("banned"))
            }
            return done(null, user)
        } catch (err) {
            captureException(err);
            return done(err)
        }
    }
}
/**
 * Auth0's verify callback receives 5 arguments (an extra params object
 * before the profile), so adapt it to the standard 4-argument handler.
 */
function createOAuthUserHandlerAuth0(profilePath: string, getIdFromProfile: IdFromProfile<Auth0Profile>, getUserDataFromProfile: UserDataFromProfile<Auth0Profile>) {
    const handleUser = createOAuthUserHandler(profilePath, getIdFromProfile, getUserDataFromProfile)
    return (
        accessToken: string,
        refreshToken: string,
        _extraParams: ExtraVerificationParams,
        profile: Auth0Profile,
        done: VerifyCallback,
    ) => handleUser(accessToken, refreshToken, profile, done)
}
/**
 * Keep the account's email in sync with the one reported by the OAuth
 * provider.
 *
 * The semantics of the profile's email list are undocumented; we treat the
 * first entry as canonical. When the account's current email (possibly
 * missing entirely) is not among the provider's emails, we try to adopt the
 * provider's first email — unless another account already uses that address
 * (possible when an OAuth and a non-OAuth account share an email), in which
 * case the user is returned unchanged.
 */
async function syncOAuthUser(user: DbUser, profile: Profile): Promise<DbUser> {
    const providerEmails = profile.emails?.map(emailObj => emailObj.value) ?? []
    if (providerEmails.length === 0) return user

    // Already in sync; nothing to do.
    if (providerEmails.includes(user.email)) return user

    // Adopting the provider email would collide with another account; leave
    // this account's email alone.
    const conflictingAccount = await Users.findOne({email: providerEmails[0]})
    if (conflictingAccount) return user

    const updatedUserResponse = await updateMutator({
        collection: Users,
        documentId: user._id,
        set: {email: providerEmails[0]},
        validate: false
    })
    return updatedUserResponse.data
}
/**
 * Saves desired return path to session data for redirection upon
 * authentication.
 *
 * Assumes the request carried a query parameter, like
 * /auth/google?returnTo=bar/baz, and that express-session is enabled.
 *
 * An expiration is stored alongside the path — otherwise a stale returnTo
 * could cause future non-returnTo logins to erroneously redirect.
 */
function saveReturnTo(req: any): void {
    // Without an express-session secret there is no session to store into.
    if (!expressSessionSecretSetting.get()) return

    const { returnTo } = req.query
    if (!returnTo || !req.session) return

    req.session.loginReturnTo = {
        path: returnTo,
        // Enough time to login, even if you have to go looking for your
        // password. If you take longer than that, then hey, you probably
        // forgot what you were doing anyway.
        expiration: moment().add(30, 'minutes').toISOString(),
    }
}
/**
 * Gets desired return path from session data, falling back to '/' when
 * sessions are disabled, nothing was stored, or the stored entry expired.
 */
function getReturnTo(req: any): string {
    const stored = expressSessionSecretSetting.get() ? req.session?.loginReturnTo : undefined
    if (!stored) return '/'
    // An expired returnTo must not hijack an unrelated future login.
    if (moment(stored.expiration) < moment()) return '/'
    return stored.path
}
// Authenticates a request by resolving its login-token cookie to a user.
// Resolving to `false` (not an error) means "not logged in".
const cookieAuthStrategy = new CustomStrategy(async function getUserPassport(req: any, done) {
    const loginToken = getCookieFromReq(req, 'loginToken') || getCookieFromReq(req, 'meteor_login_token') // Backwards compatibility with meteor_login_token here
    if (!loginToken) {
        // No token cookie present: anonymous request.
        return done(null, false)
    }
    const user = await getUser(loginToken)
    if (!user) {
        // Token did not resolve to a user (invalid or expired).
        return done(null, false)
    }
    done(null, user)
})
/**
 * Passport deserializer: resolve a session-stored user _id back to the full
 * user document.
 *
 * Bug fix: the not-found branch previously fell through and invoked `done` a
 * second time with (null, undefined); it now returns after the first call.
 */
async function deserializeUserPassport(id, done) {
    const user = await Users.findOne({_id: id})
    if (!user) return done()
    done(null, user)
}
// Store only the user's _id in the session; deserializeUserPassport
// re-fetches the full document on each request.
passport.serializeUser((user, done) => done(null, user._id))
passport.deserializeUser(deserializeUserPassport)
/**
 * Registers all authentication middleware on the app:
 * - passport initialization plus the cookie/login-token ('custom') strategy,
 * - a logout route that clears session cookies (and logs out of Auth0 on EAForum),
 * - Google / Facebook / GitHub / Auth0 OAuth strategies and their login and
 *   callback routes — each only when its credentials are configured in the DB.
 * @param addConnectHandler - mounts a connect-style handler, optionally under a path prefix
 */
export const addAuthMiddlewares = (addConnectHandler) => {
  addConnectHandler(passport.initialize())
  passport.use(cookieAuthStrategy)

  // Try to authenticate every request from its login-token cookie. A missing
  // user is not an error here: the request simply proceeds unauthenticated.
  addConnectHandler('/', (req, res, next) => {
    passport.authenticate('custom', (err, user, info) => {
      if (err) return next(err)
      if (!user) return next()
      req.logIn(user, (err) => {
        if (err) return next(err)
        next()
      })
    })(req, res, next)
  })

  // Logout: end the passport session, expire all session cookies, then
  // redirect (either through Auth0's logout endpoint or straight home).
  addConnectHandler('/logout', (req, res, next) => {
    passport.authenticate('custom', (err, user, info) => {
      if (err) return next(err)
      req.logOut()

      // Remove session cookies (only those actually present on the request).
      const cookieUpdates = ['meteor_login_token', 'loginToken', 'connect.sid']
        .filter(cookieName => getCookieFromReq(req, cookieName))
        // The accepted way to delete a cookie is to set an expiration date in the past.
        .map(cookieName => `${cookieName}= ; expires=${new Date(0).toUTCString()}`)
      if (cookieUpdates.length) {
        // We need to set all Set-Cookie headers at once, or we'd overwrite the
        // previous ones. The way to set multiple Set-Cookie headers is to set
        // it with an array.
        // https://nodejs.org/api/http.html#http_request_setheader_name_value
        res.setHeader('Set-Cookie', cookieUpdates)
      }

      res.statusCode=302;
      // Need to log the user out of their Auth0 account. Otherwise when they
      // next try to login they won't be given a choice, just auto-resumed to
      // the same Auth0 account.
      if (auth0DomainSetting.get() && auth0ClientIdSetting.get() && forumTypeSetting.get() === 'EAForum') {
        // Will redirect to our homepage, and is a noop if they're not logged in
        // to an Auth0 account, so this is very non-disruptive
        res.setHeader('Location', `https://${auth0DomainSetting.get()}/v2/logout?client_id=${auth0ClientIdSetting.get()}`);
      } else {
        res.setHeader('Location', '/');
      }
      return res.end();
    })(req, res, next);
  })

  // Google OAuth strategy (registered only when credentials are configured).
  const googleClientId = googleClientIdSetting.get()
  const googleOAuthSecret = googleOAuthSecretSetting.get()
  if (googleClientId && googleOAuthSecret) {
    passport.use(new GoogleOAuthStrategy({
      clientID: googleClientId,
      clientSecret: googleOAuthSecret,
      callbackURL: `${getSiteUrl()}auth/google/callback`,
      proxy: true
    },
      createOAuthUserHandler<GoogleProfile>('services.google', profile => profile.id, async profile => ({
        email: profile.emails?.[0].value,
        services: {
          google: profile
        },
        // Google-provided emails are treated as verified.
        emails: profile.emails?.[0].value ? [{address: profile.emails?.[0].value, verified: true}] : [],
        username: await Utils.getUnusedSlugByCollectionName("Users", slugify(profile.displayName)),
        displayName: profile.displayName,
        emailSubscribedToCurated: true
        // Type assertion here is because @types/passport-google-oauth20 doesn't
        // think their verify callback is able to take a null in the place of the
        // error, which seems like a bug and which prevents our seemingly working
        // code from type-checking
      })) as (_accessToken: string, _refreshToken: string, profile: GoogleProfile, done: GoogleVerifyCallback) => Promise<void>
    ))
  }

  // Facebook OAuth strategy (registered only when credentials are configured).
  const facebookClientId = facebookClientIdSetting.get()
  const facebookOAuthSecret = facebookOAuthSecretSetting.get()
  if (facebookClientId && facebookOAuthSecret) {
    passport.use(new FacebookOAuthStrategy({
      clientID: facebookClientId,
      clientSecret: facebookOAuthSecret,
      callbackURL: `${getSiteUrl()}auth/facebook/callback`,
      profileFields: ['id', 'emails', 'name', 'displayName'],
    },
      createOAuthUserHandler<FacebookProfile>('services.facebook', profile => profile.id, async profile => ({
        email: profile.emails?.[0].value,
        emails: profile.emails?.[0].value ? [{address: profile.emails?.[0].value, verified: true}] : [],
        services: {
          facebook: profile
        },
        username: await Utils.getUnusedSlugByCollectionName("Users", slugify(profile.displayName)),
        displayName: profile.displayName,
        emailSubscribedToCurated: true
      }))
    ))
  }

  // GitHub OAuth strategy (registered only when credentials are configured).
  const githubClientId = githubClientIdSetting.get()
  const githubOAuthSecret = githubOAuthSecretSetting.get()
  if (githubClientId && githubOAuthSecret) {
    passport.use(new GithubOAuthStrategy({
      clientID: githubClientId,
      clientSecret: githubOAuthSecret,
      callbackURL: `${getSiteUrl()}auth/github/callback`,
      scope: [ 'user:email' ], // fetches non-public emails as well
    },
      // GitHub profile ids are numeric strings; stored as numbers.
      createOAuthUserHandler<GithubProfile>('services.github', profile => parseInt(profile.id), async profile => ({
        email: profile.emails?.[0].value,
        emails: profile.emails?.[0].value ? [{address: profile.emails?.[0].value, verified: true}] : [],
        services: {
          github: profile
        },
        username: await Utils.getUnusedSlugByCollectionName("Users", slugify(profile.username || profile.displayName)),
        displayName: profile.username || profile.displayName,
        emailSubscribedToCurated: true
      }))
    ));
  }

  // Shared OAuth-callback handler: maps strategy errors/results to either an
  // error response, a ban notice, or a logged-in session plus redirect.
  const handleAuthenticate = (req, res, next, err, user, info) => {
    if (err) {
      // Banned users get a dedicated notice page instead of a generic error.
      if (err.message === "banned") {
        res.redirect(301, '/banNotice');
        return res.end();
      } else {
        return next(err)
      }
    }
    // OAuth providers report some failures via query params, not `err`.
    if (req.query?.error) {
      const { error, error_description} = req.query
      return next(new Error(`${error}: ${error_description}`))
    }
    if (!user) return next()
    req.logIn(user, async (err) => {
      if (err) return next(err)
      await createAndSetToken(req, res, user)
      // Redirect back to wherever the user started the login flow.
      const returnTo = getReturnTo(req)
      res.statusCode=302;
      res.setHeader('Location', returnTo)
      return res.end();
    })
  }

  // NB: You must also set the expressSessionSecret setting in your database
  // settings - auth0 passport strategy relies on express-session to store state
  const auth0ClientId = auth0ClientIdSetting.get();
  const auth0OAuthSecret = auth0OAuthSecretSetting.get()
  const auth0Domain = auth0DomainSetting.get()
  if (auth0ClientId && auth0OAuthSecret && auth0Domain) {
    passport.use(new Auth0Strategy(
      {
        clientID: auth0ClientId,
        clientSecret: auth0OAuthSecret,
        domain: auth0Domain,
        callbackURL: combineUrls(getSiteUrl(), 'auth/auth0/callback')
      },
      createOAuthUserHandlerAuth0('services.auth0', profile => profile.id, userFromAuth0Profile)
    ));
  }

  // OAuth callback routes: all funnel into handleAuthenticate above.
  addConnectHandler('/auth/google/callback', (req, res, next) => {
    passport.authenticate('google', {}, (err, user, info) => {
      handleAuthenticate(req, res, next, err, user, info);
    })(req, res, next)
  })

  // Login entry points: remember where the user came from, then hand off to
  // the provider's consent screen.
  addConnectHandler('/auth/google', (req, res, next) => {
    saveReturnTo(req)
    // accessType offline + prompt consent ensures we receive a refresh token.
    passport.authenticate('google', {
      scope: [
        'https://www.googleapis.com/auth/plus.login',
        'https://www.googleapis.com/auth/userinfo.email'
      ], accessType: "offline", prompt: "consent"
    })(req, res, next)
  })

  addConnectHandler('/auth/facebook/callback', (req, res, next) => {
    passport.authenticate('facebook', {}, (err, user, info) => {
      handleAuthenticate(req, res, next, err, user, info);
    })(req, res, next)
  })

  addConnectHandler('/auth/facebook', (req, res, next) => {
    saveReturnTo(req)
    passport.authenticate('facebook')(req, res, next)
  })

  addConnectHandler('/auth/auth0/callback', (req, res, next) => {
    passport.authenticate('auth0', (err, user, info) => {
      handleAuthenticate(req, res, next, err, user, info)
    })(req, res, next)
  })

  addConnectHandler('/auth/auth0', (req, res, next) => {
    // Forward only the whitelisted Auth0 hints (e.g. screen_hint=signup).
    const extraParams = pick(req.query, ['screen_hint', 'prompt'])
    saveReturnTo(req)
    passport.authenticate('auth0', {
      scope: 'profile email openid offline_access',
      ...extraParams
    } as AuthenticateOptions)(req, res, next)
  })

  addConnectHandler('/auth/github/callback', (req, res, next) => {
    passport.authenticate('github', {}, (err, user, info) => {
      handleAuthenticate(req, res, next, err, user, info);
    })(req, res, next)
  })

  addConnectHandler('/auth/github', (req, res, next) => {
    saveReturnTo(req)
    passport.authenticate('github', { scope: ['user:email']})(req, res, next)
  })
}
import * as vscode from 'vscode';
import { WorkspaceFolder, DebugConfiguration, ProviderResult, CancellationToken } from 'vscode';
import { DebugSessionClass } from './debugadapter';
import * as Net from 'net';
import { DecorationClass, Decoration } from './decoration';
import {LogSocket, LogCustomCode, LogSocketCommands, Log } from './log';
import {Utility} from './misc/utility';
import {PackageInfo} from './whatsnew/packageinfo';
import {WhatsNewView} from './whatsnew/whatsnewview';
import {HelpProvider} from './help/helpprovider';
import {GlobalStorage} from './globalstorage';
import {Z80UnitTestRunner} from './z80unittests/z80unittestrunner';
import {DiagnosticsHandler} from './diagnosticshandler';
/*
let aa = 1;
(() => {
let aa1 = aa;
function f() {
let aaf = aa1;
};
(() => {
let aa2 = aa;
let aa3 = aa1;
f();
})();
})();
*/
//var thisline = new Error().lineNumber;
/**
 * 'activate' is called when one of the package.json activationEvents
 * fires the first time.
 * Afterwards it is not called anymore.
 * 'deactivate' is called when vscode is terminated.
 * I.e. the activationEvents just distribute the calling of the extensions
 * a little bit. Instead one could as well use "*", i.e. activate on all events.
 *
 * Registers the configuration provider, the inline-values provider, the help
 * and "What's New" views, logging, decorations, the unit test runner, and the
 * command palette commands.
 * @param context The extension context provided by VS Code; all disposables
 * are pushed onto context.subscriptions for automatic cleanup.
 */
export function activate(context: vscode.ExtensionContext) {
	//console.log("Extension ACTIVATED");
	/*
	let ut;
	try {
		ut = Utility.require('/Volumes/SDDPCIE2TB/Projects/Z80/vscode/DeZog/src/firsttests2.ut.jsm');
		//ut.suiteStack[0].func();
	}
	catch(e) {
		console.log(e);
	}
	ut = ut;
	*/

	// Init package info
	PackageInfo.Init(context);

	// Init global storage
	GlobalStorage.Init(context);

	// Init/subscribe diagnostics
	DiagnosticsHandler.Init(context);

	// Save the extension path also to PackageInfo
	const extPath = context.extensionPath;
	// it is also stored here as Utility does not include vscode which is more unit-test-friendly.
	Utility.setExtensionPath(extPath);

	// Check version and show 'What's new' if necessary.
	const mjrMnrChanged = WhatsNewView.updateVersion(context);
	if (mjrMnrChanged) {
		// Major or minor version changed so show the whatsnew page.
		new WhatsNewView();
	}

	// Register the additional command to view the "Whats' New" page.
	context.subscriptions.push(vscode.commands.registerCommand("dezog.whatsNew", () => new WhatsNewView()));

	// Register the 'DeZog Help' webview
	const helpProvider = new HelpProvider();
	context.subscriptions.push(
		vscode.window.registerWebviewViewProvider("dezog.helpview", helpProvider, {webviewOptions: {retainContextWhenHidden: false}})
	);

	// Command to show the DeZog Help
	context.subscriptions.push(vscode.commands.registerCommand('dezog.help', () => helpProvider.createHelpView()));

	// Enable e.g. logging.
	const extension = PackageInfo.extension;
	const packageJSON = extension.packageJSON;
	const extensionBaseName = packageJSON.name;
	const configuration = PackageInfo.getConfiguration();
	configureLogging(configuration);
	// Re-apply logging settings whenever one of the logpanel options changes.
	context.subscriptions.push(vscode.workspace.onDidChangeConfiguration(event => {
		// Logging changed
		// NOTE(review): 'configuration' is the snapshot captured at activation
		// time; if changed settings are not picked up here, re-fetch via
		// PackageInfo.getConfiguration() — confirm against PackageInfo's
		// implementation.
		if (event.affectsConfiguration(extensionBaseName + '.logpanel')
			|| event.affectsConfiguration(extensionBaseName+'.socket.logpanel')
			|| event.affectsConfiguration(extensionBaseName+'.customcode.logpanel')) {
			configureLogging(configuration);
		}
		// 'donated' changed
		if (event.affectsConfiguration(extensionBaseName + '.donated')) {
			// Reload complete html
			helpProvider.setMainHtml();
		}
	}));

	// Note: Weinand: "VS Code runs extensions on the node version that is built into electron (on which VS Code is based). This cannot be changed."
	const version = process.version;
	console.log(version);

	context.subscriptions.push(vscode.debug.onDidTerminateDebugSession(s => {
		console.log(`terminated: ${s.type} ${s.name}`);
	}));

	// Command to change the program counter via menu.
	context.subscriptions.push(vscode.commands.registerCommand('dezog.movePCtoCursor', () => {
		// Only allowed in debug context
		if (!vscode.debug.activeDebugSession)
			return;
		// Get focused editor/file and line
		const editor = vscode.window.activeTextEditor;
		if (!editor)
			return;
		const position = editor.selection.anchor;
		const filename = editor.document.fileName;
		// Send to debug adapter
		vscode.debug.activeDebugSession.customRequest('setPcToLine', [filename, position.line]);
	}));

	// Command to do a disassembly at the cursor's position.
	context.subscriptions.push(vscode.commands.registerCommand('dezog.disassemblyAtCursor', async () => {
		// Only allowed in debug context
		if (!vscode.debug.activeDebugSession)
			return;
		// Get focused editor/file and line
		const editor = vscode.window.activeTextEditor;
		if (!editor)
			return;
		// Go through all selections in case of multiple selections
		for (const selection of editor.selections) {
			let from = selection.anchor;
			let to = selection.active;
			const filename = editor.document.fileName;
			// Adjust so that 'from' is always the upper line.
			if (from.line > to.line) {
				// exchange
				const tmp = from;
				from = to;
				to = tmp;
			}
			const fromLine = from.line;
			let toLine = to.line;
			// A selection that ends at column 0 does not include that line.
			if (toLine > fromLine) {
				if (to.character == 0)
					toLine--;
			}
			// Send to debug adapter
			await vscode.debug.activeDebugSession.customRequest('disassemblyAtCursor', [filename, fromLine, toLine]);
		}
	}));

	// Command to disable code coverage display and analyzes.
	context.subscriptions.push(vscode.commands.registerCommand('dezog.clearAllDecorations', () => {
		Decoration?.clearAllDecorations();
	}));

	// Register a configuration provider for 'dezog' debug type
	const configProvider = new DeZogConfigurationProvider()
	context.subscriptions.push(vscode.debug.registerDebugConfigurationProvider('dezog', configProvider));

	// Registers the debug inline value provider (see DeZogInlineValuesProvider
	// below for why it deliberately provides nothing).
	const asmDocSelector: vscode.DocumentSelector = {scheme: 'file'};
	const inlineValuesProvider = new DeZogInlineValuesProvider();
	context.subscriptions.push(vscode.languages.registerInlineValuesProvider(asmDocSelector, inlineValuesProvider));

	/*
	Actually this did not work very well for other reasons:
	It's better to retrieve the file/lineNr from the PC value.
	Therefore I removed this.
	// Register an evaluation provider for hovering.
	// Note: Function is only called in debug context and only for the file currently being debugged.
	// Therefore '' is enough.
	vscode.languages.registerEvaluatableExpressionProvider('*', {
		provideEvaluatableExpression(
			document: vscode.TextDocument,
			position: vscode.Position
		): vscode.ProviderResult<vscode.EvaluatableExpression> {
			const wordRange = document.getWordRangeAtPosition(position, /[\w\.]+/);
			if (wordRange) {
				const filePath = document.fileName;
				if (filePath) {
					const text = document.getText(wordRange);
					// Put additionally text file path and position into 'expression',
					// Format: "word:filePath:line:column"
					// Example: "data_b60:/Volumes/SDDPCIE2TB/Projects/Z80/asm/z80-sld/main.asm:28:12
					const expression = text + ':' + filePath + ':' + position.line + ':' + position.character;
					return new vscode.EvaluatableExpression(wordRange, expression);
				}
			}
			return undefined;	// Nothing found
		}
	});
	*/

	// Initialize the Coverage singleton.
	DecorationClass.Initialize();

	// Initialize the unit tester.
	Z80UnitTestRunner.Init();
}
/**
 * Called by VS Code when the extension shuts down (i.e. when VS Code is
 * terminated). No explicit teardown is needed: everything registered during
 * activate() was pushed onto context.subscriptions and is disposed by
 * VS Code itself.
 */
export function deactivate() {
	// Intentionally empty.
}
/**
 * Deliberately empty inline-values provider.
 * Registering it prevents VS Code's default debug inline-values provider from
 * kicking in (which would show basically garbage). DeZog renders its own
 * values via decorations instead, which also works across all files rather
 * than only the file currently being debugged.
 *
 * With the settings.json option "debug.inlineValues":
 * - false: this provider is never called
 * - true/"auto": it is called but contributes nothing
 */
class DeZogInlineValuesProvider implements vscode.InlineValuesProvider {
	//onDidChangeInlineValues?: vscode.Event<void> | undefined;
	provideInlineValues(document: vscode.TextDocument, viewPort: vscode.Range, context: vscode.InlineValueContext, token: vscode.CancellationToken): vscode.ProviderResult<vscode.InlineValue[]> {
		// Intentionally provides no inline values.
		return undefined;
	}
}
/**
 * Debug configuration provider for the 'dezog' debug type.
 * Hosts the debug adapter (DebugSessionClass) on a local TCP server and
 * points VS Code at its port, instead of letting VS Code spawn the adapter
 * as a separate process.
 */
class DeZogConfigurationProvider implements vscode.DebugConfigurationProvider {
	private _server?: Net.Server;

	/**
	 * Lazily starts the debug-adapter server (on the first debug session only)
	 * and rewrites the configuration so VS Code connects to that server.
	 */
	resolveDebugConfiguration(folder: WorkspaceFolder | undefined, config: DebugConfiguration, token?: CancellationToken): ProviderResult<DebugConfiguration> {
		if (!this._server) {
			// Listen on a random free port; every incoming connection gets its
			// own debug session instance running in server mode.
			this._server = Net.createServer(socket => {
				const session = new DebugSessionClass();
				session.setRunAsServer(true);
				session.start(<NodeJS.ReadableStream>socket, socket);
			}).listen(0);
		}

		// Make VS Code connect to the debug server instead of launching an adapter.
		const addrInfo = this._server.address() as Net.AddressInfo;
		Utility.assert(typeof addrInfo != 'string');
		config.debugServer = addrInfo.port;
		return config;
	}

	/**
	 * Shuts the adapter server down on disposal.
	 */
	dispose() {
		this._server?.close();
	}
}
/**
* Configures the logging from the settings.
*/
function configureLogging(configuration: vscode.WorkspaceConfiguration) {
// Global log
{
const logToPanel = configuration.get<boolean>('logpanel');
const channelName = (logToPanel) ? "DeZog" : undefined;
const channelOut = (channelName) ? vscode.window.createOutputChannel(channelName) : undefined;
Log.init(channelOut);
}
// Custom code log
{
const logToPanel = configuration.get<boolean>('customcode.logpanel');
const channelName = (logToPanel) ? "DeZog Custom Code" : undefined;
const channelOut = (channelName) ? vscode.window.createOutputChannel(channelName) : undefined;
LogCustomCode.init(channelOut);
}
// Socket log
{
const logToPanel = configuration.get<boolean>('socket.logpanel');
const channelName = (logToPanel) ? "DeZog Socket" : undefined;
const channelOut = (channelName) ? vscode.window.createOutputChannel(channelName) : undefined;
LogSocket.init(channelOut);
}
// Enable to get a log of the commands only
if (false) {
const channelOut = vscode.window.createOutputChannel("DeZog Socket Commands");
LogSocketCommands.init(channelOut, undefined);
}
} | the_stack |
import {withTask} from 'react-palm/tasks';
import {default as Console} from 'global/console';
import {generateHashId, getError, isPlainObject, toArray} from '../utils/utils';
import {
EXPORT_FILE_TO_CLOUD_TASK,
ACTION_TASK,
DELAY_TASK,
LOAD_CLOUD_MAP_TASK,
GET_SAVED_MAPS_TASK
} from '../tasks/tasks';
import {
exportFileSuccess,
exportFileError,
postSaveLoadSuccess,
loadCloudMapSuccess,
getSavedMapsSuccess,
getSavedMapsError,
loadCloudMapError,
resetProviderStatus
} from '../actions/provider-actions';
import {removeNotification, toggleModal, addNotification} from '../actions/ui-state-actions';
import {addDataToMap} from '../actions/actions';
import {
DEFAULT_NOTIFICATION_TYPES,
DEFAULT_NOTIFICATION_TOPICS,
DATASET_FORMATS,
OVERWRITE_MAP_ID
} from '../constants/default-settings';
import {FILE_CONFLICT_MSG} from '../cloud-providers';
import {DATASET_HANDLERS} from '../processors/data-processor';
import * as ProviderActions from '../actions/provider-actions';
import {MapListItem} from '../cloud-providers';
// Minimal shape of a redux action as consumed by these updaters: an optional
// action-type tag plus a typed payload.
type ActionPayload<P> = {
  type?: string;
  payload: P;
};
// Redux state slice tracking cloud-provider (save/load/list) status.
export type ProviderState = {
  // true while any provider request (upload, list, ...) is in flight
  isProviderLoading: boolean;
  // true while a saved map is being downloaded and loaded into the app
  isCloudMapLoading: boolean;
  // last provider error (as produced by getError), or null
  providerError: any;
  // name of the currently selected cloud provider, or null
  currentProvider: string | null;
  // raw response info from the last successful provider operation
  successInfo: any;
  // name of the provider the current map was saved to, or null if unsaved
  mapSaved: null | string;
  initialState?: any;
  // the user's saved maps as returned by provider.listMaps
  visualizations: MapListItem[];
};
// Default provider state: nothing loading, no provider selected, no errors,
// no saved-map list.
export const INITIAL_PROVIDER_STATE: ProviderState = {
  isProviderLoading: false,
  isCloudMapLoading: false,
  providerError: null,
  currentProvider: null,
  successInfo: {},
  mapSaved: null,
  visualizations: []
};
declare function withTask<T>(s: T, any: any): T;
// Wraps a user-supplied callback into an ACTION_TASK so the task middleware
// dispatches it with the given payload; returns null when no callback (or a
// non-function) was provided.
function createActionTask(action, payload) {
  if (typeof action !== 'function') {
    return null;
  }
  return ACTION_TASK().map(_ => action(payload));
}
// Sanity-checks that a cloud provider object exists and implements the
// required method. Logs an error to the console and returns false otherwise.
function _validateProvider(provider, method) {
  if (!provider) {
    Console.error(`provider is not defined`);
    return false;
  }
  if (typeof provider[method] === 'function') {
    return true;
  }
  Console.error(`${method} is not a function of Cloud provider: ${provider.name}`);
  return false;
}
/**
 * Builds the task list for showing a global notification: an add-notification
 * task, plus — unless delayClose is false — a delayed task that removes the
 * notification again after 3 seconds.
 */
function createGlobalNotificationTasks({
  type,
  message,
  delayClose = true
}: {
  type?: string;
  message: string;
  delayClose?: boolean;
}) {
  const id = generateHashId();
  // Unknown notification types fall back to the success styling.
  const note = {
    id,
    type: DEFAULT_NOTIFICATION_TYPES[type || ''] || DEFAULT_NOTIFICATION_TYPES.success,
    topic: DEFAULT_NOTIFICATION_TOPICS.global,
    message
  };
  const showTask = ACTION_TASK().map(_ => addNotification(note));
  if (!delayClose) {
    return [showTask];
  }
  return [showTask, DELAY_TASK(3000).map(_ => removeNotification(id))];
}
/**
 * Exports the current kepler.gl map to the chosen cloud provider and, on
 * success, yields a share URL. Marks the provider as loading and schedules
 * the upload task, which resolves into exportFileSuccess / exportFileError
 * actions.
 */
export const exportFileToCloudUpdater = (
  state: ProviderState,
  action: ActionPayload<ProviderActions.ExportFileToCloudPayload>
): ProviderState => {
  const {mapData, provider, options = {}, onSuccess, onError, closeModal} = action.payload;
  // No state change when the provider cannot upload maps.
  if (!_validateProvider(provider, 'uploadMap')) {
    return state;
  }

  // Arguments forwarded verbatim to provider.uploadMap.
  const payload = {mapData, options};
  const uploadFileTask = EXPORT_FILE_TO_CLOUD_TASK({provider, payload}).bimap(
    // success
    response => exportFileSuccess({response, provider, options, onSuccess, closeModal}),
    // error
    error => exportFileError({error, provider, options, onError})
  );

  return withTask(
    {
      ...state,
      isProviderLoading: true,
      currentProvider: provider.name
    },
    uploadFileTask
  );
};
/**
 * Handles a successful cloud export: stores the provider response, clears the
 * loading flag, and — for non-public maps — records which provider the map
 * was saved to. Optionally triggers the caller's onSuccess callback and, when
 * closeModal is set, the save-success modal flow.
 */
export const exportFileSuccessUpdater = (
  state: ProviderState,
  action: ActionPayload<ProviderActions.ExportFileSuccessPayload>
): ProviderState => {
  const {response, provider, options = {}, onSuccess, closeModal} = action.payload;
  const newState: ProviderState = {
    ...state,
    isProviderLoading: false,
    // TODO: do we always have to store this?
    successInfo: response
  };
  // Publicly shared maps are not considered "saved" to the provider.
  if (!options.isPublic) {
    newState.mapSaved = provider.name;
  }

  const tasks = [
    createActionTask(onSuccess, {response, provider, options}),
    closeModal &&
      ACTION_TASK().map(_ => postSaveLoadSuccess(`Map saved to ${state.currentProvider}!`))
  ].filter(d => d);

  return tasks.length ? withTask(newState, tasks) : newState;
};
/**
 * Closes the modal on a successful save/load, resets the provider status and
 * shows a self-dismissing success notification.
 */
export const postSaveLoadSuccessUpdater = (
  state: ProviderState,
  action: ActionPayload<ProviderActions.PostSaveLoadSuccessPayload>
): ProviderState => {
  // The action payload may carry a custom message.
  const message = action.payload || `Saved / Load to ${state.currentProvider} Success`;
  return withTask(state, [
    ACTION_TASK().map(_ => toggleModal(null)),
    ACTION_TASK().map(_ => resetProviderStatus()),
    ...createGlobalNotificationTasks({message})
  ]);
};
/**
 * Handles a failed cloud export. A file-name conflict opens the overwrite-map
 * dialog instead of surfacing an error; any other failure is stored as
 * providerError and forwarded to the optional onError callback.
 */
export const exportFileErrorUpdater = (
  state: ProviderState,
  action: ActionPayload<ProviderActions.ExportFileErrorPayload>
): ProviderState => {
  const {error, provider, onError} = action.payload;
  const newState = {...state, isProviderLoading: false};

  if (isFileConflict(error)) {
    // A map with this name already exists: ask the user whether to overwrite.
    newState.mapSaved = provider.name;
    return withTask(newState, [ACTION_TASK().map(_ => toggleModal(OVERWRITE_MAP_ID))]);
  }

  newState.providerError = getError(error);
  const task = createActionTask(onError, {error, provider});
  return task ? withTask(newState, task) : newState;
};
/**
 * Loads a saved map from a cloud provider. Validates the request, flags both
 * the provider and the map as loading, and schedules the download task which
 * resolves into loadCloudMapSuccess / loadCloudMapError actions.
 */
export const loadCloudMapUpdater = (
  state: ProviderState,
  action: ActionPayload<ProviderActions.LoadCloudMapPayload>
): ProviderState => {
  const {loadParams, provider, onSuccess, onError} = action.payload;
  if (!loadParams) {
    Console.warn('load map error: loadParams is undefined');
    return state;
  }
  if (!_validateProvider(provider, 'downloadMap')) {
    return state;
  }

  // loadParams is handed straight to provider.downloadMap.
  // (Renamed from 'uploadFileTask' — this is a download.)
  const downloadMapTask = LOAD_CLOUD_MAP_TASK({provider, payload: loadParams}).bimap(
    // success
    // @ts-expect-error
    response => loadCloudMapSuccess({response, loadParams, provider, onSuccess, onError}),
    // error
    // @ts-expect-error
    error => loadCloudMapError({error, provider, onError})
  );

  return withTask(
    {
      ...state,
      isProviderLoading: true,
      isCloudMapLoading: true
    },
    downloadMapTask
  );
};
// True when the given error signals a save-name conflict at the provider.
// Falsy inputs are passed through unchanged (null/undefined/...), matching
// the short-circuit behavior of `error && ...`.
function isFileConflict(error) {
  if (!error) {
    return error;
  }
  return error.message === FILE_CONFLICT_MSG;
}
/**
 * Validates the shape of a load-map response.
 * @returns an Error describing the problem, or null when the response has a
 * plain-object `map` carrying both `datasets` and `config`.
 */
function checkLoadMapResponseError(response) {
  if (!response || !isPlainObject(response)) {
    return new Error('Load map response is empty');
  }
  if (!isPlainObject(response.map)) {
    // Fixed message grammar: was "an object property "map"".
    return new Error(`Load map response should be an object with property "map"`);
  }
  if (!response.map.datasets || !response.map.config) {
    return new Error(`Load map response.map should be an object with property datasets or config`);
  }
  return null;
}
/**
 * Resolves the dataset processor for the format reported in a load-map
 * response, falling back to the csv handler (with a console warning) when the
 * format is missing or unknown.
 */
function getDatasetHandler(format) {
  const handler = format ? DATASET_HANDLERS[format] : undefined;
  if (handler) {
    return handler;
  }

  if (!format) {
    Console.warn('format is not provided in load map response, will use csv by default');
  } else {
    const supportedFormat = Object.keys(DATASET_FORMATS)
      .map(k => `'${k}'`)
      .join(', ');
    Console.warn(
      `unknown format ${format}. Please use one of ${supportedFormat}, will use csv by default`
    );
  }
  return DATASET_HANDLERS[DATASET_FORMATS.csv];
}
/**
 * Converts a raw load-map response into the payload expected by addDataToMap:
 * each dataset is parsed with the format-specific handler, and the map info
 * is augmented with provenance (provider name and load params).
 */
function parseLoadMapResponse(response, loadParams, provider) {
  const {map, format} = response;
  const processorMethod = getDatasetHandler(format);

  const parsedDatasets = toArray(map.datasets).map((ds, i) => {
    // kepler.gl-format datasets already carry their ids; pass them through.
    if (format === DATASET_FORMATS.keplergl) {
      return processorMethod(ds);
    }
    // Otherwise keep the dataset's info block, or synthesize one with a
    // random id when it is missing.
    const info = (ds && ds.info) || {id: generateHashId(6)};
    const data = processorMethod(ds.data || ds);
    return {info, data};
  });

  return {
    datasets: parsedDatasets,
    info: {
      ...map.info,
      provider: provider.name,
      loadParams
    },
    // Only attach a config when the saved map actually has one.
    ...(map.config ? {config: map.config} : {})
  };
}
/**
 * Handles a successfully downloaded cloud map. Malformed responses are routed
 * through the export-error updater; valid responses clear the loading flags,
 * record the provider, and schedule adding the parsed map data to the app
 * (plus success callbacks/notifications).
 */
export const loadCloudMapSuccessUpdater = (
  state: ProviderState,
  action: ActionPayload<ProviderActions.LoadCloudMapSuccessPayload>
): ProviderState => {
  const {response, loadParams, provider, onSuccess, onError} = action.payload;

  const formatError = checkLoadMapResponseError(response);
  if (formatError) {
    // Response shape is wrong: reuse the error-handling path.
    return exportFileErrorUpdater(state, {
      payload: {error: formatError, provider, onError}
    });
  }

  const payload = parseLoadMapResponse(response, loadParams, provider);
  const tasks = [
    ACTION_TASK().map(_ => addDataToMap(payload)),
    createActionTask(onSuccess, {response, loadParams, provider}),
    ACTION_TASK().map(_ => postSaveLoadSuccess(`Map from ${provider.name} loaded`))
  ].filter(d => d);

  const newState = {
    ...state,
    mapSaved: provider.name,
    currentProvider: provider.name,
    isCloudMapLoading: false,
    isProviderLoading: false
  };
  return tasks.length ? withTask(newState, tasks) : newState;
};
/**
 * Handles a failed cloud-map download: clears both loading flags and surfaces
 * the error as a persistent global notification. Note the notification (not
 * state.providerError) carries the message here — providerError is reset to
 * null, matching the original behavior.
 */
export const loadCloudMapErrorUpdater = (
  state: ProviderState,
  action: ActionPayload<ProviderActions.LoadCloudMapErrorPayload>
): ProviderState => {
  const message = getError(action.payload.error) || `Error loading saved map`;
  Console.warn(message);

  return withTask(
    {
      ...state,
      isProviderLoading: false,
      isCloudMapLoading: false,
      providerError: null
    },
    createGlobalNotificationTasks({type: 'error', message, delayClose: false})
  );
};
/**
 * Clears all transient provider status (loading flags, last error, success
 * info) while leaving the selected provider and saved-map info untouched.
 */
export const resetProviderStatusUpdater = (state: ProviderState): ProviderState => {
  return {
    ...state,
    isProviderLoading: false,
    providerError: null,
    isCloudMapLoading: false,
    successInfo: {}
  };
};
/**
 * Sets the active cloud provider, clearing any stale loading/error/success
 * state left over from a previous provider.
 */
export const setCloudProviderUpdater = (
  state: ProviderState,
  action: ActionPayload<ProviderActions.SetCloudProviderPayload>
): ProviderState => {
  return {
    ...state,
    isProviderLoading: false,
    providerError: null,
    successInfo: {},
    currentProvider: action.payload
  };
};
/**
 * Requests the list of maps the user has saved with the given provider.
 * No-op when the provider does not implement listMaps; otherwise flags the
 * provider as loading and schedules the list task, which resolves into
 * getSavedMapsSuccess / getSavedMapsError actions.
 */
export const getSavedMapsUpdater = (
  state: ProviderState,
  action: ActionPayload<ProviderActions.GetSavedMapsPayload>
): ProviderState => {
  const provider = action.payload;
  if (!_validateProvider(provider, 'listMaps')) {
    return state;
  }

  const getSavedMapsTask = GET_SAVED_MAPS_TASK(provider).bimap(
    // success
    visualizations => getSavedMapsSuccess({visualizations, provider}),
    // error
    error => getSavedMapsError({error, provider})
  );

  return withTask({...state, isProviderLoading: true}, getSavedMapsTask);
};
/**
 * Stores the list of saved maps returned by the provider and clears the
 * loading flag.
 */
export const getSavedMapsSuccessUpdater = (
  state: ProviderState,
  action: ActionPayload<ProviderActions.GetSavedMapsSuccessPayload>
): ProviderState => {
  return {
    ...state,
    isProviderLoading: false,
    visualizations: action.payload.visualizations
  };
};
export const getSavedMapsErrorUpdater = (
state: ProviderState,
action: ActionPayload<ProviderActions.GetSavedMapsErrorPayload>
): ProviderState => {
const message =
getError(action.payload.error) || `Error getting saved maps from ${state.currentProvider}`;
Console.warn(action.payload.error);
const newState = {
...state,
currentProvider: null,
isProviderLoading: false
};
return withTask(
newState,
createGlobalNotificationTasks({type: 'error', message, delayClose: false})
);
}; | the_stack |
import { LokiEventEmitter } from "./event_emitter";
import { UniqueIndex } from "./unique_index";
import { ResultSet } from "./result_set";
import { DynamicView } from "./dynamic_view";
import { IRangedIndex, RangedIndexFactoryMap } from "./ranged_indexes";
import { ComparatorMap } from "./comparators";
import { clone, CloneMethod } from "./clone";
import { Doc, Dict } from "../../common/types";
import { FullTextSearch } from "../../full-text-search/src/full_text_search";
import { PLUGINS } from "../../common/plugin";
import { Analyzer } from "../../full-text-search/src/analyzer/analyzer";
// Arithmetic mean of the given numbers.
// NOTE: returns NaN for an empty array (0 / 0), matching the original behavior.
function average(array: number[]): number {
  let sum = 0;
  for (const value of array) {
    sum += value;
  }
  return sum / array.length;
}
// Population standard deviation of the given numbers (divides by N, not N-1).
// Returns NaN for an empty input, like average().
function standardDeviation(values: number[]): number {
  const mean = average(values);
  const squaredDiffs = values.map((value) => {
    const delta = value - mean;
    return delta * delta;
  });
  return Math.sqrt(average(squaredDiffs));
}
/**
 * Collects the value(s) of a nested property of an object into `array`.
 * When an intermediate property is an array, the remaining path is resolved
 * against every element (flattening the values across the child array).
 * @param {object} obj - the object
 * @param {string[]} path - the path of the nested property
 * @param {any[]} array - the result array (mutated: values are pushed)
 * @param {number} pathIdx - the current path idx
 * @returns {boolean} true if nested property is across child arrays, otherwise false
 */
function getNestedPropertyValue(obj: object, path: string[], array: any[], pathIdx: number = 0): boolean {
  // Guard against both undefined and null: the original only checked
  // `=== undefined`, so a null intermediate value crashed with a TypeError.
  if (obj == null) {
    return false;
  }
  if (pathIdx + 1 === path.length) {
    // Last path segment: record the value itself.
    array.push(obj[path[pathIdx]]);
    return false;
  }
  const curr = obj[path[pathIdx]];
  if (Array.isArray(curr)) {
    // Fan out over every element of the child array.
    for (let i = 0; i < curr.length; i++) {
      getNestedPropertyValue(curr[i], path, array, pathIdx + 1);
    }
    return true;
  }
  return getNestedPropertyValue(curr, path, array, pathIdx + 1);
}
/**
* Collection class that handles documents of same type
* @extends LokiEventEmitter
* @param <TData> - the data type
* @param <TNested> - nested properties of data type
*/
export class Collection<TData extends object = object, TNested extends object = object, T extends TData & TNested = TData & TNested> extends LokiEventEmitter {
// the name of the collection
public name: string;
// the data held by the collection
public _data: Doc<T>[] = [];
// index of id
private _idIndex: number[] = [];
// user defined indexes
public _rangedIndexes: { [P in keyof T]?: Collection.RangedIndexMeta } = {};
// loki obj map
public _lokimap: { [$loki : number]: Doc<T> } = {};
// default comparator name to use for unindexed sorting
public _unindexedSortComparator: string = "js";
// default LokiOperatorPackage ('default' uses fastest 'javascript' comparisons)
public _defaultLokiOperatorPackage: string = "js";
/**
* Unique constraints contain duplicate object references, so they are not persisted.
* We will keep track of properties which have unique constraints applied here, and regenerate on load.
*/
public _constraints: {
unique: {
[P in keyof T]?: UniqueIndex;
}
} = {unique: {}};
/**
* Transforms will be used to store frequently used query chains as a series of steps which itself can be stored along
* with the database.
*/
// Named transform chains, addressable via addTransform/getTransform/setTransform.
public _transforms: Dict<Collection.Transform<T>[]> = {};
/**
* In autosave scenarios we will use collection level dirty flags to determine whether save is needed.
* currently, if any collection is dirty we will autosave the whole database if autosave is configured.
* Defaulting to true since this is called from addCollection and adding a collection should trigger save.
*/
public _dirty: boolean = true;
// private holder for cached data (index/data/ranged-index snapshot; null when no cache is held)
private _cached: {
index: number[];
data: Doc<T>[];
rangedIndexes: { [name: string]: Collection.RangedIndexMeta };
} = null;
/**
* Is collection transactional (enables commit/rollback in startTransaction/commit/rollback).
*/
private _transactional: boolean;
/**
* Options to clone objects when inserting them.
*/
public _cloneObjects: boolean;
/**
* Default clone method (if enabled) is parse-stringify.
*/
public _cloneMethod: CloneMethod;
/**
* If set to true we will not maintain a meta property for a document.
*/
private _disableMeta: boolean;
/**
* Disable track changes.
*/
private _disableChangesApi: boolean;
/**
* Disable delta update object style on changes.
*/
public _disableDeltaChangesApi: boolean;
/**
* By default, if you insert a document with a Date value for an indexed property, we will convert that value to number.
*/
private _serializableIndexes: boolean;
/**
* Name of path of used nested properties.
*/
private _nestedProperties: { name: keyof TNested, path: string[] }[] = [];
/**
* Option to activate a cleaner daemon - clears "aged" documents at set intervals.
*/
public _ttl: Collection.TTL = {
age: null,
ttlInterval: null,
daemon: null
};
// currentMaxId - change manually at your own peril! Highest $loki id handed out so far.
private _maxId: number = 0;
// Live DynamicView instances attached to this collection.
private _dynamicViews: DynamicView<T>[] = [];
/**
* Changes are tracked by collection and aggregated by the db.
*/
private _changes: Collection.Change[] = [];
/**
* stages: a map of uniquely identified 'stages', which hold copies of objects to be
* manipulated without affecting the data in the original collection
*/
private _stages: object = {};
// Log of staged commits (timestamp/message/data records).
private _commitLog: { timestamp: number; message: string; data: any }[] = [];
// Full-text search plugin instance; null when the plugin is absent or unconfigured (see constructor).
public _fullTextSearch: FullTextSearch;
/**
* @param {string} name - collection name
* @param {(object)} [options={}] - a configuration object
* @param {string[]} [options.unique=[]] - array of property names to define unique constraints for
* @param {string[]} [options.exact=[]] - array of property names to define exact constraints for
* @param {RangedIndexOptions} [options.rangedIndexes] - configuration object for ranged indexes
* @param {boolean} [options.asyncListeners=false] - whether listeners are invoked asynchronously
* @param {boolean} [options.disableMeta=false] - set to true to disable meta property on documents
* @param {boolean} [options.disableChangesApi=true] - set to false to enable Changes API
* @param {boolean} [options.disableDeltaChangesApi=true] - set to false to enable Delta Changes API (requires Changes API, forces cloning)
* @param {boolean} [options.clone=false] - specify whether inserts and queries clone to/from user
* @param {boolean} [options.serializableIndexes=true] - converts date values on binary indexed property values are serializable
* @param {string} [options.cloneMethod="deep"] - the clone method
* @param {number} [options.transactional=false] - whether the collection supports commit/rollback transactions
* @param {number} [options.ttl=] - age of document (in ms.) before document is considered aged/stale.
* @param {number} [options.ttlInterval=] - time interval for clearing out 'aged' documents; not set by default
* @param {string} [options.unindexedSortComparator="js"] "js", "abstract", "abstract-date", "loki" or other registered comparator name
* @param {string} [options.defaultLokiOperatorPackage="js"] "js", "loki", "comparator" (or user defined) query ops package
* @param {FullTextSearch.FieldOptions} [options.fullTextSearch=] - the full-text search options
* @see {@link Loki#addCollection} for normal creation of collections
*/
constructor(name: string, options: Collection.Options<TData, TNested> = {}) {
super();
// Consistency checks: Changes API, Delta Changes API and TTL all need document meta,
// so disableMeta is incompatible with enabling any of them.
if (options && options.disableMeta === true) {
if (options.disableChangesApi === false) {
throw new Error("disableMeta option cannot be passed as true when disableChangesApi is passed as false");
}
if (options.disableDeltaChangesApi === false) {
throw new Error("disableMeta option cannot be passed as true when disableDeltaChangesApi is passed as false");
}
if (typeof options.ttl === "number" && options.ttl > 0) {
throw new Error("disableMeta option cannot be passed as true when ttl is enabled");
}
}
// the name of the collection
this.name = name;
/* OPTIONS */
this._unindexedSortComparator = options.unindexedSortComparator || "js";
this._defaultLokiOperatorPackage = options.defaultLokiOperatorPackage || "js";
// exact match and unique constraints; a scalar unique option is normalized to an array
if (options.unique !== undefined) {
if (!Array.isArray(options.unique)) {
options.unique = [options.unique];
}
options.unique.forEach((prop) => {
this._constraints.unique[prop] = new UniqueIndex(prop as string);
});
}
// Full text search: only instantiated when the plugin is registered AND options request it.
if (PLUGINS["FullTextSearch"] !== undefined) {
this._fullTextSearch = options.fullTextSearch !== undefined
? new (PLUGINS["FullTextSearch"])(options.fullTextSearch) : null;
} else {
this._fullTextSearch = null;
}
// transactional support (default off)
this._transactional = options.transactional !== undefined ? options.transactional : false;
// clone documents on insert/query (default off)
this._cloneObjects = options.clone !== undefined ? options.clone : false;
// async event listener invocation (default off)
this._asyncListeners = options.asyncListeners !== undefined ? options.asyncListeners : false;
// document meta maintenance (default on, i.e. disableMeta=false)
this._disableMeta = options.disableMeta !== undefined ? options.disableMeta : false;
// Changes API (default disabled)
this._disableChangesApi = options.disableChangesApi !== undefined ? options.disableChangesApi : true;
// Delta Changes API (default disabled)
this._disableDeltaChangesApi = options.disableDeltaChangesApi !== undefined ? options.disableDeltaChangesApi : true;
// clone strategy (default parse-stringify style "deep")
this._cloneMethod = options.cloneMethod !== undefined ? options.cloneMethod : "deep";
if (this._disableChangesApi) {
this._disableDeltaChangesApi = true;
}
// convert Date values to epoch numbers on indexed properties (default on)
this._serializableIndexes = options.serializableIndexes !== undefined ? options.serializableIndexes : true;
// nested properties: accept either "a.b.c" strings or pre-split {name, path} entries
if (options.nestedProperties != undefined) {
for (let i = 0; i < options.nestedProperties.length; i++) {
const nestedProperty = options.nestedProperties[i];
if (typeof nestedProperty === "string") {
this._nestedProperties.push({name: nestedProperty, path: nestedProperty.split(".")});
} else {
this._nestedProperties.push(nestedProperty as { name: keyof TNested, path: string[] });
}
}
}
this.setTTL(options.ttl || -1, options.ttlInterval);
// events
this._events = {
"insert": [],
"update": [],
"pre-insert": [],
"pre-update": [],
"close": [],
"flushbuffer": [],
"error": [],
"delete": [],
"warning": []
};
// initialize the id index
this._ensureId();
let rangedIndexes: Collection.RangedIndexOptions = options.rangedIndexes || {};
for (let ri in rangedIndexes) {
// Todo: any way to type annotate this as typesafe generic?
this.ensureRangedIndex(ri, rangedIndexes[ri].indexTypeName, rangedIndexes[ri].comparatorName);
}
this.setChangesApi(this._disableChangesApi, this._disableDeltaChangesApi);
// for de-serialization purposes
this.flushChanges();
}
/**
* Serializes the collection to a plain-object form suitable for JSON.stringify.
* Property order is intentionally kept stable since consumers may compare or
* persist the stringified output.
* @returns {Collection.Serialized} plain serializable snapshot of this collection
*/
toJSON(): Collection.Serialized {
return {
name: this.name,
unindexedSortComparator: this._unindexedSortComparator,
defaultLokiOperatorPackage: this._defaultLokiOperatorPackage,
_dynamicViews: this._dynamicViews,
uniqueNames: Object.keys(this._constraints.unique),
transforms: this._transforms as any,
rangedIndexes: this._rangedIndexes as any,
_data: this._data,
idIndex: this._idIndex,
maxId: this._maxId,
_dirty: this._dirty,
_nestedProperties: this._nestedProperties,
transactional: this._transactional,
asyncListeners: this._asyncListeners,
disableMeta: this._disableMeta,
disableChangesApi: this._disableChangesApi,
disableDeltaChangesApi: this._disableDeltaChangesApi,
cloneObjects: this._cloneObjects,
cloneMethod: this._cloneMethod,
changes: this._changes,
_fullTextSearch: this._fullTextSearch
};
}
/**
* Re-creates a Collection instance from its serialized (toJSON) representation.
* @param {Collection.Serialized} obj - serialized collection snapshot
* @param {Collection.DeserializeOptions} [options] - optional inflation options
*        (per-collection proto/inflate hooks keyed by collection name, dirty-flag
*        retention, full-text search options)
* @returns {Collection} the inflated collection
*/
static fromJSONObject(obj: Collection.Serialized, options?: Collection.DeserializeOptions) {
// instantiate collection with options needed by constructor
let coll = new Collection<any>(obj.name, {
disableChangesApi: obj.disableChangesApi,
disableDeltaChangesApi: obj.disableDeltaChangesApi,
unindexedSortComparator: obj.unindexedSortComparator,
defaultLokiOperatorPackage: obj.defaultLokiOperatorPackage
});
coll._transactional = obj.transactional;
coll._asyncListeners = obj.asyncListeners;
coll._disableMeta = obj.disableMeta;
coll._disableChangesApi = obj.disableChangesApi;
coll._cloneObjects = obj.cloneObjects;
coll._cloneMethod = obj.cloneMethod || "deep";
coll._changes = obj.changes;
coll._nestedProperties = obj._nestedProperties as any[];
coll._rangedIndexes = obj.rangedIndexes || {};
coll._dirty = (options && options.retainDirtyFlags === true) ? obj._dirty : false;
// Builds the per-document loader from user-supplied options: either a prototype
// inflater (optionally combined with a custom inflate fn) or a plain inflate fn.
// (Parameter renamed from "coll" to avoid shadowing the collection being built.)
function makeLoader(serialized: Collection.Serialized) {
const collOptions = options[serialized.name];
if (collOptions.proto) {
const inflater = collOptions.inflate || ((src: Doc<any>, dest: Doc<any>) => {
for (let prop in src) {
dest[prop] = src[prop];
}
});
return (data: Doc<any>) => {
const collObj = new (collOptions.proto)();
inflater(data, collObj);
return collObj;
};
}
return collOptions.inflate;
}
// load each element individually
if (options && options[obj.name] !== undefined) {
let loader = makeLoader(obj);
for (let j = 0; j < obj._data.length; j++) {
coll._data[j] = coll._defineNestedProperties(loader(obj._data[j]));
// regenerate lokimap
coll._lokimap[coll._data[j].$loki] = coll._data[j];
}
} else {
for (let j = 0; j < obj._data.length; j++) {
coll._data[j] = coll._defineNestedProperties(obj._data[j]);
// regenerate lokimap
coll._lokimap[coll._data[j].$loki] = coll._data[j];
}
}
coll._maxId = (obj.maxId === undefined) ? 0 : obj.maxId;
coll._idIndex = obj.idIndex;
if (obj.transforms !== undefined) {
coll._transforms = obj.transforms;
}
// inflate rangedindexes
for (let ri in obj.rangedIndexes) {
// shortcut reference to serialized meta
let sri = obj.rangedIndexes[ri];
// lookup index factory function in map based on index type name
let rif = RangedIndexFactoryMap[sri.indexTypeName];
// lookup comparator function in map based on comparator name
let ricmp = ComparatorMap[sri.comparatorName];
// using index type (from meta), index factory and comparator... create instance of ranged index
let rii = rif(ri, ricmp);
// now ask new index instance to inflate from plain object
rii.restore(sri.index);
// attach class instance to our collection's ranged index's (index) instance property
coll._rangedIndexes[ri].index = rii;
}
coll._ensureId();
// regenerate unique indexes
if (obj.uniqueNames !== undefined) {
for (let j = 0; j < obj.uniqueNames.length; j++) {
coll.ensureUniqueIndex(obj.uniqueNames[j]);
}
}
// in case they are loading a database created before we added dynamic views, handle undefined
if (obj._dynamicViews !== undefined) {
// reinflate DynamicViews and attached ResultSets
for (let idx = 0; idx < obj._dynamicViews.length; idx++) {
coll._dynamicViews.push(DynamicView.fromJSONObject(coll, obj._dynamicViews[idx] as any));
}
}
if (obj._fullTextSearch) {
// BUGFIX: guard options — previously `options.fullTextSearch` threw a TypeError
// when a serialized full-text index was present but no options were supplied.
coll._fullTextSearch = PLUGINS["FullTextSearch"].fromJSONObject(obj._fullTextSearch,
options !== undefined ? options.fullTextSearch : undefined);
}
return coll;
}
/**
* Adds a named collection transform to the collection
* @param {string} name - name to associate with transform
* @param {array} transform - an array of transformation 'step' objects to save into the collection
*/
public addTransform(name: string, transform: Collection.Transform<T>[]): void {
if (this._transforms[name] !== undefined) {
throw new Error("a transform by that name already exists");
}
this._transforms[name] = transform;
}
/**
* Looks up a previously registered transform chain by name.
* @param {string} name - name of the transform to lookup.
* @returns the stored transform steps, or undefined when no transform has that name
*/
public getTransform(name: string): Collection.Transform<T>[] {
const { _transforms } = this;
return _transforms[name];
}
/**
* Stores a named transform chain, creating it or overwriting any existing one.
* @param {string} name - name to associate with transform
* @param {object} transform - transformation steps to save into the collection
*/
public setTransform(name: string, transform: Collection.Transform<T>[]): void {
// Unconditional assignment — unlike addTransform, replacing is allowed here.
this._transforms[name] = transform;
}
/**
* Deletes a named transform chain from the collection (no-op if absent).
* @param {string} name - name of collection transform to remove
*/
public removeTransform(name: string): void {
delete this._transforms[name];
}
/*----------------------------+
| TTL |
+----------------------------*/
private setTTL(age: number, interval: number): void {
if (age < 0) {
clearInterval(this._ttl.daemon);
} else {
this._ttl.age = age;
this._ttl.ttlInterval = interval;
this._ttl.daemon = setInterval(() => {
const now = Date.now();
const toRemove = this.chain().where((member: Doc<T>) => {
const timestamp = member.meta.updated || member.meta.created;
const diff = now - timestamp;
return this._ttl.age < diff;
});
toRemove.remove();
}, interval);
}
}
/*----------------------------+
| INDEXING |
+----------------------------*/
/**
* Builds a positional row filter covering every document in the collection.
* @returns {number[]} the array [0, 1, ..., data.length - 1]
*/
_prepareFullDocIndex(): number[] {
return Array.from({ length: this._data.length }, (_, pos) => pos);
}
/**
* Ensures a ranged index exists for a field (thin alias of ensureRangedIndex).
* @param field property to index
* @param indexTypeName optional index type factory name
* @param comparatorName optional comparator name
*/
public ensureIndex(field: string, indexTypeName?: string, comparatorName?: string) {
// Delegate straight through; kept for API compatibility.
this.ensureRangedIndex(field, indexTypeName, comparatorName);
}
/**
* Creates (or rebuilds) a ranged index over a field, populating it from all
* current documents.
* @param field Property to create an index on (need to look into contraining on keyof T)
* @param indexTypeName Name of IndexType factory within (global?) hashmap to create IRangedIndex from (default "avl")
* @param comparatorName Name of Comparator within (global?) hashmap (default "loki")
* @throws {Error} when the index type or comparator name is not registered
*/
public ensureRangedIndex(field: string, indexTypeName?: string, comparatorName?: string) {
const typeName = indexTypeName || "avl";
const cmpName = comparatorName || "loki";
const factory = RangedIndexFactoryMap[typeName];
if (!factory) {
throw new Error("ensureRangedIndex: Unknown range index type");
}
const comparator = ComparatorMap[cmpName];
if (!comparator) {
throw new Error("ensureRangedIndex: Unknown comparator");
}
this._rangedIndexes[field] = {
index: factory(field, comparator),
indexTypeName: typeName,
comparatorName: cmpName
};
// Seed the fresh index with every existing document's value for this field.
const index = this._rangedIndexes[field].index;
for (const doc of this._data) {
index.insert(doc.$loki, doc[field]);
}
}
/**
* Creates (or rebuilds) the unique-constraint index for a field from current data.
* Always rebuilds from scratch: (re)loading an existing index would likely collide.
* @param field property to enforce uniqueness on
* @returns {UniqueIndex} the freshly built index
*/
public ensureUniqueIndex(field: keyof T) {
const index = new UniqueIndex(field as string);
this._constraints.unique[field] = index;
for (const doc of this._data) {
index.set(doc.$loki, doc[field]);
}
return index;
}
/**
* Quickly determine number of documents in collection (or query)
* @param {object} query - (optional) query object to count results of
* @returns {number} number of documents in the collection
*/
public count(query?: ResultSet.Query<Doc<T>>): number {
// Without a query, the raw data length is authoritative and cheapest.
return query ? this.chain().find(query)._filteredRows.length : this._data.length;
}
/**
* Rebuilds the idIndex ($loki id array) from the current data array.
*/
private _ensureId(): void {
this._idIndex = this._data.map((doc) => doc.$loki);
}
/**
* Add a dynamic view to the collection
* @param {string} name - name of dynamic view to add
* @param {object} options - (optional) options to configure dynamic view with
* @param {boolean} [options.persistent=false] - indicates if view is to main internal results array in 'resultdata'
* @param {string} [options.sortPriority=SortPriority.PASSIVE] - the sort priority
* @param {number} options.minRebuildInterval - minimum rebuild interval (need clarification to docs here)
* @returns {DynamicView} reference to the dynamic view added
**/
public addDynamicView(name: string, options?: DynamicView.Options): DynamicView<T> {
const view = new DynamicView<T>(this as any as Collection<T>, name, options);
this._dynamicViews.push(view);
return view;
}
/**
* Remove a dynamic view from the collection
* @param {string} name - name of dynamic view to remove
**/
public removeDynamicView(name: string): void {
for (let idx = 0; idx < this._dynamicViews.length; idx++) {
if (this._dynamicViews[idx].name === name) {
this._dynamicViews.splice(idx, 1);
}
}
}
/**
* Look up dynamic view reference from within the collection
* @param {string} name - name of dynamic view to retrieve reference of
* @returns {DynamicView} A reference to the dynamic view with that name
**/
public getDynamicView(name: string): DynamicView<T> {
for (let idx = 0; idx < this._dynamicViews.length; idx++) {
if (this._dynamicViews[idx].name === name) {
return this._dynamicViews[idx];
}
}
return null;
}
/**
* Applies a 'mongo-like' find query object and passes all results to an update function.
* @param {object} filterObject - the 'mongo-like' query object
* @param {function} updateFunction - the update function
*/
public findAndUpdate(filterObject: ResultSet.Query<Doc<T>>, updateFunction: (obj: Doc<T>) => any) {
const matches = this.chain().find(filterObject);
matches.update(updateFunction);
}
/**
* Applies a 'mongo-like' find query object removes all documents which match that filter.
* @param {object} filterObject - 'mongo-like' query object
*/
public findAndRemove(filterObject: ResultSet.Query<Doc<T>>) {
const matches = this.chain().find(filterObject);
matches.remove();
}
/**
* Adds object(s) to collection, ensure object(s) have meta properties, clone it if necessary, etc.
* @param {(object|array)} doc - the document (or array of documents) to be inserted
* @returns {(object|array)} document or documents inserted
*/
public insert(doc: TData): Doc<T>;
public insert(doc: TData[]): Doc<T>[];
public insert(doc: TData | TData[]): Doc<T> | Doc<T>[] {
// Single-document path: delegate entirely to insertOne.
if (!Array.isArray(doc)) {
return this.insertOne(doc);
}
// Batch path: pre-insert/insert are emitted once for the whole batch while
// insertOne is run in quiet (bulkInsert) mode for each element.
this.emit("pre-insert", doc);
let results = [];
for (const entry of doc) {
const inserted = this.insertOne(entry, true);
if (!inserted) {
return undefined;
}
results.push(inserted);
}
// at the 'batch' level, if clone option is true then emitted docs are clones
this.emit("insert", results);
// if clone option is set, clone return values
if (this._cloneObjects) {
results = clone(results, this._cloneMethod);
}
return results.length === 1 ? results[0] : results;
}
/**
* Adds a single object, ensures it has meta properties, clone it if necessary, etc.
* @param {object} doc - the document to be inserted
* @param {boolean} bulkInsert - quiet pre-insert and insert event emits
* @returns {object} document or 'undefined' if there was a problem inserting it
*/
public insertOne(doc: TData, bulkInsert = false): Doc<T> {
// Validate the argument before touching any collection state.
// Note: typeof null === "object", so the null case needs its own branch.
let err = null;
if (typeof doc !== "object") {
err = new TypeError("Document needs to be an object");
} else if (doc === null) {
err = new TypeError("Object cannot be null");
}
if (err !== null) {
this.emit("error", err);
throw err;
}
// if configured to clone, do so now... otherwise just use same obj reference
const obj = this._defineNestedProperties(this._cloneObjects ? clone(doc, this._cloneMethod) : doc) as T;
if (!this._disableMeta && (obj as Doc<TData>).meta === undefined) {
(obj as Doc<TData>).meta = {
version: 0,
revision: 0,
created: 0
};
}
// both 'pre-insert' and 'insert' events are passed internal data reference even when cloning
// insert needs internal reference because that is where loki itself listens to add meta
if (!bulkInsert) {
this.emit("pre-insert", obj);
}
if (!this._add(obj)) {
return undefined;
}
// update meta and store changes if ChangesAPI is enabled
// (moved from "insert" event listener to allow internal reference to be used)
if (this._disableChangesApi) {
this._insertMeta(obj as Doc<TData>);
} else {
this._insertMetaWithChange(obj as Doc<TData>);
}
// if cloning is enabled, emit insert event with clone of new object
const returnObj = this._cloneObjects ? clone(obj, this._cloneMethod) : obj;
if (!bulkInsert) {
this.emit("insert", returnObj);
}
return returnObj as Doc<T>;
}
/**
* Refers nested properties of an object to the root of it.
* Defines a non-enumerable accessor on `data` for each configured nested
* property, so `doc.name` reads/writes through `doc.path[0].path[1]...`.
* @param {T} data - the object
* @returns {T & TNested} the object with nested properties
* @hidden
*/
_defineNestedProperties<U extends TData>(data: U): U & TNested {
for (let i = 0; i < this._nestedProperties.length; i++) {
const name = this._nestedProperties[i].name;
const path = this._nestedProperties[i].path;
Object.defineProperty(data, name, {
get() {
// Get the value of the nested property.
// NOTE(review): getNestedPropertyValue appears to return true when the
// traversal fans out over an array (return all hits) and false for a
// single scalar hit (return array[0]) — confirm against its definition.
const array: any[] = [];
if (getNestedPropertyValue(this, path, array)) {
return array;
} else {
return array[0];
}
},
set(val: any) {
// Set the value of the nested property: walk to the parent object of
// the final path segment, then assign. Throws (TypeError on null) if an
// intermediate segment is missing — the reduce yields null in that case.
path.slice(0, path.length - 1).reduce((obj: any, part: string) =>
(obj && obj[part]) ? obj[part] : null, this)[path[path.length - 1]] = val;
},
// non-enumerable so serialization/iteration does not see the synthetic property
enumerable: false,
configurable: true
});
}
return data as U & TNested;
}
/**
* Empties the collection.
* @param {boolean} [removeIndices=false] - when true, index definitions are
*        dropped entirely; otherwise indexes are emptied but kept configured
*/
public clear({removeIndices: removeIndices = false} = {}) {
// Reset core document storage and bookkeeping.
this._data = [];
this._idIndex = [];
this._cached = null;
this._maxId = 0;
this._dynamicViews = [];
this._dirty = true;
if (removeIndices === true) {
// Drop index definitions entirely.
this._rangedIndexes = {};
this._constraints = {
unique: {}
};
} else {
// Keep definitions but empty their contents:
// re-instance each ranged index with its original type/comparator...
for (const ri of Object.keys(this._rangedIndexes)) {
this.ensureRangedIndex(ri, this._rangedIndexes[ri].indexTypeName, this._rangedIndexes[ri].comparatorName);
}
// ...and clear every unique index in place.
for (const uniqueName of Object.keys(this._constraints.unique)) {
this._constraints.unique[uniqueName].clear();
}
}
if (this._fullTextSearch !== null) {
this._fullTextSearch.clear();
}
}
/**
* Updates an object and notifies collection that the document has changed.
* Accepts a single document or an array (processed one by one). The document
* must already carry a $loki id (i.e. have been inserted).
* @param {object} doc - document to update within the collection
* @throws {Error} if the document has no $loki id or is not in the collection
*/
public update(doc: Doc<T> | Doc<T>[]): void {
if (Array.isArray(doc)) {
for (let i = 0; i < doc.length; i++) {
this.update(doc[i]);
}
return;
}
// Verify object is a properly formed document.
if (doc.$loki === undefined) {
throw new Error("Trying to update unsynced document. Please save the document first by using insert() or addMany()");
}
try {
this.startTransaction();
const arr = this.get(doc.$loki, true);
if (!arr) {
throw new Error("Trying to update a document not in collection.");
}
// ref to existing obj
let oldInternal = arr[0]; // -internal- obj ref
let position = arr[1]; // position in data array
// ref to new internal obj
// if configured to clone, do so now... otherwise just use same obj reference
// (delta changes force a clone so the old object survives for diffing)
let newInternal = this._defineNestedProperties(this._cloneObjects || !this._disableDeltaChangesApi ? clone(doc, this._cloneMethod) : doc);
this.emit("pre-update", doc);
// re-point every unique index entry at the new values
Object.keys(this._constraints.unique).forEach((key) => {
this._constraints.unique[key].update(newInternal.$loki, newInternal[key]);
});
// operate the update
this._data[position] = newInternal;
this._lokimap[doc.$loki] = newInternal;
// now that we can efficiently determine the data[] position of newly added document,
// submit it for all registered DynamicViews to evaluate for inclusion/exclusion
for (let idx = 0; idx < this._dynamicViews.length; idx++) {
this._dynamicViews[idx]._evaluateDocument(position, false);
}
// Notify all ranged indexes of (possible) value update
// NOTE(review): this reads doc[ri] (the caller's object) rather than
// newInternal[ri], and unlike _add it applies no Date->epoch conversion for
// serializable indexes — confirm this asymmetry is intended.
for (let ri in this._rangedIndexes) {
this._rangedIndexes[ri].index.update(doc.$loki, doc[ri]);
}
this._idIndex[position] = newInternal.$loki;
// FullTextSearch.
if (this._fullTextSearch !== null) {
this._fullTextSearch.updateDocument(doc, position);
}
this.commit();
this._dirty = true; // for autosave scenarios
// update meta and store changes if ChangesAPI is enabled
if (this._disableChangesApi) {
this._updateMeta(newInternal);
}
else {
this._updateMetaWithChange(newInternal, oldInternal);
}
let returnObj = newInternal;
// if cloning is enabled, emit 'update' event and return with clone of new object
if (this._cloneObjects) {
returnObj = clone(newInternal, this._cloneMethod);
}
this.emit("update", returnObj, oldInternal);
} catch (err) {
this.rollback();
this.emit("error", err);
throw (err); // re-throw error so user does not think it succeeded
}
}
/**
* Add object to collection. Assigns the next $loki id, registers the document
* in all id/unique/ranged/full-text indexes and dynamic views, inside a
* transaction that is rolled back on any failure.
* @param obj - document to add (must NOT already carry a $loki id)
* @returns the stored document (a clone when clone mode is on)
* @throws {TypeError} when obj is not an object
* @throws {Error} when obj already has a $loki id (use update() instead)
*/
private _add(obj: T) {
// if parameter isn't object exit with throw
if ("object" !== typeof obj) {
throw new TypeError("Object being added needs to be an object");
}
// if object you are adding already has id column it is either already in the collection
// or the object is carrying its own 'id' property. If it also has a meta property,
// then this is already in collection so throw error, otherwise rename to originalId and continue adding.
if (obj["$loki"] !== undefined) {
throw new Error("Document is already in collection, please use update()");
}
/*
* try adding object to collection
*/
try {
this.startTransaction();
this._maxId++;
// Recovery path: if maxId was corrupted (NaN), re-derive it from the
// highest existing document id.
if (isNaN(this._maxId)) {
this._maxId = (this._data[this._data.length - 1].$loki + 1);
}
const newDoc = obj as Doc<T>;
newDoc.$loki = this._maxId;
if (!this._disableMeta) {
newDoc.meta.version = 0;
}
// register the new id/value pair in every unique-constraint index
const constrUnique = this._constraints.unique;
for (const key in constrUnique) {
if (constrUnique[key] !== undefined) {
constrUnique[key].set(newDoc.$loki, newDoc[key]);
}
}
// add new obj id to idIndex
this._idIndex.push(newDoc.$loki);
// update lokimap
this._lokimap[newDoc.$loki] = newDoc;
// add the object
this._data.push(newDoc);
const addedPos = this._data.length - 1;
// now that we can efficiently determine the data[] position of newly added document,
// submit it for all registered DynamicViews to evaluate for inclusion/exclusion
const dvlen = this._dynamicViews.length;
for (let i = 0; i < dvlen; i++) {
this._dynamicViews[i]._evaluateDocument(addedPos, true);
}
// add id/val kvp to ranged index
for (let ri in this._rangedIndexes) {
// ensure Dates are converted to unix epoch time if serializableIndexes is true
if (this._serializableIndexes && newDoc[ri] instanceof Date) {
newDoc[ri] = newDoc[ri].getTime();
}
// obj === newDoc here, so the Date conversion above is reflected in obj[ri]
this._rangedIndexes[ri].index.insert(obj["$loki"], obj[ri]);
}
// FullTextSearch.
if (this._fullTextSearch !== null) {
this._fullTextSearch.addDocument(newDoc, addedPos);
}
this.commit();
this._dirty = true; // for autosave scenarios
return (this._cloneObjects) ? (clone(newDoc, this._cloneMethod)) : (newDoc);
} catch (err) {
this.rollback();
this.emit("error", err);
throw (err); // re-throw error so user does not think it succeeded
}
}
/**
* Applies a filter function and passes all results to an update function.
* A failing update triggers a rollback and re-throws.
* @param {function} filterFunction - the filter function
* @param {function} updateFunction - the update function
*/
updateWhere(filterFunction: (obj: Doc<T>) => boolean, updateFunction: (obj: Doc<T>) => Doc<T>) {
const matches = this.where(filterFunction);
try {
for (const match of matches) {
this.update(updateFunction(match));
}
} catch (err) {
this.rollback();
throw err;
}
}
/**
* Remove all documents matching supplied filter function.
* @param {function} filterFunction - the filter function
*/
public removeWhere(filterFunction: (obj: Doc<T>) => boolean) {
const matches = this._data.filter(filterFunction);
this.remove(matches);
}
/**
* Removes every document from the collection while leaving indexes/views configured.
* Operates on a shallow copy since remove() mutates the data array.
*/
public removeDataOnly() {
this.remove([...this._data]);
}
/**
* Remove a document from the collection. Accepts a $loki id, a document, or an
* array of documents (removed one by one). De-registers the document from all
* indexes, dynamic views and full-text search inside a transaction, then strips
* its $loki/meta so it can be re-inserted.
* @param {number|object} doc - document to remove from collection
* @throws {Error} if the object carries no $loki id
*/
remove(doc: number | Doc<T> | Doc<T>[]): void {
if (typeof doc === "number") {
doc = this.get(doc);
}
if (Array.isArray(doc)) {
let k = 0;
const len = doc.length;
for (k; k < len; k++) {
this.remove(doc[k]);
}
return;
}
if (doc.$loki === undefined) {
throw new Error("Object is not a document stored in the collection");
}
try {
this.startTransaction();
// positional lookup: arr[0] is the internal doc ref, arr[1] its data[] index
const arr = this.get(doc.$loki, true);
const position = arr[1];
// already converted but let's narrow to make typescript happy
// NOTE(review): the "number" branch here is dead — doc was converted above.
let aDoc : Doc<T> = (typeof doc === "number") ? this.get(doc) : doc;
// drop the id from every unique index that covers one of its properties
Object.keys(this._constraints.unique).forEach((key) => {
if (key in aDoc) {
this._constraints.unique[key].remove(aDoc.$loki);
}
});
// now that we can efficiently determine the data[] position of newly added document,
// submit it for all registered DynamicViews to remove
for (let idx = 0; idx < this._dynamicViews.length; idx++) {
this._dynamicViews[idx]._removeDocument(position);
}
this._data.splice(position, 1);
// remove id from idIndex
this._idIndex.splice(position, 1);
// remove from lokimap
delete this._lokimap[doc.$loki];
// remove id/val kvp from binary tree index
for (let ri in this._rangedIndexes) {
this._rangedIndexes[ri].index.remove(doc.$loki);
}
// FullTextSearch.
if (this._fullTextSearch !== null) {
this._fullTextSearch.removeDocument(doc, position);
}
this.commit();
this._dirty = true; // for autosave scenarios
if (!this._disableChangesApi) {
this._createChange(this.name, "R", arr[0]);
}
this.emit("delete", arr[0]);
// strip loki bookkeeping from the caller's object so it could be re-inserted
delete doc.$loki;
delete doc.meta;
} catch (err) {
this.rollback();
this.emit("error", err);
throw err;
}
}
/*------------+
| Change API |
+------------*/
/**
* Returns the change log accumulated since the last flushChanges().
* @returns {Collection.Change[]} live reference to the internal change array
*/
public getChanges(): Collection.Change[] {
const { _changes } = this;
return _changes;
}
/**
* Enables/disables changes api.
* @param {boolean} disableChangesApi
* @param {boolean} disableDeltaChangesApi
*/
public setChangesApi(disableChangesApi: boolean, disableDeltaChangesApi: boolean = true) {
this._disableChangesApi = disableChangesApi;
// Delta tracking depends on the changes api: force it off when changes are off.
this._disableDeltaChangesApi = disableChangesApi || disableDeltaChangesApi;
}
/**
* Clears all the changes accumulated in the change log.
*/
public flushChanges() {
this._changes = [];
}
/**
* Recursively computes the delta between an old and a new document state.
* Object-valued properties are diffed recursively; $loki, meta and uniquely
* indexed properties are always copied wholesale. Returns undefined when no
* difference exists; for non-object values returns the new value when changed.
* @param oldObject previous state
* @param newObject new state
*/
private _getObjectDelta(oldObject: Doc<TData>, newObject: Doc<TData>) {
const propertyNames = newObject !== null && typeof newObject === "object" ? Object.keys(newObject) : null;
if (propertyNames && propertyNames.length && ["string", "boolean", "number"].indexOf(typeof(newObject)) < 0) {
const delta = {};
for (let i = 0; i < propertyNames.length; i++) {
const propertyName = propertyNames[i];
if (newObject.hasOwnProperty(propertyName)) {
if (!oldObject.hasOwnProperty(propertyName) || this._constraints.unique[propertyName] !== undefined
|| propertyName === "$loki" || propertyName === "meta") {
delta[propertyName] = newObject[propertyName];
} else {
const propertyDelta = this._getObjectDelta(oldObject[propertyName], newObject[propertyName]);
// BUGFIX: the old guard also tested `propertyDelta !== {}`, which is
// always true (a fresh object literal is never reference-equal to
// anything) and therefore dead code. Empty deltas are already
// signalled via undefined by the recursive call.
if (propertyDelta !== undefined) {
delta[propertyName] = propertyDelta;
}
}
}
}
return Object.keys(delta).length === 0 ? undefined : delta;
} else {
return oldObject === newObject ? undefined : newObject;
}
}
/**
* Compare changed object (which is a forced clone) with existing object and return the delta.
* When no prior state exists, the whole new object (deep-copied) is the delta.
*/
private _getChangeDelta(obj: Doc<TData>, old: Doc<TData>) {
return old ? this._getObjectDelta(old, obj) : JSON.parse(JSON.stringify(obj));
}
/**
* Creates a clone of the current status of an object and associates operation and collection name,
* so the parent db can aggregate and generate a changes object for the entire db.
* Updates use the delta form when delta tracking is on; all other ops store a deep copy.
*/
private _createChange(name: string, op: string, obj: Doc<TData>, old?: Doc<TData>) {
const useDelta = op === "U" && !this._disableDeltaChangesApi;
const payload = useDelta ? this._getChangeDelta(obj, old) : JSON.parse(JSON.stringify(obj));
this._changes.push({
name,
operation: op,
obj: payload
});
}
// Records an "I" (insert) change entry for this collection.
private _createInsertChange(obj: Doc<TData>) {
this._createChange(this.name, "I", obj);
}
// Records a "U" (update) change entry; old state is needed for delta computation.
private _createUpdateChange(obj: Doc<TData>, old: Doc<TData>) {
this._createChange(this.name, "U", obj, old);
}
// Stamps insert meta on the document, then records the insert in the change log.
private _insertMetaWithChange(obj: Doc<TData>) {
this._insertMeta(obj);
this._createInsertChange(obj);
}
// Stamps update meta on the document, then records the update (with old state) in the change log.
private _updateMetaWithChange(obj: Doc<TData>, old: Doc<TData>) {
this._updateMeta(obj);
this._createUpdateChange(obj, old);
}
// Initializes the meta block on a freshly inserted document (created timestamp,
// revision reset). No-op when meta maintenance is disabled.
private _insertMeta(obj: Doc<TData>) {
if (this._disableMeta) {
return;
}
if (!obj.meta) {
obj.meta = { version: 0, revision: 0, created: 0 };
}
obj.meta.created = Date.now();
obj.meta.revision = 0;
}
// Stamps the updated timestamp and bumps the revision counter on an updated
// document. No-op when meta maintenance is disabled.
private _updateMeta(obj: Doc<TData>) {
if (this._disableMeta) {
return;
}
obj.meta.updated = Date.now();
obj.meta.revision += 1;
}
/*---------------------+
| Finding methods |
+----------------------*/
/**
* Get by Id - faster than other methods because of the searching algorithm.
* Without returnPosition this is an O(1) lokimap lookup; with returnPosition a
* binary search over the (sorted) idIndex is performed to recover the position.
* @param {int} id - $loki id of document you want to retrieve
* @param {boolean} returnPosition - if 'true' we will return [object, position]
* @returns {(object|array|null)} Object reference if document was found, null if not,
* or an array if 'returnPosition' was passed.
*/
public get(id: number): Doc<T>;
public get(id: number, returnPosition: boolean): Doc<T> | [Doc<T>, number];
public get(id: number, returnPosition = false) {
// Fast path: direct map lookup when the caller does not need the position.
if (!returnPosition) {
let doc = this._lokimap[id];
if (doc === undefined) return null;
return doc;
}
const data = this._idIndex;
let max = data.length - 1;
let min = 0;
let mid = (min + max) >> 1;
// Defensive runtime coercion: callers may pass a string id despite the typing.
id = typeof id === "number" ? id : parseInt(id, 10);
if (isNaN(id)) {
throw new TypeError("Passed id is not an integer");
}
// Binary search converging min/max onto the smallest index holding a value >= id.
while (data[min] < data[max]) {
mid = (min + max) >> 1;
if (data[mid] < id) {
min = mid + 1;
} else {
max = mid;
}
}
if (max === min && data[min] === id) {
// NOTE(review): this inner check is always true — the !returnPosition case
// returned early above, so the plain-document return below is unreachable.
if (returnPosition) {
return [this._data[min], min];
}
return this._data[min];
}
return null;
}
/**
* Retrieve doc by Unique index
* @param {string} field - name of uniquely indexed property to use when doing lookup
* @param {any} value - unique value to search for
* @returns {object} document matching the value passed
*/
public by(field: keyof T, value: any): Doc<T> {
// for least amount of overhead, we will directly
// access index rather than use find codepath
let lokiId = this._constraints.unique[field].get(value);
if (!this._cloneObjects) {
return this._lokimap[lokiId];
} else {
return clone(this._lokimap[lokiId], this._cloneMethod);
}
}
/**
* Find one object by index property, by property equal to value
* @param {object} query - query object used to perform search with
* @returns {(object|null)} First matching document, or null if none
*/
public findOne(query: ResultSet.Query<Doc<T>>): Doc<T> {
// Exec find with firstOnly = true so the scan stops at the first hit.
const result = this.chain().find(query || {}, true).data();
if (Array.isArray(result) && result.length === 0) {
return null;
}
return this._cloneObjects ? clone(result[0], this._cloneMethod) : result[0];
}
/**
* Chain method, used for beginning a series of chained find() and/or view() operations
* on a collection.
*
* @param {array} transform - Ordered array of transform step objects similar to chain
* @param {object} parameters - Object containing properties representing parameters to substitute
* @returns {ResultSet} (this) ResultSet, or data array if any map or join functions where called
*/
public chain(transform?: string | Collection.Transform<T>[], parameters?: object): ResultSet<T> {
const rs = new ResultSet<T>(this as any as Collection<T>);
if (transform === undefined) {
return rs;
}
return rs.transform(transform, parameters);
}
/**
* Find method, api is similar to mongodb.
* for more complex queries use [chain()]{@link Collection#chain} or [where()]{@link Collection#where}.
* @example {@tutorial Query Examples}
* @param {object} query - 'mongo-like' query object
* @returns {array} Array of matching documents
*/
public find(query?: ResultSet.Query<Doc<T>>): Doc<T>[] {
return this.chain().find(query).data();
}
/**
* Find object by unindexed field by property equal to value,
* simply iterates and returns the first element matching the query
*/
public findOneUnindexed(prop: string, value: any) {
let i = this._data.length;
let doc;
while (i--) {
if (this._data[i][prop] === value) {
doc = this._data[i];
return doc;
}
}
return null;
}
/**
* Transaction methods
*/
/**
* start the transation
*/
public startTransaction(): void {
if (this._transactional) {
// backup any ranged indexes
let rib: { [name: string]: Collection.RangedIndexMeta } = {};
for (let ri in this._rangedIndexes) {
rib[ri].indexTypeName = this._rangedIndexes[ri].indexTypeName;
rib[ri].comparatorName = this._rangedIndexes[ri].comparatorName;
rib[ri].index = this._rangedIndexes[ri].index.backup();
}
this._cached = {
index: this._idIndex,
data: clone(this._data, this._cloneMethod),
rangedIndexes: rib,
};
// propagate startTransaction to dynamic views
for (let idx = 0; idx < this._dynamicViews.length; idx++) {
this._dynamicViews[idx].startTransaction();
}
}
}
/**
* Commit the transaction.
*/
public commit(): void {
if (this._transactional) {
this._cached = null;
// propagate commit to dynamic views
for (let idx = 0; idx < this._dynamicViews.length; idx++) {
this._dynamicViews[idx].commit();
}
}
}
  /**
   * Rollback the transaction: restores the id index, data and ranged indexes
   * from the snapshot taken by startTransaction() and propagates the rollback
   * to all dynamic views. No-op unless the collection is transactional and a
   * snapshot exists.
   */
  public rollback(): void {
    if (this._transactional) {
      if (this._cached !== null) {
        this._idIndex = this._cached.index;
        this._data = this._cached.data;
        // re-establish nested property accessors on the restored documents
        for (let i = 0; i < this._data.length; i++) {
          this._data[i] = this._defineNestedProperties(this._data[i]);
        }
        // restore ranged indexes
        for (let ri in this._cached.rangedIndexes) {
          // shortcut reference to serialized meta
          let sri = this._cached.rangedIndexes[ri];
          // lookup index factory function in map based on index type name
          let rif = RangedIndexFactoryMap[sri.indexTypeName];
          // lookup comparator function in map based on comparator name
          let ricmp = ComparatorMap[sri.comparatorName];
          // using index type (from meta), index factory and comparator... create instance of ranged index
          let rii = rif(ri, ricmp);
          // now ask new index instance to inflate from plain object
          rii.restore(sri.index);
          // attach class instance to our collection's ranged index's (index) instance property
          this._rangedIndexes[ri].index = rii;
        }
        // propagate rollback to dynamic views
        for (let idx = 0; idx < this._dynamicViews.length; idx++) {
          this._dynamicViews[idx].rollback();
        }
      }
    }
  }
/**
* Query the collection by supplying a javascript filter function.
* @example
* let results = coll.where(function(obj) {
* return obj.legs === 8;
* });
* @param {function} fun - filter function to run against all collection docs
* @returns {array} all documents which pass your filter function
*/
public where(fun: (obj: Doc<T>) => boolean): Doc<T>[] {
return this.chain().where(fun).data();
}
/**
* Map Reduce operation
* @param {function} mapFunction - function to use as map function
* @param {function} reduceFunction - function to use as reduce function
* @returns {data} The result of your mapReduce operation
*/
public mapReduce<U1, U2>(mapFunction: (value: Doc<T>, index: number, array: Doc<T>[]) => U1, reduceFunction: (array: U1[]) => U2): U2 {
return reduceFunction(this._data.map(mapFunction));
}
/**
* Join two collections on specified properties
* @param {array} joinData - array of documents to 'join' to this collection
* @param {string} leftJoinProp - property name in collection
* @param {string} rightJoinProp - property name in joinData
* @param {function} mapFun - (Optional) map function to use
* @param dataOptions - options to data() before input to your map function
* @param [dataOptions.removeMeta] - allows removing meta before calling mapFun
* @param [dataOptions.forceClones] - forcing the return of cloned objects to your map object
* @param [dataOptions.forceCloneMethod] - allows overriding the default or collection specified cloning method
* @returns {ResultSet} Result of the mapping operation
*/
public eqJoin(joinData: Collection<any> | ResultSet<any> | any[], leftJoinProp: string | ((obj: any) => string),
rightJoinProp: string | ((obj: any) => string), mapFun?: (left: any, right: any) => any,
dataOptions?: ResultSet.DataOptions): ResultSet<any> {
return new ResultSet(this).eqJoin(joinData, leftJoinProp, rightJoinProp, mapFun, dataOptions);
}
/* ------ STAGING API -------- */
/**
* (Staging API) create a stage and/or retrieve it
*/
getStage(name: string) {
if (!this._stages[name]) {
this._stages[name] = {};
}
return this._stages[name];
}
/**
* a collection of objects recording the changes applied through a commmitStage
*/
/**
* (Staging API) create a copy of an object and insert it into a stage
*/
public stage<F extends TData>(stageName: string, obj: Doc<F>): F {
const copy = JSON.parse(JSON.stringify(obj));
this.getStage(stageName)[obj.$loki] = copy;
return copy;
}
/**
* (Staging API) re-attach all objects to the original collection, so indexes and views can be rebuilt
* then create a message to be inserted in the commitlog
* @param {string} stageName - name of stage
* @param {string} message
*/
public commitStage(stageName: string, message: string) {
const stage = this.getStage(stageName);
const timestamp = new Date().getTime();
for (const prop in stage) {
this.update(stage[prop]);
this._commitLog.push({
timestamp,
message,
data: JSON.parse(JSON.stringify(stage[prop]))
});
}
this._stages[stageName] = {};
}
/**
* Returns all values of a field.
* @param {string} field - the field name
* @return {any}: the array of values
*/
public extract(field: keyof T): any[] {
const result = [];
for (let i = 0; i < this._data.length; i++) {
result.push(this._data[i][field]);
}
return result;
}
/**
* Finds the minimum value of a field.
* @param {string} field - the field name
* @return {number} the minimum value
*/
public min(field: keyof T): number {
return Math.min.apply(null, this.extractNumerical(field));
}
/**
* Finds the maximum value of a field.
* @param {string} field - the field name
* @return {number} the maximum value
*/
public max(field: keyof T): number {
return Math.max.apply(null, this.extractNumerical(field));
}
/**
* Finds the minimum value and its index of a field.
* @param {string} field - the field name
* @return {object} - index and value
*/
public minRecord(field: keyof T) {
const result = {
index: 0,
value: 0
};
if (this._data.length === 0) {
result.index = null;
result.value = null;
return result;
}
result.index = this._data[0].$loki;
result.value = parseFloat(this._data[0][field] as any);
for (let i = 1; i < this._data.length; i++) {
const val = parseFloat(this._data[i][field] as any);
if (result.value > val) {
result.value = val;
result.index = this._data[i].$loki;
}
}
return result;
}
/**
* Finds the maximum value and its index of a field.
* @param {string} field - the field name
* @return {object} - index and value
*/
public maxRecord(field: keyof T) {
const result = {
index: 0,
value: 0
};
if (this._data.length === 0) {
result.index = null;
result.value = null;
return result;
}
result.index = this._data[0].$loki;
result.value = parseFloat(this._data[0][field] as any);
for (let i = 1; i < this._data.length; i++) {
const val = parseFloat(this._data[i][field] as any);
if (result.value < val) {
result.value = val;
result.index = this._data[i].$loki;
}
}
return result;
}
/**
* Returns all values of a field as numbers (if possible).
* @param {string} field - the field name
* @return {number[]} - the number array
*/
public extractNumerical(field: keyof T) {
return this.extract(field).map(parseFloat).filter(Number).filter((n) => !(isNaN(n)));
}
/**
* Calculates the average numerical value of a field
* @param {string} field - the field name
* @returns {number} average of property in all docs in the collection
*/
public avg(field: keyof T): number {
return average(this.extractNumerical(field));
}
/**
* Calculate the standard deviation of a field.
* @param {string} field - the field name
* @return {number} the standard deviation
*/
public stdDev(field: keyof T): number {
return standardDeviation(this.extractNumerical(field));
}
/**
* Calculates the mode of a field.
* @param {string} field - the field name
* @return {number} the mode
*/
public mode(field: keyof T): number {
const dict = {};
const data = this.extractNumerical(field);
let mode = data[0];
let maxCount = -Infinity;
for (let i = 0; i < data.length; i++) {
const el = data[i];
if (dict[el]) {
dict[el]++;
} else {
dict[el] = 1;
}
if (dict[el] > maxCount) {
mode = el;
maxCount = dict[el];
}
}
return mode;
}
/**
* Calculates the median of a field.
* @param {string} field - the field name
* @return {number} the median
*/
public median(field: keyof T) {
const values = this.extractNumerical(field);
values.sort((a, b) => a - b);
const half = Math.floor(values.length / 2);
if (values.length % 2) {
return values[half];
} else {
return (values[half - 1] + values[half]) / 2.0;
}
}
}
export namespace Collection {
  /**
   * Construction-time options for a Collection.
   * TData is the raw document type, TNested adds computed nested properties,
   * T is their intersection as stored.
   */
  export interface Options<TData extends object, TNested extends object = {}, T extends object = TData & TNested> {
    // Property names to maintain unique indexes on.
    unique?: (keyof T)[];
    unindexedSortComparator?: string;
    defaultLokiOperatorPackage?: string;
    rangedIndexes?: RangedIndexOptions;
    serializableIndexes?: boolean;
    asyncListeners?: boolean;
    disableMeta?: boolean;
    disableChangesApi?: boolean;
    disableDeltaChangesApi?: boolean;
    clone?: boolean;
    // NOTE(review): both 'serializableIndexes' and 'serializableIndices'
    // appear here - presumably one aliases the other; confirm against usage.
    serializableIndices?: boolean;
    cloneMethod?: CloneMethod;
    transactional?: boolean;
    ttl?: number;
    ttlInterval?: number;
    nestedProperties?: (keyof TNested | { name: keyof TNested, path: string[] })[];
    fullTextSearch?: FullTextSearch.FieldOptions[];
  }
  /** Map of property name to ranged-index metadata. */
  export interface RangedIndexOptions {
    [prop: string]: RangedIndexMeta;
  }
  /** Options controlling deserialization of a persisted database. */
  export interface DeserializeOptions {
    retainDirtyFlags?: boolean;
    fullTextSearch?: Dict<Analyzer>;
    // Per-collection inflation hooks, keyed by collection name.
    [collName: string]: any | { proto?: any; inflate?: (src: object, dest?: object) => void };
  }
  /** State of a binary index: its values plus a dirty flag for lazy rebuild. */
  export interface BinaryIndex {
    dirty: boolean;
    values: any;
  }
  /** Metadata describing a ranged index: live instance plus factory/comparator names. */
  export interface RangedIndexMeta {
    index?: IRangedIndex<any>;
    indexTypeName?: string;
    comparatorName?: string;
  }
  /** A single entry of the changes API: collection name, operation and payload. */
  export interface Change {
    name: string;
    operation: string;
    obj: any;
  }
  /** Serialized (persisted) form of a Collection. */
  export interface Serialized {
    name: string;
    unindexedSortComparator: string;
    defaultLokiOperatorPackage: string;
    _dynamicViews: DynamicView[];
    _nestedProperties: { name: string, path: string[] }[];
    uniqueNames: string[];
    transforms: Dict<Transform[]>;
    rangedIndexes: RangedIndexOptions;
    _data: Doc<any>[];
    idIndex: number[];
    maxId: number;
    _dirty: boolean;
    transactional: boolean;
    asyncListeners: boolean;
    disableMeta: boolean;
    disableChangesApi: boolean;
    disableDeltaChangesApi: boolean;
    cloneObjects: boolean;
    cloneMethod: CloneMethod;
    changes: any;
    _fullTextSearch: FullTextSearch;
  }
  /** Options for index integrity checking (sampling and optional repair). */
  export interface CheckIndexOptions {
    randomSampling?: boolean;
    randomSamplingFactor?: number;
    repair?: boolean;
  }
  /**
   * One step of a named/stored transform pipeline; a discriminated union on
   * 'type' mirroring the ResultSet chain operations.
   */
  export type Transform<T extends object = object> = {
    type: "find";
    value: ResultSet.Query<Doc<T>> | string;
  } | {
    type: "where";
    value: ((obj: Doc<T>) => boolean) | string;
  } | {
    type: "simplesort";
    property: keyof T;
    options?: boolean | ResultSet.SimpleSortOptions;
  } | {
    type: "compoundsort";
    value: (keyof T | [keyof T, boolean])[];
  } | {
    type: "sort";
    value: (a: Doc<T>, b: Doc<T>) => number;
  } | {
    type: "sortByScoring";
    desc?: boolean;
  } | {
    type: "limit";
    value: number;
  } | {
    type: "offset";
    value: number;
  } | {
    type: "map";
    value: (obj: Doc<T>, index: number, array: Doc<T>[]) => any;
    dataOptions?: ResultSet.DataOptions;
  } | {
    type: "eqJoin";
    joinData: Collection<any> | ResultSet<any>;
    leftJoinKey: string | ((obj: any) => string);
    rightJoinKey: string | ((obj: any) => string);
    mapFun?: (left: any, right: any) => any;
    dataOptions?: ResultSet.DataOptions;
  } | {
    type: "mapReduce";
    mapFunction: (item: Doc<T>, index: number, array: Doc<T>[]) => any;
    reduceFunction: (array: any[]) => any;
  } | {
    type: "update";
    value: (obj: Doc<T>) => any;
  } | {
    type: "remove";
  };
  /** TTL (time-to-live) bookkeeping for age-based document expiry. */
  export interface TTL {
    age: number;
    ttlInterval: number;
    daemon: any; // setInterval Timer
  }
}
import { commonStartEffect, releaseAllEffect, ports } from './common/initial'
import { appInstanceMap } from '../create_app'
import microApp from '..'
import { defer } from '../libs/utils'
// Test suite for the <micro-app> custom element: rendering, name/url attribute
// changes, ssr mode and keep-alive lifecycle. (Comments translated to English.)
describe('micro_app_element', () => {
  let appCon: Element
  beforeAll(() => {
    commonStartEffect(ports.micro_app_element)
    appCon = document.querySelector('#app-container')!
    window.keepAliveListener = jest.fn()
    microApp.start({
      preFetchApps: [
        {
          name: 'test-app1',
          url: `http://127.0.0.1:${ports.micro_app_element}/common`,
        },
        {
          name: 'test-app12',
          url: `http://127.0.0.1:${ports.micro_app_element}/common`,
        },
      ]
    })
  })
  afterAll(() => {
    return releaseAllEffect()
  })
  // renders normally
  test('render app2 as usual', async () => {
    const microAppElement2 = document.createElement('micro-app')
    microAppElement2.setAttribute('name', 'test-app2')
    microAppElement2.setAttribute('url', `http://127.0.0.1:${ports.micro_app_element}/ssr-render/`)
    microAppElement2.setAttribute('baseurl', '/baseurl')
    appCon.appendChild(microAppElement2)
    await new Promise((resolve) => {
      microAppElement2.addEventListener('mounted', () => {
        // expect(appInstanceMap.size).toBe(3) // prefetch timing is nondeterministic, so appInstanceMap.size is too
        resolve(true)
      }, false)
    })
  })
  // when a new app has the same name as an old app but a different url, and the old app
  // was prefetched, delete the old app's cache and overwrite it with the new app
  test('app3 has same name with prefetch app1 but the url is different', () => {
    const microAppElement3 = document.createElement('micro-app')
    microAppElement3.setAttribute('name', 'test-app1')
    microAppElement3.setAttribute('url', `http://127.0.0.1:${ports.micro_app_element}/ssr-render/`)
    appCon.appendChild(microAppElement3)
    expect(console.warn).toHaveBeenCalled()
  })
  // name conflict
  test('app4 has same name with app2 but the url is different', () => {
    const microAppElement4 = document.createElement('micro-app')
    microAppElement4.setAttribute('name', 'test-app2')
    microAppElement4.setAttribute('url', `http://127.0.0.1:${ports.micro_app_element}/common/`)
    appCon.appendChild(microAppElement4)
    expect(console.error).toHaveBeenCalledWith('[micro-app] app test-app2: app name conflict, an app named test-app2 is running')
  })
  // invalid url
  test('it should log error when url is invalid', () => {
    const microAppElement5 = document.createElement('micro-app')
    microAppElement5.setAttribute('name', 'test-app2')
    microAppElement5.setAttribute('url', 'abc')
    appCon.appendChild(microAppElement5)
    expect(console.error).toBeCalledTimes(2)
  })
  // changing name or url fails
  test('it should deal with an error when change name or url failed', async () => {
    const microAppElement6 = document.createElement('micro-app')
    microAppElement6.setAttribute('name', 'test-app6')
    microAppElement6.setAttribute('url', `http://127.0.0.1:${ports.micro_app_element}/common/`)
    microAppElement6.setAttribute('name', 'test-app2')
    await new Promise((resolve) => {
      defer(() => {
        expect(console.error).toBeCalledWith('[micro-app] app test-app6: app name conflict, an app named test-app2 is running')
        expect(microAppElement6.getAttribute('name')).toBe('test-app6')
        resolve(true)
      })
    })
    microAppElement6.setAttribute('name', 'test-app2')
    microAppElement6.setAttribute('url', 'abc')
    await new Promise((resolve) => {
      defer(() => {
        expect(console.error).toBeCalledTimes(3)
        expect(microAppElement6.getAttribute('name')).toBe('test-app6')
        resolve(true)
      })
    })
  })
  // defining a custom element with the same name twice logs a warning
  test('it should log warn when customElement already exists', () => {
    microApp.start()
    expect(console.warn).toBeCalledWith('[micro-app] element micro-app is already defined')
  })
  // cover some special branches of changing the name/url attributes
  test('coverage special branch when change attribute name/url', async () => {
    const microAppElement7 = document.createElement('micro-app')
    microAppElement7.setAttribute('name', 'test-app7')
    microAppElement7.setAttribute('url', `http://127.0.0.1:${ports.micro_app_element}/common/`)
    appCon.appendChild(microAppElement7)
    await new Promise((resolve) => {
      microAppElement7.addEventListener('mounted', () => {
        resolve(true)
      }, false)
    })
    microAppElement7.setAttribute('name', 'new-name') // set a new name
    microAppElement7.setAttribute('name', 'test-app7') // then immediately restore the previous value; since the callback is async, the attribute name and instance name will be seen as identical, covering that branch
    await new Promise((resolve) => {
      defer(() => {
        expect(microAppElement7.getAttribute('name')).toBe('test-app7')
        microAppElement7.setAttribute('name', 'new-name')
        resolve(true)
      })
    })
    const microAppElement8 = document.createElement('micro-app')
    microAppElement8.setAttribute('name', 'test-app8')
    microAppElement8.setAttribute('url', `http://127.0.0.1:${ports.micro_app_element}/common/`)
    appCon.appendChild(microAppElement8)
    await new Promise((resolve) => {
      microAppElement8.addEventListener('mounted', () => {
        resolve(true)
      }, false)
    })
    microAppElement8.setAttribute('url', 'abc') // invalid url
    await new Promise((resolve) => {
      defer(() => {
        expect(microAppElement8.getAttribute('url')).toBe('abc')
        // @ts-ignore
        expect(microAppElement8.appUrl).toBe(`http://127.0.0.1:${ports.micro_app_element}/common/`)
        resolve(true)
      })
    })
    appInstanceMap.delete('test-app8')
    appCon.removeChild(microAppElement8)
  })
  // remount an app with shadowDom & baseurl attributes -- branch coverage
  test('coverage branch of remount app with shadowDom & baseurl', async () => {
    const microAppElement10 = document.createElement('micro-app')
    microAppElement10.setAttribute('name', 'test-app10')
    microAppElement10.setAttribute('url', `http://127.0.0.1:${ports.micro_app_element}/common/`)
    microAppElement10.setAttribute('shadowDom', 'true')
    microAppElement10.setAttribute('baseurl', '/baseurl')
    appCon.appendChild(microAppElement10)
    await new Promise((resolve) => {
      microAppElement10.addEventListener('mounted', () => {
        resolve(true)
      }, false)
    })
    appCon.removeChild(microAppElement10)
    appCon.appendChild(microAppElement10)
    // branch coverage
    const microAppElement11 = document.createElement('micro-app')
    microAppElement11.setAttribute('name', 'test-app11')
    microAppElement11.setAttribute('url', `http://127.0.0.1:${ports.micro_app_element}/common/`)
    appCon.appendChild(microAppElement11)
    await new Promise((resolve) => {
      microAppElement11.addEventListener('mounted', () => {
        resolve(true)
      }, false)
    })
    appCon.removeChild(microAppElement11)
    appCon.appendChild(microAppElement11)
  })
  // name or url changed successfully and the target app is a prefetched or
  // already-unmounted app; remount it directly from cache
  test('change name or url to an exist prefetch/unmount app ', async () => {
    const microAppElement13 = document.createElement('micro-app')
    microAppElement13.setAttribute('name', 'test-app13')
    microAppElement13.setAttribute('url', `http://127.0.0.1:${ports.micro_app_element}/dynamic/`)
    appCon.appendChild(microAppElement13)
    await new Promise((resolve) => {
      function handleMounted () {
        microAppElement13.removeEventListener('mounted', handleMounted)
        // test-app12# is normalized to test-app12
        microAppElement13.setAttribute('name', 'test-app12#')
        defer(() => {
          expect(microAppElement13.getAttribute('name')).toBe('test-app12')
        })
        microAppElement13.setAttribute('url', `http://127.0.0.1:${ports.micro_app_element}/common`)
        resolve(true)
      }
      microAppElement13.addEventListener('mounted', handleMounted, false)
    })
    await new Promise((resolve) => {
      defer(() => {
        expect(appInstanceMap.get('test-app12')?.isPrefetch).toBeFalsy()
        resolve(true)
      })
    })
  })
  // branch coverage of getBaseRouteCompatible
  test('coverage branch of getBaseRouteCompatible', async () => {
    const microAppElement14 = document.createElement('micro-app')
    microAppElement14.setAttribute('name', 'test-app14')
    microAppElement14.setAttribute('url', `http://127.0.0.1:${ports.micro_app_element}/common/`)
    microAppElement14.setAttribute('baseroute', '/path')
    appCon.appendChild(microAppElement14)
    await new Promise((resolve) => {
      microAppElement14.addEventListener('mounted', () => {
        resolve(true)
      }, false)
    })
  })
  // insert the micro-app element first, then set the name and url attributes
  test('set name & url after connectedCallback', async () => {
    const microAppElement15 = document.createElement('micro-app')
    appCon.appendChild(microAppElement15)
    microAppElement15.setAttribute('name', 'test-app15')
    microAppElement15.setAttribute('url', `http://127.0.0.1:${ports.micro_app_element}/common/`)
    await new Promise((resolve) => {
      microAppElement15.addEventListener('mounted', () => {
        resolve(true)
      }, false)
    })
  })
  // when a new app has the same name but a different url and the old app is already
  // unmounted, delete the old app's cache and overwrite it with the new app
  test('overwrite unmount app when name conflicts', async () => {
    const microAppElement16 = document.createElement('micro-app')
    microAppElement16.setAttribute('name', 'test-app16')
    microAppElement16.setAttribute('url', `http://127.0.0.1:${ports.micro_app_element}/common`)
    appCon.appendChild(microAppElement16)
    await new Promise((resolve) => {
      microAppElement16.addEventListener('mounted', () => {
        appCon.removeChild(microAppElement16)
        resolve(true)
      })
    })
    const microAppElement17 = document.createElement('micro-app')
    // same name, different url
    microAppElement17.setAttribute('name', 'test-app16')
    microAppElement17.setAttribute('url', `http://127.0.0.1:${ports.micro_app_element}/dynamic/`)
    appCon.appendChild(microAppElement17)
    await new Promise((resolve) => {
      microAppElement17.addEventListener('mounted', () => {
        expect(appInstanceMap.get('test-app16')!.url).toBe(`http://127.0.0.1:${ports.micro_app_element}/dynamic/`)
        resolve(true)
      })
    })
  })
  // test names containing special characters
  test('test name with special characters', async () => {
    // scene 1: name is empty after formatting
    const microAppElement18 = document.createElement('micro-app')
    microAppElement18.setAttribute('name', '123$')
    expect(console.error).toBeCalledWith('[micro-app] Invalid attribute name 123$')
    // scene 2: formatted name differs from the original, so it is re-assigned
    const microAppElement19 = document.createElement('micro-app')
    microAppElement19.setAttribute('name', 'test-app19$')
    expect(microAppElement19.getAttribute('name')).toBe('test-app19')
  })
  // test the ssr configuration
  test('test ssr mode', async () => {
    const microAppElement20 = document.createElement('micro-app')
    microAppElement20.setAttribute('name', 'test-app20')
    microAppElement20.setAttribute('url', `http://127.0.0.1:${ports.micro_app_element}/common`)
    microAppElement20.setAttribute('ssr', 'true')
    // scenario 1: a normally rendered ssr app
    appCon.appendChild(microAppElement20)
    // connectedCallback formats the url; in the jest environment location.pathname defaults to '/', so /common is stripped
    expect(microAppElement20.ssrUrl).toBe(`http://127.0.0.1:${ports.micro_app_element}/`)
    // scenario 2: on re-render without the ssr attribute, an existing ssrUrl is removed
    appCon.removeChild(microAppElement20)
    microAppElement20.removeAttribute('ssr')
    appCon.appendChild(microAppElement20)
    expect(microAppElement20.ssrUrl).toBe('')
    // scenario 3: dynamically changing url in ssr mode keeps ssrUrl in sync
    appCon.removeChild(microAppElement20)
    microAppElement20.setAttribute('ssr', 'true')
    appCon.appendChild(microAppElement20)
    await new Promise((resolve) => {
      microAppElement20.addEventListener('mounted', () => {
        microAppElement20.setAttribute('url', `http://127.0.0.1:${ports.micro_app_element}/dynamic/`)
        defer(() => {
          expect(microAppElement20.ssrUrl).toBe(`http://127.0.0.1:${ports.micro_app_element}/`)
          resolve(true)
        })
      })
    })
    // scenario 4: app already rendered in ssr mode; changing url while removing the ssr attribute must clear ssrUrl
    const microAppElement21 = document.createElement('micro-app')
    microAppElement21.setAttribute('name', 'test-app21')
    microAppElement21.setAttribute('url', `http://127.0.0.1:${ports.micro_app_element}/common`)
    microAppElement21.setAttribute('ssr', 'true')
    appCon.appendChild(microAppElement21)
    await new Promise((resolve) => {
      microAppElement21.addEventListener('mounted', () => {
        microAppElement21.removeAttribute('ssr')
        microAppElement21.setAttribute('url', `http://127.0.0.1:${ports.micro_app_element}/dynamic/`)
        defer(() => {
          expect(microAppElement21.ssrUrl).toBe('')
          resolve(true)
        })
      })
    })
  })
  // keep-alive scenario 1: normal render, hide, re-render
  test('normal process of keep-alive', async () => {
    const microAppElement22 = document.createElement('micro-app')
    microAppElement22.setAttribute('name', 'test-app22')
    microAppElement22.setAttribute('url', `http://127.0.0.1:${ports.micro_app_element}/common/`)
    microAppElement22.setAttribute('keep-alive', 'true')
    appCon.appendChild(microAppElement22)
    await new Promise((resolve) => {
      microAppElement22.addEventListener('mounted', () => {
        resolve(true)
      })
    })
    const beforeShowListener = jest.fn()
    const afterShowListener = jest.fn()
    const afterHiddenListener = jest.fn()
    microAppElement22.addEventListener('beforeshow', beforeShowListener)
    microAppElement22.addEventListener('aftershow', afterShowListener)
    microAppElement22.addEventListener('afterhidden', afterHiddenListener)
    appCon.removeChild(microAppElement22)
    // dispatch event afterhidden to base app and micro app
    expect(afterHiddenListener).toBeCalledWith(expect.any(CustomEvent))
    expect(window.keepAliveListener).toBeCalledWith('afterhidden')
    appCon.appendChild(microAppElement22)
    defer(() => {
      // dispatch event beforeshow to base app and micro app
      expect(beforeShowListener).toBeCalledWith(expect.any(CustomEvent))
      expect(window.keepAliveListener).toBeCalledWith('beforeshow')
      // dispatch event aftershow to base app and micro app
      expect(afterShowListener).toBeCalledWith(expect.any(CustomEvent))
      expect(window.keepAliveListener).toBeCalledWith('aftershow')
    })
    // branch coverage: enable shadowRoot in keep-alive mode
    appCon.removeChild(microAppElement22)
    microAppElement22.setAttribute('shadowDom', 'true')
    appCon.appendChild(microAppElement22)
  })
  // keep-alive scenario 2: url conflict on second render unmounts the old app and re-renders
  test('url conflict when remount of keep-alive', async () => {
    const microAppElement23 = document.createElement('micro-app')
    microAppElement23.setAttribute('name', 'test-app23')
    microAppElement23.setAttribute('url', `http://127.0.0.1:${ports.micro_app_element}/common/`)
    microAppElement23.setAttribute('keep-alive', 'true')
    appCon.appendChild(microAppElement23)
    await new Promise((resolve) => {
      microAppElement23.addEventListener('mounted', () => {
        resolve(true)
      })
    })
    appCon.removeChild(microAppElement23)
    const microAppElement24 = document.createElement('micro-app')
    microAppElement24.setAttribute('name', 'test-app23')
    microAppElement24.setAttribute('url', `http://127.0.0.1:${ports.micro_app_element}/dynamic/`)
    appCon.appendChild(microAppElement24)
    expect(console.error).toHaveBeenCalledWith('[micro-app] app test-app23: app name conflict, an app named test-app23 is running')
  })
  // keep-alive scenario 3: operations around changing the micro-app name/url attributes
  test('url conflict when remount of keep-alive', async () => {
    const microAppElement25 = document.createElement('micro-app')
    microAppElement25.setAttribute('name', 'test-app25')
    microAppElement25.setAttribute('url', `http://127.0.0.1:${ports.micro_app_element}/dynamic/`)
    microAppElement25.setAttribute('keep-alive', 'true')
    appCon.appendChild(microAppElement25)
    await new Promise((resolve) => {
      microAppElement25.addEventListener('mounted', () => {
        resolve(true)
      })
    })
    // the afterhidden event targets test-app25
    const afterHiddenListenerForTestApp25 = jest.fn()
    microAppElement25.addEventListener('afterhidden', afterHiddenListenerForTestApp25)
    // the beforeshow and aftershow events target test-app23
    const beforeShowListenerForTestApp23 = jest.fn()
    const afterShowListenerForTestApp23 = jest.fn()
    microAppElement25.addEventListener('beforeshow', beforeShowListenerForTestApp23)
    microAppElement25.addEventListener('aftershow', afterShowListenerForTestApp23)
    // change name and url
    microAppElement25.setAttribute('name', 'test-app23')
    microAppElement25.setAttribute('url', `http://127.0.0.1:${ports.micro_app_element}/common/`)
    await new Promise((resolve) => {
      // name/url changes are applied asynchronously, so assert inside a timer
      setTimeout(() => {
        // dispatch event afterhidden to base app
        expect(afterHiddenListenerForTestApp25).toBeCalledWith(expect.any(CustomEvent))
        // dispatch event beforeshow to base app
        expect(beforeShowListenerForTestApp23).toBeCalledWith(expect.any(CustomEvent))
        // dispatch event aftershow to base app
        expect(afterShowListenerForTestApp23).toBeCalledWith(expect.any(CustomEvent))
        resolve(true)
      }, 50)
    })
    // change name to test-app25 (hidden) without changing url; the url conflicts and keep-alive logs an error
    microAppElement25.setAttribute('name', 'test-app25')
    await new Promise((resolve) => {
      defer(() => {
        expect(console.error).toHaveBeenCalledWith('[micro-app] app test-app25: app name conflict, an app named test-app25 is running')
        resolve(true)
      })
    })
  })
})
import { GlobalProps } from 'ojs/ojvcomponent';
import { ComponentChildren } from 'preact';
import AsyncValidator = require('../ojvalidator-async');
import Validator = require('../ojvalidator');
import Converter = require('../ojconverter');
import { Validation } from '../ojvalidationfactory-base';
import { DataProvider } from '../ojdataprovider';
import RequiredValidator = require('../ojvalidator-required');
import RegExpValidator = require('../ojvalidator-regexp');
import NumberRangeValidator = require('../ojvalidator-numberrange');
import LengthValidator = require('../ojvalidator-length');
import DateTimeRangeValidator = require('../ojvalidator-datetimerange');
import DateRestrictionValidator = require('../ojvalidator-daterestriction');
import { IntlDateTimeConverter, DateTimeConverter } from '../ojconverter-datetime';
import { IntlNumberConverter, NumberConverter } from '../ojconverter-number';
import { editableValue, editableValueEventMap, editableValueSettableProperties } from '../ojeditablevalue';
import { JetElement, JetSettableProperties, JetElementCustomEvent, JetSetPropertyType } from '..';
/**
 * Typings for the oj-combobox custom element, extending editableValue.
 * Type parameters: V is the value type, SP the settable-properties bag,
 * SV the settable value type and RV the raw value type (both default to V).
 */
export interface ojCombobox<V, SP extends ojComboboxSettableProperties<V, SV, RV>, SV = V, RV = V> extends editableValue<V, SP, SV, RV> {
    // Controls where (and whether) converter hints, help instructions,
    // messages and validator hints are displayed.
    displayOptions?: {
        converterHint?: 'display' | 'none';
        helpInstruction?: Array<'notewindow' | 'none'> | 'notewindow' | 'none';
        messages?: 'display' | 'none';
        validatorHint?: 'display' | 'none';
    };
    // Typed overload for known component events; falls back to the generic DOM signature.
    addEventListener<T extends keyof ojComboboxEventMap<V, SP, SV, RV>>(type: T, listener: (this: HTMLElement, ev: ojComboboxEventMap<V, SP, SV, RV>[T]) => any, options?: (boolean |
       AddEventListenerOptions)): void;
    addEventListener(type: string, listener: EventListenerOrEventListenerObject, options?: (boolean | AddEventListenerOptions)): void;
    // Typed property accessors with untyped string fallbacks.
    getProperty<T extends keyof ojComboboxSettableProperties<V, SV, RV>>(property: T): ojCombobox<V, SP, SV, RV>[T];
    getProperty(property: string): any;
    setProperty<T extends keyof ojComboboxSettableProperties<V, SV, RV>>(property: T, value: ojComboboxSettableProperties<V, SV, RV>[T]): void;
    setProperty<T extends string>(property: T, value: JetSetPropertyType<T, ojComboboxSettableProperties<V, SV, RV>>): void;
    setProperties(properties: ojComboboxSettablePropertiesLenient<V, SV, RV>): void;
    refresh(): void;
    // Runs all validators; resolves when validation completes.
    validate(): Promise<any>;
}
/**
 * Companion namespace for the `ojCombobox` interface (declaration merging):
 * holds the custom-event payload interfaces, per-property `*Changed` event
 * aliases, and the option/option-context helper shapes.
 */
export namespace ojCombobox {
    // Payload of the animation-end custom event; open-ended via index signature.
    interface ojAnimateEnd extends CustomEvent<{
        action: string;
        element: Element;
        [propName: string]: any;
    }> {
    }
    // Animation-start payload; endCallback lets listeners signal animation completion.
    interface ojAnimateStart extends CustomEvent<{
        action: string;
        element: Element;
        endCallback: (() => void);
        [propName: string]: any;
    }> {
    }
    // tslint:disable-next-line interface-over-type-literal
    type displayOptionsChanged<V, SP extends ojComboboxSettableProperties<V, SV, RV>, SV = V, RV = V> = JetElementCustomEvent<ojCombobox<V, SP, SV, RV>["displayOptions"]>;
    //------------------------------------------------------------
    // Start: generated events for inherited properties
    //------------------------------------------------------------
    // tslint:disable-next-line interface-over-type-literal
    type describedByChanged<V, SP extends ojComboboxSettableProperties<V, SV, RV>, SV = V, RV = V> = editableValue.describedByChanged<V, SP, SV, RV>;
    // tslint:disable-next-line interface-over-type-literal
    type disabledChanged<V, SP extends ojComboboxSettableProperties<V, SV, RV>, SV = V, RV = V> = editableValue.disabledChanged<V, SP, SV, RV>;
    // tslint:disable-next-line interface-over-type-literal
    type helpChanged<V, SP extends ojComboboxSettableProperties<V, SV, RV>, SV = V, RV = V> = editableValue.helpChanged<V, SP, SV, RV>;
    // tslint:disable-next-line interface-over-type-literal
    type helpHintsChanged<V, SP extends ojComboboxSettableProperties<V, SV, RV>, SV = V, RV = V> = editableValue.helpHintsChanged<V, SP, SV, RV>;
    // tslint:disable-next-line interface-over-type-literal
    type labelEdgeChanged<V, SP extends ojComboboxSettableProperties<V, SV, RV>, SV = V, RV = V> = editableValue.labelEdgeChanged<V, SP, SV, RV>;
    // tslint:disable-next-line interface-over-type-literal
    type labelHintChanged<V, SP extends ojComboboxSettableProperties<V, SV, RV>, SV = V, RV = V> = editableValue.labelHintChanged<V, SP, SV, RV>;
    // tslint:disable-next-line interface-over-type-literal
    type messagesCustomChanged<V, SP extends ojComboboxSettableProperties<V, SV, RV>, SV = V, RV = V> = editableValue.messagesCustomChanged<V, SP, SV, RV>;
    // tslint:disable-next-line interface-over-type-literal
    type userAssistanceDensityChanged<V, SP extends ojComboboxSettableProperties<V, SV, RV>, SV = V, RV = V> = editableValue.userAssistanceDensityChanged<V, SP, SV, RV>;
    // tslint:disable-next-line interface-over-type-literal
    type validChanged<V, SP extends ojComboboxSettableProperties<V, SV, RV>, SV = V, RV = V> = editableValue.validChanged<V, SP, SV, RV>;
    // tslint:disable-next-line interface-over-type-literal
    type valueChanged<V, SP extends ojComboboxSettableProperties<V, SV, RV>, SV = V, RV = V> = editableValue.valueChanged<V, SP, SV, RV>;
    // A labelled group of options; children may nest groups recursively.
    // tslint:disable-next-line interface-over-type-literal
    type Optgroup = {
        children: Array<Option | Optgroup>;
        disabled?: boolean;
        label: string;
    };
    // A single selectable option; value is intentionally untyped here.
    // tslint:disable-next-line interface-over-type-literal
    type Option = {
        disabled?: boolean;
        label?: string;
        value: any;
    };
    // Context handed to a custom optionRenderer callback; D is the row-data type.
    // tslint:disable-next-line interface-over-type-literal
    type OptionContext<D = any> = {
        componentElement: Element;
        data: D;
        depth: number;
        index: number;
        leaf: boolean;
        parent: Element;
        parentElement: Element;
    };
    // Field-name mapping used when options come from arbitrarily-shaped data.
    // tslint:disable-next-line interface-over-type-literal
    type OptionsKeys = {
        childKeys?: OptionsKeys;
        children?: string;
        label?: string;
        value?: string;
    };
}
/**
 * Event map for `ojCombobox`: string event names to their event types, used by
 * the typed `addEventListener` overload. Keys follow the `<property>Changed`
 * naming convention and must match the property names on `ojCombobox` exactly.
 */
export interface ojComboboxEventMap<V, SP extends ojComboboxSettableProperties<V, SV, RV>, SV = V, RV = V> extends editableValueEventMap<V, SP, SV, RV> {
    'ojAnimateEnd': ojCombobox.ojAnimateEnd;
    'ojAnimateStart': ojCombobox.ojAnimateStart;
    'displayOptionsChanged': JetElementCustomEvent<ojCombobox<V, SP, SV, RV>["displayOptions"]>;
    'describedByChanged': JetElementCustomEvent<ojCombobox<V, SP, SV, RV>["describedBy"]>;
    'disabledChanged': JetElementCustomEvent<ojCombobox<V, SP, SV, RV>["disabled"]>;
    'helpChanged': JetElementCustomEvent<ojCombobox<V, SP, SV, RV>["help"]>;
    'helpHintsChanged': JetElementCustomEvent<ojCombobox<V, SP, SV, RV>["helpHints"]>;
    'labelEdgeChanged': JetElementCustomEvent<ojCombobox<V, SP, SV, RV>["labelEdge"]>;
    'labelHintChanged': JetElementCustomEvent<ojCombobox<V, SP, SV, RV>["labelHint"]>;
    'messagesCustomChanged': JetElementCustomEvent<ojCombobox<V, SP, SV, RV>["messagesCustom"]>;
    'userAssistanceDensityChanged': JetElementCustomEvent<ojCombobox<V, SP, SV, RV>["userAssistanceDensity"]>;
    'validChanged': JetElementCustomEvent<ojCombobox<V, SP, SV, RV>["valid"]>;
    'valueChanged': JetElementCustomEvent<ojCombobox<V, SP, SV, RV>["value"]>;
}
/**
 * Properties of `ojCombobox` that may be written via `setProperty` /
 * `setProperties`; mirrors the `displayOptions` shape declared on the element.
 */
export interface ojComboboxSettableProperties<V, SV = V, RV = V> extends editableValueSettableProperties<V, SV, RV> {
    displayOptions?: {
        converterHint?: 'display' | 'none';
        helpInstruction?: Array<'notewindow' | 'none'> | 'notewindow' | 'none';
        messages?: 'display' | 'none';
        validatorHint?: 'display' | 'none';
    };
}
/**
 * Loosely-typed variant of the settable properties: all fields optional and
 * unknown keys allowed, for bulk `setProperties` calls.
 */
export interface ojComboboxSettablePropertiesLenient<V, SV = V, RV = V> extends Partial<ojComboboxSettableProperties<V, SV, RV>> {
    [key: string]: any;
}
/**
 * Multi-select combobox element. Specializes `ojCombobox` with `V[]` as the
 * value type and `string[]` as the raw (typed-text) value type.
 *
 * Type parameters: `K`/`D` — key and row-data types of the `DataProvider`
 * backing `options`; `V` — the type of a single selected value.
 * (`Converter`, `Validator`, `AsyncValidator`, `DataProvider` are imported
 * elsewhere in this file.)
 */
export interface ojComboboxMany<K, D, V = any> extends ojCombobox<V[], ojComboboxManySettableProperties<K, D, V>, V[], string[]> {
    asyncValidators: Array<AsyncValidator<V[]>>;
    converter: Promise<Converter<V>> | Converter<V> | null;
    labelledBy: string | null;
    maximumResultCount: number;
    minLength: number;
    optionRenderer?: ((param0: ojCombobox.OptionContext<D>) => Element) | null;
    options: Array<ojCombobox.Option | ojCombobox.Optgroup> | DataProvider<K, D> | null;
    optionsKeys: ojCombobox.OptionsKeys | null;
    // Extra class/style applied to the dropdown picker element.
    pickerAttributes: {
        class?: string;
        style?: string;
    };
    placeholder: string | null;
    // rawValue is read-only: the text currently typed, one entry per field.
    readonly rawValue: string[] | null;
    readonly: boolean;
    required: boolean;
    validators: Array<Validator<V> | AsyncValidator<V>> | null;
    value: V[] | null;
    valueOptions: Array<{
        value: V;
        label?: string;
    }> | null;
    // Translatable UI strings for this component.
    translations: {
        filterFurther?: string;
        moreMatchesFound?: string;
        noMatchesFound?: string;
        noMoreResults?: string;
        oneMatchesFound?: string;
        required?: {
            hint?: string;
            messageDetail?: string;
            messageSummary?: string;
        };
    };
    addEventListener<T extends keyof ojComboboxManyEventMap<K, D, V>>(type: T, listener: (this: HTMLElement, ev: ojComboboxManyEventMap<K, D, V>[T]) => any, options?: (boolean |
        AddEventListenerOptions)): void;
    addEventListener(type: string, listener: EventListenerOrEventListenerObject, options?: (boolean | AddEventListenerOptions)): void;
    getProperty<T extends keyof ojComboboxManySettableProperties<K, D, V>>(property: T): ojComboboxMany<K, D, V>[T];
    getProperty(property: string): any;
    setProperty<T extends keyof ojComboboxManySettableProperties<K, D, V>>(property: T, value: ojComboboxManySettableProperties<K, D, V>[T]): void;
    setProperty<T extends string>(property: T, value: JetSetPropertyType<T, ojComboboxManySettableProperties<K, D, V>>): void;
    setProperties(properties: ojComboboxManySettablePropertiesLenient<K, D, V>): void;
}
/**
 * Companion namespace for `ojComboboxMany` (declaration merging): animation
 * event payloads plus `*Changed` event aliases for the element's own and
 * inherited properties, instantiated at `V[]` / `string[]`.
 */
export namespace ojComboboxMany {
    interface ojAnimateEnd extends CustomEvent<{
        action: string;
        element: Element;
        [propName: string]: any;
    }> {
    }
    interface ojAnimateStart extends CustomEvent<{
        action: string;
        element: Element;
        endCallback: (() => void);
        [propName: string]: any;
    }> {
    }
    // tslint:disable-next-line interface-over-type-literal
    type asyncValidatorsChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxMany<K, D, V>["asyncValidators"]>;
    // tslint:disable-next-line interface-over-type-literal
    type converterChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxMany<K, D, V>["converter"]>;
    // tslint:disable-next-line interface-over-type-literal
    type labelledByChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxMany<K, D, V>["labelledBy"]>;
    // tslint:disable-next-line interface-over-type-literal
    type maximumResultCountChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxMany<K, D, V>["maximumResultCount"]>;
    // tslint:disable-next-line interface-over-type-literal
    type minLengthChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxMany<K, D, V>["minLength"]>;
    // tslint:disable-next-line interface-over-type-literal
    type optionRendererChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxMany<K, D, V>["optionRenderer"]>;
    // tslint:disable-next-line interface-over-type-literal
    type optionsChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxMany<K, D, V>["options"]>;
    // tslint:disable-next-line interface-over-type-literal
    type optionsKeysChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxMany<K, D, V>["optionsKeys"]>;
    // tslint:disable-next-line interface-over-type-literal
    type pickerAttributesChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxMany<K, D, V>["pickerAttributes"]>;
    // tslint:disable-next-line interface-over-type-literal
    type placeholderChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxMany<K, D, V>["placeholder"]>;
    // tslint:disable-next-line interface-over-type-literal
    type rawValueChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxMany<K, D, V>["rawValue"]>;
    // tslint:disable-next-line interface-over-type-literal
    type readonlyChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxMany<K, D, V>["readonly"]>;
    // tslint:disable-next-line interface-over-type-literal
    type requiredChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxMany<K, D, V>["required"]>;
    // tslint:disable-next-line interface-over-type-literal
    type validatorsChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxMany<K, D, V>["validators"]>;
    // tslint:disable-next-line interface-over-type-literal
    type valueChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxMany<K, D, V>["value"]>;
    // tslint:disable-next-line interface-over-type-literal
    type valueOptionsChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxMany<K, D, V>["valueOptions"]>;
    //------------------------------------------------------------
    // Start: generated events for inherited properties
    //------------------------------------------------------------
    // tslint:disable-next-line interface-over-type-literal
    type describedByChanged<K, D, V = any> = ojCombobox.describedByChanged<V[], ojComboboxManySettableProperties<K, D, V>, V[], string[]>;
    // tslint:disable-next-line interface-over-type-literal
    type disabledChanged<K, D, V = any> = ojCombobox.disabledChanged<V[], ojComboboxManySettableProperties<K, D, V>, V[], string[]>;
    // tslint:disable-next-line interface-over-type-literal
    type displayOptionsChanged<K, D, V = any> = ojCombobox.displayOptionsChanged<V[], ojComboboxManySettableProperties<K, D, V>, V[], string[]>;
    // tslint:disable-next-line interface-over-type-literal
    type helpChanged<K, D, V = any> = ojCombobox.helpChanged<V[], ojComboboxManySettableProperties<K, D, V>, V[], string[]>;
    // tslint:disable-next-line interface-over-type-literal
    type helpHintsChanged<K, D, V = any> = ojCombobox.helpHintsChanged<V[], ojComboboxManySettableProperties<K, D, V>, V[], string[]>;
    // tslint:disable-next-line interface-over-type-literal
    type labelEdgeChanged<K, D, V = any> = ojCombobox.labelEdgeChanged<V[], ojComboboxManySettableProperties<K, D, V>, V[], string[]>;
    // tslint:disable-next-line interface-over-type-literal
    type labelHintChanged<K, D, V = any> = ojCombobox.labelHintChanged<V[], ojComboboxManySettableProperties<K, D, V>, V[], string[]>;
    // tslint:disable-next-line interface-over-type-literal
    type messagesCustomChanged<K, D, V = any> = ojCombobox.messagesCustomChanged<V[], ojComboboxManySettableProperties<K, D, V>, V[], string[]>;
    // tslint:disable-next-line interface-over-type-literal
    type userAssistanceDensityChanged<K, D, V = any> = ojCombobox.userAssistanceDensityChanged<V[], ojComboboxManySettableProperties<K, D, V>, V[], string[]>;
    // tslint:disable-next-line interface-over-type-literal
    type validChanged<K, D, V = any> = ojCombobox.validChanged<V[], ojComboboxManySettableProperties<K, D, V>, V[], string[]>;
    //------------------------------------------------------------
    // End: generated events for inherited properties
    //------------------------------------------------------------
}
/**
 * Event map for `ojComboboxMany`; keys must match the element's property names
 * (`<property>Changed`) and the two animation custom events.
 */
export interface ojComboboxManyEventMap<K, D, V = any> extends ojComboboxEventMap<V[], ojComboboxManySettableProperties<K, D, V>, V[], string[]> {
    'ojAnimateEnd': ojComboboxMany.ojAnimateEnd;
    'ojAnimateStart': ojComboboxMany.ojAnimateStart;
    'asyncValidatorsChanged': JetElementCustomEvent<ojComboboxMany<K, D, V>["asyncValidators"]>;
    'converterChanged': JetElementCustomEvent<ojComboboxMany<K, D, V>["converter"]>;
    'labelledByChanged': JetElementCustomEvent<ojComboboxMany<K, D, V>["labelledBy"]>;
    'maximumResultCountChanged': JetElementCustomEvent<ojComboboxMany<K, D, V>["maximumResultCount"]>;
    'minLengthChanged': JetElementCustomEvent<ojComboboxMany<K, D, V>["minLength"]>;
    'optionRendererChanged': JetElementCustomEvent<ojComboboxMany<K, D, V>["optionRenderer"]>;
    'optionsChanged': JetElementCustomEvent<ojComboboxMany<K, D, V>["options"]>;
    'optionsKeysChanged': JetElementCustomEvent<ojComboboxMany<K, D, V>["optionsKeys"]>;
    'pickerAttributesChanged': JetElementCustomEvent<ojComboboxMany<K, D, V>["pickerAttributes"]>;
    'placeholderChanged': JetElementCustomEvent<ojComboboxMany<K, D, V>["placeholder"]>;
    'rawValueChanged': JetElementCustomEvent<ojComboboxMany<K, D, V>["rawValue"]>;
    'readonlyChanged': JetElementCustomEvent<ojComboboxMany<K, D, V>["readonly"]>;
    'requiredChanged': JetElementCustomEvent<ojComboboxMany<K, D, V>["required"]>;
    'validatorsChanged': JetElementCustomEvent<ojComboboxMany<K, D, V>["validators"]>;
    'valueChanged': JetElementCustomEvent<ojComboboxMany<K, D, V>["value"]>;
    'valueOptionsChanged': JetElementCustomEvent<ojComboboxMany<K, D, V>["valueOptions"]>;
    'describedByChanged': JetElementCustomEvent<ojComboboxMany<K, D, V>["describedBy"]>;
    'disabledChanged': JetElementCustomEvent<ojComboboxMany<K, D, V>["disabled"]>;
    'displayOptionsChanged': JetElementCustomEvent<ojComboboxMany<K, D, V>["displayOptions"]>;
    'helpChanged': JetElementCustomEvent<ojComboboxMany<K, D, V>["help"]>;
    'helpHintsChanged': JetElementCustomEvent<ojComboboxMany<K, D, V>["helpHints"]>;
    'labelEdgeChanged': JetElementCustomEvent<ojComboboxMany<K, D, V>["labelEdge"]>;
    'labelHintChanged': JetElementCustomEvent<ojComboboxMany<K, D, V>["labelHint"]>;
    'messagesCustomChanged': JetElementCustomEvent<ojComboboxMany<K, D, V>["messagesCustom"]>;
    'userAssistanceDensityChanged': JetElementCustomEvent<ojComboboxMany<K, D, V>["userAssistanceDensity"]>;
    'validChanged': JetElementCustomEvent<ojComboboxMany<K, D, V>["valid"]>;
}
/**
 * Writable-property shape for `ojComboboxMany`; mirrors the element interface
 * (note `rawValue` is declared here too, but read-only).
 */
export interface ojComboboxManySettableProperties<K, D, V = any> extends ojComboboxSettableProperties<V[]> {
    asyncValidators: Array<AsyncValidator<V[]>>;
    converter: Promise<Converter<V>> | Converter<V> | null;
    labelledBy: string | null;
    maximumResultCount: number;
    minLength: number;
    optionRenderer?: ((param0: ojCombobox.OptionContext<D>) => Element) | null;
    options: Array<ojCombobox.Option | ojCombobox.Optgroup> | DataProvider<K, D> | null;
    optionsKeys: ojCombobox.OptionsKeys | null;
    pickerAttributes: {
        class?: string;
        style?: string;
    };
    placeholder: string | null;
    readonly rawValue: string[] | null;
    readonly: boolean;
    required: boolean;
    validators: Array<Validator<V> | AsyncValidator<V>> | null;
    value: V[] | null;
    valueOptions: Array<{
        value: V;
        label?: string;
    }> | null;
    translations: {
        filterFurther?: string;
        moreMatchesFound?: string;
        noMatchesFound?: string;
        noMoreResults?: string;
        oneMatchesFound?: string;
        required?: {
            hint?: string;
            messageDetail?: string;
            messageSummary?: string;
        };
    };
}
/**
 * Lenient (all-optional, open-keyed) settable properties for bulk
 * `setProperties` calls on `ojComboboxMany`.
 */
export interface ojComboboxManySettablePropertiesLenient<K, D, V = any> extends Partial<ojComboboxManySettableProperties<K, D, V>> {
    [key: string]: any;
}
/**
 * Single-select combobox element. Specializes `ojCombobox` with a scalar value
 * `V` and raw (typed-text) value `string`. Differs from `ojComboboxMany` by the
 * extra `filterOnOpen` property and the singular `valueOption`.
 *
 * Type parameters: `K`/`D` — key and row-data types of the `DataProvider`
 * backing `options`; `V` — the selected value type.
 */
export interface ojComboboxOne<K, D, V = any> extends ojCombobox<V, ojComboboxOneSettableProperties<K, D, V>, V, string> {
    asyncValidators: Array<AsyncValidator<V>>;
    converter: Promise<Converter<V>> | Converter<V> | null;
    // Whether the dropdown list is filtered by the typed text when opened.
    filterOnOpen: 'none' | 'rawValue';
    labelledBy: string | null;
    maximumResultCount: number;
    minLength: number;
    optionRenderer?: ((param0: ojCombobox.OptionContext<D>) => Element) | null;
    options: Array<ojCombobox.Option | ojCombobox.Optgroup> | DataProvider<K, D> | null;
    optionsKeys: ojCombobox.OptionsKeys | null;
    pickerAttributes: {
        class?: string;
        style?: string;
    };
    placeholder: string | null;
    readonly rawValue: string | null;
    readonly: boolean;
    required: boolean;
    validators: Array<Validator<V> | AsyncValidator<V>> | null;
    value: V | null;
    valueOption: {
        label?: string;
        value: V | null;
    };
    translations: {
        filterFurther?: string;
        moreMatchesFound?: string;
        noMatchesFound?: string;
        oneMatchesFound?: string;
        required?: {
            hint?: string;
            messageDetail?: string;
            messageSummary?: string;
        };
    };
    addEventListener<T extends keyof ojComboboxOneEventMap<K, D, V>>(type: T, listener: (this: HTMLElement, ev: ojComboboxOneEventMap<K, D, V>[T]) => any, options?: (boolean |
        AddEventListenerOptions)): void;
    addEventListener(type: string, listener: EventListenerOrEventListenerObject, options?: (boolean | AddEventListenerOptions)): void;
    getProperty<T extends keyof ojComboboxOneSettableProperties<K, D, V>>(property: T): ojComboboxOne<K, D, V>[T];
    getProperty(property: string): any;
    setProperty<T extends keyof ojComboboxOneSettableProperties<K, D, V>>(property: T, value: ojComboboxOneSettableProperties<K, D, V>[T]): void;
    setProperty<T extends string>(property: T, value: JetSetPropertyType<T, ojComboboxOneSettableProperties<K, D, V>>): void;
    setProperties(properties: ojComboboxOneSettablePropertiesLenient<K, D, V>): void;
}
/**
 * Companion namespace for `ojComboboxOne` (declaration merging). In addition
 * to the animation events it declares `ojValueUpdated` — a custom event
 * carrying the previous and new value — plus the per-property `*Changed`
 * aliases instantiated at scalar `V` / raw `string`.
 */
export namespace ojComboboxOne {
    interface ojAnimateEnd extends CustomEvent<{
        action: string;
        element: Element;
        [propName: string]: any;
    }> {
    }
    interface ojAnimateStart extends CustomEvent<{
        action: string;
        element: Element;
        endCallback: (() => void);
        [propName: string]: any;
    }> {
    }
    // Fired with the prior and current value; payload values are untyped here.
    interface ojValueUpdated extends CustomEvent<{
        previousValue: any;
        value: any;
        [propName: string]: any;
    }> {
    }
    // tslint:disable-next-line interface-over-type-literal
    type asyncValidatorsChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxOne<K, D, V>["asyncValidators"]>;
    // tslint:disable-next-line interface-over-type-literal
    type converterChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxOne<K, D, V>["converter"]>;
    // tslint:disable-next-line interface-over-type-literal
    type filterOnOpenChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxOne<K, D, V>["filterOnOpen"]>;
    // tslint:disable-next-line interface-over-type-literal
    type labelledByChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxOne<K, D, V>["labelledBy"]>;
    // tslint:disable-next-line interface-over-type-literal
    type maximumResultCountChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxOne<K, D, V>["maximumResultCount"]>;
    // tslint:disable-next-line interface-over-type-literal
    type minLengthChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxOne<K, D, V>["minLength"]>;
    // tslint:disable-next-line interface-over-type-literal
    type optionRendererChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxOne<K, D, V>["optionRenderer"]>;
    // tslint:disable-next-line interface-over-type-literal
    type optionsChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxOne<K, D, V>["options"]>;
    // tslint:disable-next-line interface-over-type-literal
    type optionsKeysChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxOne<K, D, V>["optionsKeys"]>;
    // tslint:disable-next-line interface-over-type-literal
    type pickerAttributesChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxOne<K, D, V>["pickerAttributes"]>;
    // tslint:disable-next-line interface-over-type-literal
    type placeholderChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxOne<K, D, V>["placeholder"]>;
    // tslint:disable-next-line interface-over-type-literal
    type rawValueChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxOne<K, D, V>["rawValue"]>;
    // tslint:disable-next-line interface-over-type-literal
    type readonlyChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxOne<K, D, V>["readonly"]>;
    // tslint:disable-next-line interface-over-type-literal
    type requiredChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxOne<K, D, V>["required"]>;
    // tslint:disable-next-line interface-over-type-literal
    type validatorsChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxOne<K, D, V>["validators"]>;
    // tslint:disable-next-line interface-over-type-literal
    type valueChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxOne<K, D, V>["value"]>;
    // tslint:disable-next-line interface-over-type-literal
    type valueOptionChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxOne<K, D, V>["valueOption"]>;
    //------------------------------------------------------------
    // Start: generated events for inherited properties
    //------------------------------------------------------------
    // tslint:disable-next-line interface-over-type-literal
    type describedByChanged<K, D, V = any> = ojCombobox.describedByChanged<V, ojComboboxOneSettableProperties<K, D, V>, V, string>;
    // tslint:disable-next-line interface-over-type-literal
    type disabledChanged<K, D, V = any> = ojCombobox.disabledChanged<V, ojComboboxOneSettableProperties<K, D, V>, V, string>;
    // tslint:disable-next-line interface-over-type-literal
    type displayOptionsChanged<K, D, V = any> = ojCombobox.displayOptionsChanged<V, ojComboboxOneSettableProperties<K, D, V>, V, string>;
    // tslint:disable-next-line interface-over-type-literal
    type helpChanged<K, D, V = any> = ojCombobox.helpChanged<V, ojComboboxOneSettableProperties<K, D, V>, V, string>;
    // tslint:disable-next-line interface-over-type-literal
    type helpHintsChanged<K, D, V = any> = ojCombobox.helpHintsChanged<V, ojComboboxOneSettableProperties<K, D, V>, V, string>;
    // tslint:disable-next-line interface-over-type-literal
    type labelEdgeChanged<K, D, V = any> = ojCombobox.labelEdgeChanged<V, ojComboboxOneSettableProperties<K, D, V>, V, string>;
    // tslint:disable-next-line interface-over-type-literal
    type labelHintChanged<K, D, V = any> = ojCombobox.labelHintChanged<V, ojComboboxOneSettableProperties<K, D, V>, V, string>;
    // tslint:disable-next-line interface-over-type-literal
    type messagesCustomChanged<K, D, V = any> = ojCombobox.messagesCustomChanged<V, ojComboboxOneSettableProperties<K, D, V>, V, string>;
    // tslint:disable-next-line interface-over-type-literal
    type userAssistanceDensityChanged<K, D, V = any> = ojCombobox.userAssistanceDensityChanged<V, ojComboboxOneSettableProperties<K, D, V>, V, string>;
    // tslint:disable-next-line interface-over-type-literal
    type validChanged<K, D, V = any> = ojCombobox.validChanged<V, ojComboboxOneSettableProperties<K, D, V>, V, string>;
    //------------------------------------------------------------
    // End: generated events for inherited properties
    //------------------------------------------------------------
}
/**
 * Event map for `ojComboboxOne`; includes the `ojValueUpdated` custom event in
 * addition to the `<property>Changed` entries.
 */
export interface ojComboboxOneEventMap<K, D, V = any> extends ojComboboxEventMap<V, ojComboboxOneSettableProperties<K, D, V>, V, string> {
    'ojAnimateEnd': ojComboboxOne.ojAnimateEnd;
    'ojAnimateStart': ojComboboxOne.ojAnimateStart;
    'ojValueUpdated': ojComboboxOne.ojValueUpdated;
    'asyncValidatorsChanged': JetElementCustomEvent<ojComboboxOne<K, D, V>["asyncValidators"]>;
    'converterChanged': JetElementCustomEvent<ojComboboxOne<K, D, V>["converter"]>;
    'filterOnOpenChanged': JetElementCustomEvent<ojComboboxOne<K, D, V>["filterOnOpen"]>;
    'labelledByChanged': JetElementCustomEvent<ojComboboxOne<K, D, V>["labelledBy"]>;
    'maximumResultCountChanged': JetElementCustomEvent<ojComboboxOne<K, D, V>["maximumResultCount"]>;
    'minLengthChanged': JetElementCustomEvent<ojComboboxOne<K, D, V>["minLength"]>;
    'optionRendererChanged': JetElementCustomEvent<ojComboboxOne<K, D, V>["optionRenderer"]>;
    'optionsChanged': JetElementCustomEvent<ojComboboxOne<K, D, V>["options"]>;
    'optionsKeysChanged': JetElementCustomEvent<ojComboboxOne<K, D, V>["optionsKeys"]>;
    'pickerAttributesChanged': JetElementCustomEvent<ojComboboxOne<K, D, V>["pickerAttributes"]>;
    'placeholderChanged': JetElementCustomEvent<ojComboboxOne<K, D, V>["placeholder"]>;
    'rawValueChanged': JetElementCustomEvent<ojComboboxOne<K, D, V>["rawValue"]>;
    'readonlyChanged': JetElementCustomEvent<ojComboboxOne<K, D, V>["readonly"]>;
    'requiredChanged': JetElementCustomEvent<ojComboboxOne<K, D, V>["required"]>;
    'validatorsChanged': JetElementCustomEvent<ojComboboxOne<K, D, V>["validators"]>;
    'valueChanged': JetElementCustomEvent<ojComboboxOne<K, D, V>["value"]>;
    'valueOptionChanged': JetElementCustomEvent<ojComboboxOne<K, D, V>["valueOption"]>;
    'describedByChanged': JetElementCustomEvent<ojComboboxOne<K, D, V>["describedBy"]>;
    'disabledChanged': JetElementCustomEvent<ojComboboxOne<K, D, V>["disabled"]>;
    'displayOptionsChanged': JetElementCustomEvent<ojComboboxOne<K, D, V>["displayOptions"]>;
    'helpChanged': JetElementCustomEvent<ojComboboxOne<K, D, V>["help"]>;
    'helpHintsChanged': JetElementCustomEvent<ojComboboxOne<K, D, V>["helpHints"]>;
    'labelEdgeChanged': JetElementCustomEvent<ojComboboxOne<K, D, V>["labelEdge"]>;
    'labelHintChanged': JetElementCustomEvent<ojComboboxOne<K, D, V>["labelHint"]>;
    'messagesCustomChanged': JetElementCustomEvent<ojComboboxOne<K, D, V>["messagesCustom"]>;
    'userAssistanceDensityChanged': JetElementCustomEvent<ojComboboxOne<K, D, V>["userAssistanceDensity"]>;
    'validChanged': JetElementCustomEvent<ojComboboxOne<K, D, V>["valid"]>;
}
/**
 * Writable-property shape for `ojComboboxOne`; mirrors the element interface
 * at scalar `V` (note `rawValue` is declared here too, but read-only).
 */
export interface ojComboboxOneSettableProperties<K, D, V = any> extends ojComboboxSettableProperties<V> {
    asyncValidators: Array<AsyncValidator<V>>;
    converter: Promise<Converter<V>> | Converter<V> | null;
    filterOnOpen: 'none' | 'rawValue';
    labelledBy: string | null;
    maximumResultCount: number;
    minLength: number;
    optionRenderer?: ((param0: ojCombobox.OptionContext<D>) => Element) | null;
    options: Array<ojCombobox.Option | ojCombobox.Optgroup> | DataProvider<K, D> | null;
    optionsKeys: ojCombobox.OptionsKeys | null;
    pickerAttributes: {
        class?: string;
        style?: string;
    };
    placeholder: string | null;
    readonly rawValue: string | null;
    readonly: boolean;
    required: boolean;
    validators: Array<Validator<V> | AsyncValidator<V>> | null;
    value: V | null;
    valueOption: {
        label?: string;
        value: V | null;
    };
    translations: {
        filterFurther?: string;
        moreMatchesFound?: string;
        noMatchesFound?: string;
        oneMatchesFound?: string;
        required?: {
            hint?: string;
            messageDetail?: string;
            messageSummary?: string;
        };
    };
}
/**
 * Lenient (all-optional, open-keyed) settable properties for bulk
 * `setProperties` calls on `ojComboboxOne`.
 */
export interface ojComboboxOneSettablePropertiesLenient<K, D, V = any> extends Partial<ojComboboxOneSettableProperties<K, D, V>> {
    [key: string]: any;
}
/**
 * Base element interface for the select variants (`ojSelectMany` extends it
 * below). Unlike `ojCombobox` it takes no separate raw-value parameter and
 * adds a `labelledBy` property.
 *
 * Type parameters: `V` — value type; `SP` — settable-properties shape;
 * `SV` defaults to `V` and is forwarded to the `editableValue` base.
 */
export interface ojSelect<V, SP extends ojSelectSettableProperties<V, SV>, SV = V> extends editableValue<V, SP, SV> {
    displayOptions?: {
        converterHint?: 'display' | 'none';
        helpInstruction?: Array<'notewindow' | 'none'> | 'notewindow' | 'none';
        messages?: 'display' | 'none';
        validatorHint?: 'display' | 'none';
    };
    labelledBy: string | null;
    addEventListener<T extends keyof ojSelectEventMap<V, SP, SV>>(type: T, listener: (this: HTMLElement, ev: ojSelectEventMap<V, SP, SV>[T]) => any, options?: (boolean |
        AddEventListenerOptions)): void;
    addEventListener(type: string, listener: EventListenerOrEventListenerObject, options?: (boolean | AddEventListenerOptions)): void;
    getProperty<T extends keyof ojSelectSettableProperties<V, SV>>(property: T): ojSelect<V, SP, SV>[T];
    getProperty(property: string): any;
    setProperty<T extends keyof ojSelectSettableProperties<V, SV>>(property: T, value: ojSelectSettableProperties<V, SV>[T]): void;
    setProperty<T extends string>(property: T, value: JetSetPropertyType<T, ojSelectSettableProperties<V, SV>>): void;
    setProperties(properties: ojSelectSettablePropertiesLenient<V, SV>): void;
    refresh(): void;
    validate(): Promise<any>;
}
/**
 * Companion namespace for `ojSelect` (declaration merging): event payloads,
 * `*Changed` aliases, and option helper shapes parallel to `ojCombobox`'s
 * (duplicated rather than shared — matches the generator's output style).
 */
export namespace ojSelect {
    interface ojAnimateEnd extends CustomEvent<{
        action: string;
        element: Element;
        [propName: string]: any;
    }> {
    }
    interface ojAnimateStart extends CustomEvent<{
        action: string;
        element: Element;
        endCallback: (() => void);
        [propName: string]: any;
    }> {
    }
    // tslint:disable-next-line interface-over-type-literal
    type displayOptionsChanged<V, SP extends ojSelectSettableProperties<V, SV>, SV = V> = JetElementCustomEvent<ojSelect<V, SP, SV>["displayOptions"]>;
    // tslint:disable-next-line interface-over-type-literal
    type labelledByChanged<V, SP extends ojSelectSettableProperties<V, SV>, SV = V> = JetElementCustomEvent<ojSelect<V, SP, SV>["labelledBy"]>;
    //------------------------------------------------------------
    // Start: generated events for inherited properties
    //------------------------------------------------------------
    // tslint:disable-next-line interface-over-type-literal
    type describedByChanged<V, SP extends ojSelectSettableProperties<V, SV>, SV = V> = editableValue.describedByChanged<V, SP, SV>;
    // tslint:disable-next-line interface-over-type-literal
    type disabledChanged<V, SP extends ojSelectSettableProperties<V, SV>, SV = V> = editableValue.disabledChanged<V, SP, SV>;
    // tslint:disable-next-line interface-over-type-literal
    type helpChanged<V, SP extends ojSelectSettableProperties<V, SV>, SV = V> = editableValue.helpChanged<V, SP, SV>;
    // tslint:disable-next-line interface-over-type-literal
    type helpHintsChanged<V, SP extends ojSelectSettableProperties<V, SV>, SV = V> = editableValue.helpHintsChanged<V, SP, SV>;
    // tslint:disable-next-line interface-over-type-literal
    type labelEdgeChanged<V, SP extends ojSelectSettableProperties<V, SV>, SV = V> = editableValue.labelEdgeChanged<V, SP, SV>;
    // tslint:disable-next-line interface-over-type-literal
    type labelHintChanged<V, SP extends ojSelectSettableProperties<V, SV>, SV = V> = editableValue.labelHintChanged<V, SP, SV>;
    // tslint:disable-next-line interface-over-type-literal
    type messagesCustomChanged<V, SP extends ojSelectSettableProperties<V, SV>, SV = V> = editableValue.messagesCustomChanged<V, SP, SV>;
    // tslint:disable-next-line interface-over-type-literal
    type userAssistanceDensityChanged<V, SP extends ojSelectSettableProperties<V, SV>, SV = V> = editableValue.userAssistanceDensityChanged<V, SP, SV>;
    // tslint:disable-next-line interface-over-type-literal
    type validChanged<V, SP extends ojSelectSettableProperties<V, SV>, SV = V> = editableValue.validChanged<V, SP, SV>;
    // tslint:disable-next-line interface-over-type-literal
    type valueChanged<V, SP extends ojSelectSettableProperties<V, SV>, SV = V> = editableValue.valueChanged<V, SP, SV>;
    // A labelled group of options; children may nest groups recursively.
    // tslint:disable-next-line interface-over-type-literal
    type Optgroup = {
        children: Array<(Option | Optgroup)>;
        disabled?: boolean;
        label: string;
    };
    // A single selectable option; value is intentionally untyped here.
    // tslint:disable-next-line interface-over-type-literal
    type Option = {
        disabled?: boolean;
        label?: string;
        value: any;
    };
    // Context handed to a custom optionRenderer callback; D is the row-data type.
    // tslint:disable-next-line interface-over-type-literal
    type OptionContext<D = any> = {
        componentElement: Element;
        data: D;
        depth: number;
        index: number;
        leaf: boolean;
        parent: Element;
        parentElement: Element;
    };
    // Field-name mapping used when options come from arbitrarily-shaped data.
    // tslint:disable-next-line interface-over-type-literal
    type OptionsKeys = {
        childKeys?: (OptionsKeys);
        children?: string;
        label?: string;
        value?: string;
    };
}
/**
 * Event map for `ojSelect`: `<property>Changed` entries plus the two
 * animation custom events; keys must match property names exactly.
 */
export interface ojSelectEventMap<V, SP extends ojSelectSettableProperties<V, SV>, SV = V> extends editableValueEventMap<V, SP, SV> {
    'ojAnimateEnd': ojSelect.ojAnimateEnd;
    'ojAnimateStart': ojSelect.ojAnimateStart;
    'displayOptionsChanged': JetElementCustomEvent<ojSelect<V, SP, SV>["displayOptions"]>;
    'labelledByChanged': JetElementCustomEvent<ojSelect<V, SP, SV>["labelledBy"]>;
    'describedByChanged': JetElementCustomEvent<ojSelect<V, SP, SV>["describedBy"]>;
    'disabledChanged': JetElementCustomEvent<ojSelect<V, SP, SV>["disabled"]>;
    'helpChanged': JetElementCustomEvent<ojSelect<V, SP, SV>["help"]>;
    'helpHintsChanged': JetElementCustomEvent<ojSelect<V, SP, SV>["helpHints"]>;
    'labelEdgeChanged': JetElementCustomEvent<ojSelect<V, SP, SV>["labelEdge"]>;
    'labelHintChanged': JetElementCustomEvent<ojSelect<V, SP, SV>["labelHint"]>;
    'messagesCustomChanged': JetElementCustomEvent<ojSelect<V, SP, SV>["messagesCustom"]>;
    'userAssistanceDensityChanged': JetElementCustomEvent<ojSelect<V, SP, SV>["userAssistanceDensity"]>;
    'validChanged': JetElementCustomEvent<ojSelect<V, SP, SV>["valid"]>;
    'valueChanged': JetElementCustomEvent<ojSelect<V, SP, SV>["value"]>;
}
/**
 * Writable-property shape for `ojSelect`; mirrors the element's
 * `displayOptions` and `labelledBy` declarations.
 */
export interface ojSelectSettableProperties<V, SV = V> extends editableValueSettableProperties<V, SV> {
    displayOptions?: {
        converterHint?: 'display' | 'none';
        helpInstruction?: Array<'notewindow' | 'none'> | 'notewindow' | 'none';
        messages?: 'display' | 'none';
        validatorHint?: 'display' | 'none';
    };
    labelledBy: string | null;
}
/**
 * Lenient (all-optional, open-keyed) settable properties for bulk
 * `setProperties` calls on `ojSelect`.
 */
export interface ojSelectSettablePropertiesLenient<V, SV = V> extends Partial<ojSelectSettableProperties<V, SV>> {
    [key: string]: any;
}
    /**
     * Multi-select element (oj-select-many). Value is an array of V; options may
     * be supplied inline (Option/Optgroup array) or via a DataProvider keyed by K
     * with row data D. Includes strictly typed addEventListener/getProperty/
     * setProperty overloads keyed by the element's event map and settable
     * properties.
     */
    export interface ojSelectMany<K, D, V = any> extends ojSelect<V[], ojSelectManySettableProperties<K, D, V>> {
        labelledBy: string | null;
        maximumResultCount: number;
        minimumResultsForSearch: number;
        // Custom renderer producing the DOM element for each option; null uses the default rendering.
        optionRenderer?: ((param0: ojSelect.OptionContext<D>) => Element) | null;
        options: Array<ojSelect.Option | ojSelect.Optgroup> | DataProvider<K, D> | null;
        optionsKeys: ojSelect.OptionsKeys | null;
        // Extra class/style applied to the dropdown picker element.
        pickerAttributes: {
            class?: string;
            style?: string;
        };
        placeholder: string | null;
        readonly: boolean;
        renderMode: 'jet' | 'native';
        required: boolean;
        value: V[] | null;
        // Label/value pairs for the currently selected values (display metadata).
        valueOptions: Array<{
            value: V;
            label?: string;
        }> | null;
        // Translatable UI strings for search results and the 'required' validator messages.
        translations: {
            filterFurther?: string;
            moreMatchesFound?: string;
            noMatchesFound?: string;
            noMoreResults?: string;
            oneMatchesFound?: string;
            required?: {
                hint?: string;
                messageDetail?: string;
                messageSummary?: string;
            };
            searchField?: string;
        };
        addEventListener<T extends keyof ojSelectManyEventMap<K, D, V>>(type: T, listener: (this: HTMLElement, ev: ojSelectManyEventMap<K, D, V>[T]) => any, options?: (boolean |
           AddEventListenerOptions)): void;
        addEventListener(type: string, listener: EventListenerOrEventListenerObject, options?: (boolean | AddEventListenerOptions)): void;
        getProperty<T extends keyof ojSelectManySettableProperties<K, D, V>>(property: T): ojSelectMany<K, D, V>[T];
        getProperty(property: string): any;
        setProperty<T extends keyof ojSelectManySettableProperties<K, D, V>>(property: T, value: ojSelectManySettableProperties<K, D, V>[T]): void;
        setProperty<T extends string>(property: T, value: JetSetPropertyType<T, ojSelectManySettableProperties<K, D, V>>): void;
        setProperties(properties: ojSelectManySettablePropertiesLenient<K, D, V>): void;
    }
    /**
     * Companion namespace for ojSelectMany: animation event interfaces plus one
     * `<prop>Changed` event type alias per component property. Aliases in the
     * generated "inherited properties" section delegate to the corresponding
     * ojSelect aliases, instantiated with V[] and this element's settable
     * properties.
     */
    export namespace ojSelectMany {
        interface ojAnimateEnd extends CustomEvent<{
            action: string;
            element: Element;
            [propName: string]: any;
        }> {
        }
        interface ojAnimateStart extends CustomEvent<{
            action: string;
            element: Element;
            endCallback: (() => void);
            [propName: string]: any;
        }> {
        }
        // tslint:disable-next-line interface-over-type-literal
        type labelledByChanged<K, D, V = any> = JetElementCustomEvent<ojSelectMany<K, D, V>["labelledBy"]>;
        // tslint:disable-next-line interface-over-type-literal
        type maximumResultCountChanged<K, D, V = any> = JetElementCustomEvent<ojSelectMany<K, D, V>["maximumResultCount"]>;
        // tslint:disable-next-line interface-over-type-literal
        type minimumResultsForSearchChanged<K, D, V = any> = JetElementCustomEvent<ojSelectMany<K, D, V>["minimumResultsForSearch"]>;
        // tslint:disable-next-line interface-over-type-literal
        type optionRendererChanged<K, D, V = any> = JetElementCustomEvent<ojSelectMany<K, D, V>["optionRenderer"]>;
        // tslint:disable-next-line interface-over-type-literal
        type optionsChanged<K, D, V = any> = JetElementCustomEvent<ojSelectMany<K, D, V>["options"]>;
        // tslint:disable-next-line interface-over-type-literal
        type optionsKeysChanged<K, D, V = any> = JetElementCustomEvent<ojSelectMany<K, D, V>["optionsKeys"]>;
        // tslint:disable-next-line interface-over-type-literal
        type pickerAttributesChanged<K, D, V = any> = JetElementCustomEvent<ojSelectMany<K, D, V>["pickerAttributes"]>;
        // tslint:disable-next-line interface-over-type-literal
        type placeholderChanged<K, D, V = any> = JetElementCustomEvent<ojSelectMany<K, D, V>["placeholder"]>;
        // tslint:disable-next-line interface-over-type-literal
        type readonlyChanged<K, D, V = any> = JetElementCustomEvent<ojSelectMany<K, D, V>["readonly"]>;
        // tslint:disable-next-line interface-over-type-literal
        type renderModeChanged<K, D, V = any> = JetElementCustomEvent<ojSelectMany<K, D, V>["renderMode"]>;
        // tslint:disable-next-line interface-over-type-literal
        type requiredChanged<K, D, V = any> = JetElementCustomEvent<ojSelectMany<K, D, V>["required"]>;
        // tslint:disable-next-line interface-over-type-literal
        type valueChanged<K, D, V = any> = JetElementCustomEvent<ojSelectMany<K, D, V>["value"]>;
        // tslint:disable-next-line interface-over-type-literal
        type valueOptionsChanged<K, D, V = any> = JetElementCustomEvent<ojSelectMany<K, D, V>["valueOptions"]>;
        //------------------------------------------------------------
        // Start: generated events for inherited properties
        //------------------------------------------------------------
        // tslint:disable-next-line interface-over-type-literal
        type describedByChanged<K, D, V = any> = ojSelect.describedByChanged<V[], ojSelectManySettableProperties<K, D, V>>;
        // tslint:disable-next-line interface-over-type-literal
        type disabledChanged<K, D, V = any> = ojSelect.disabledChanged<V[], ojSelectManySettableProperties<K, D, V>>;
        // tslint:disable-next-line interface-over-type-literal
        type displayOptionsChanged<K, D, V = any> = ojSelect.displayOptionsChanged<V[], ojSelectManySettableProperties<K, D, V>>;
        // tslint:disable-next-line interface-over-type-literal
        type helpChanged<K, D, V = any> = ojSelect.helpChanged<V[], ojSelectManySettableProperties<K, D, V>>;
        // tslint:disable-next-line interface-over-type-literal
        type helpHintsChanged<K, D, V = any> = ojSelect.helpHintsChanged<V[], ojSelectManySettableProperties<K, D, V>>;
        // tslint:disable-next-line interface-over-type-literal
        type labelEdgeChanged<K, D, V = any> = ojSelect.labelEdgeChanged<V[], ojSelectManySettableProperties<K, D, V>>;
        // tslint:disable-next-line interface-over-type-literal
        type labelHintChanged<K, D, V = any> = ojSelect.labelHintChanged<V[], ojSelectManySettableProperties<K, D, V>>;
        // tslint:disable-next-line interface-over-type-literal
        type messagesCustomChanged<K, D, V = any> = ojSelect.messagesCustomChanged<V[], ojSelectManySettableProperties<K, D, V>>;
        // tslint:disable-next-line interface-over-type-literal
        type userAssistanceDensityChanged<K, D, V = any> = ojSelect.userAssistanceDensityChanged<V[], ojSelectManySettableProperties<K, D, V>>;
        // tslint:disable-next-line interface-over-type-literal
        type validChanged<K, D, V = any> = ojSelect.validChanged<V[], ojSelectManySettableProperties<K, D, V>>;
        //------------------------------------------------------------
        // End: generated events for inherited properties
        //------------------------------------------------------------
    }
    /**
     * Event map for ojSelectMany: one property-change event per own and inherited
     * property, plus the animation CustomEvents. Used to type addEventListener on
     * the element.
     */
    export interface ojSelectManyEventMap<K, D, V = any> extends ojSelectEventMap<V[], ojSelectManySettableProperties<K, D, V>> {
        'ojAnimateEnd': ojSelectMany.ojAnimateEnd;
        'ojAnimateStart': ojSelectMany.ojAnimateStart;
        'labelledByChanged': JetElementCustomEvent<ojSelectMany<K, D, V>["labelledBy"]>;
        'maximumResultCountChanged': JetElementCustomEvent<ojSelectMany<K, D, V>["maximumResultCount"]>;
        'minimumResultsForSearchChanged': JetElementCustomEvent<ojSelectMany<K, D, V>["minimumResultsForSearch"]>;
        'optionRendererChanged': JetElementCustomEvent<ojSelectMany<K, D, V>["optionRenderer"]>;
        'optionsChanged': JetElementCustomEvent<ojSelectMany<K, D, V>["options"]>;
        'optionsKeysChanged': JetElementCustomEvent<ojSelectMany<K, D, V>["optionsKeys"]>;
        'pickerAttributesChanged': JetElementCustomEvent<ojSelectMany<K, D, V>["pickerAttributes"]>;
        'placeholderChanged': JetElementCustomEvent<ojSelectMany<K, D, V>["placeholder"]>;
        'readonlyChanged': JetElementCustomEvent<ojSelectMany<K, D, V>["readonly"]>;
        'renderModeChanged': JetElementCustomEvent<ojSelectMany<K, D, V>["renderMode"]>;
        'requiredChanged': JetElementCustomEvent<ojSelectMany<K, D, V>["required"]>;
        'valueChanged': JetElementCustomEvent<ojSelectMany<K, D, V>["value"]>;
        'valueOptionsChanged': JetElementCustomEvent<ojSelectMany<K, D, V>["valueOptions"]>;
        'describedByChanged': JetElementCustomEvent<ojSelectMany<K, D, V>["describedBy"]>;
        'disabledChanged': JetElementCustomEvent<ojSelectMany<K, D, V>["disabled"]>;
        'displayOptionsChanged': JetElementCustomEvent<ojSelectMany<K, D, V>["displayOptions"]>;
        'helpChanged': JetElementCustomEvent<ojSelectMany<K, D, V>["help"]>;
        'helpHintsChanged': JetElementCustomEvent<ojSelectMany<K, D, V>["helpHints"]>;
        'labelEdgeChanged': JetElementCustomEvent<ojSelectMany<K, D, V>["labelEdge"]>;
        'labelHintChanged': JetElementCustomEvent<ojSelectMany<K, D, V>["labelHint"]>;
        'messagesCustomChanged': JetElementCustomEvent<ojSelectMany<K, D, V>["messagesCustom"]>;
        'userAssistanceDensityChanged': JetElementCustomEvent<ojSelectMany<K, D, V>["userAssistanceDensity"]>;
        'validChanged': JetElementCustomEvent<ojSelectMany<K, D, V>["valid"]>;
    }
    /**
     * Settable properties for ojSelectMany; mirrors the property shapes declared
     * on the ojSelectMany interface.
     * NOTE(review): the default type parameter here is `V = any[]` while the
     * ojSelectMany interface defaults `V = any`; with `value: V[]` that would make
     * the default value type any[][]. All-`any` so it has no practical effect, but
     * confirm against the JET generator before relying on the default.
     */
    export interface ojSelectManySettableProperties<K, D, V = any[]> extends ojSelectSettableProperties<V[]> {
        labelledBy: string | null;
        maximumResultCount: number;
        minimumResultsForSearch: number;
        optionRenderer?: ((param0: ojSelect.OptionContext<D>) => Element) | null;
        options: Array<ojSelect.Option | ojSelect.Optgroup> | DataProvider<K, D> | null;
        optionsKeys: ojSelect.OptionsKeys | null;
        pickerAttributes: {
            class?: string;
            style?: string;
        };
        placeholder: string | null;
        readonly: boolean;
        renderMode: 'jet' | 'native';
        required: boolean;
        value: V[] | null;
        valueOptions: Array<{
            value: V;
            label?: string;
        }> | null;
        translations: {
            filterFurther?: string;
            moreMatchesFound?: string;
            noMatchesFound?: string;
            noMoreResults?: string;
            oneMatchesFound?: string;
            required?: {
                hint?: string;
                messageDetail?: string;
                messageSummary?: string;
            };
            searchField?: string;
        };
    }
    /**
     * Lenient variant of ojSelectManySettableProperties: all properties optional
     * with an open string index, for partial setProperties payloads.
     */
    export interface ojSelectManySettablePropertiesLenient<K, D, V = any[]> extends Partial<ojSelectManySettableProperties<K, D, V>> {
        [key: string]: any;
    }
    /**
     * Single-select element (oj-select-one). Like ojSelectMany but the value is a
     * single V (or null) and `valueOption` is one label/value pair rather than an
     * array. Options come from an inline Option/Optgroup array or a DataProvider
     * keyed by K with row data D.
     */
    export interface ojSelectOne<K, D, V = any> extends ojSelect<V, ojSelectOneSettableProperties<K, D, V>> {
        labelledBy: string | null;
        maximumResultCount: number;
        minimumResultsForSearch: number;
        // Custom renderer producing the DOM element for each option; null uses the default rendering.
        optionRenderer?: ((param0: ojSelect.OptionContext<D>) => Element) | null;
        options: Array<ojSelect.Option | ojSelect.Optgroup> | DataProvider<K, D> | null;
        optionsKeys: ojSelect.OptionsKeys | null;
        // Extra class/style applied to the dropdown picker element.
        pickerAttributes: {
            class?: string;
            style?: string;
        };
        placeholder: string | null;
        readonly: boolean;
        renderMode: 'jet' | 'native';
        required: boolean;
        value: V | null;
        // Label/value pair for the currently selected value (display metadata).
        valueOption: {
            label?: string;
            value: V | null;
        };
        // Translatable UI strings; note no noMoreResults entry here, unlike ojSelectMany.
        translations: {
            filterFurther?: string;
            moreMatchesFound?: string;
            noMatchesFound?: string;
            oneMatchesFound?: string;
            required?: {
                hint?: string;
                messageDetail?: string;
                messageSummary?: string;
            };
            searchField?: string;
        };
        addEventListener<T extends keyof ojSelectOneEventMap<K, D, V>>(type: T, listener: (this: HTMLElement, ev: ojSelectOneEventMap<K, D, V>[T]) => any, options?: (boolean |
           AddEventListenerOptions)): void;
        addEventListener(type: string, listener: EventListenerOrEventListenerObject, options?: (boolean | AddEventListenerOptions)): void;
        getProperty<T extends keyof ojSelectOneSettableProperties<K, D, V>>(property: T): ojSelectOne<K, D, V>[T];
        getProperty(property: string): any;
        setProperty<T extends keyof ojSelectOneSettableProperties<K, D, V>>(property: T, value: ojSelectOneSettableProperties<K, D, V>[T]): void;
        setProperty<T extends string>(property: T, value: JetSetPropertyType<T, ojSelectOneSettableProperties<K, D, V>>): void;
        setProperties(properties: ojSelectOneSettablePropertiesLenient<K, D, V>): void;
    }
    /**
     * Companion namespace for ojSelectOne: animation event interfaces plus one
     * `<prop>Changed` event type alias per component property. The generated
     * "inherited properties" aliases delegate to the ojSelect aliases,
     * instantiated with the scalar V and this element's settable properties.
     */
    export namespace ojSelectOne {
        interface ojAnimateEnd extends CustomEvent<{
            action: string;
            element: Element;
            [propName: string]: any;
        }> {
        }
        interface ojAnimateStart extends CustomEvent<{
            action: string;
            element: Element;
            endCallback: (() => void);
            [propName: string]: any;
        }> {
        }
        // tslint:disable-next-line interface-over-type-literal
        type labelledByChanged<K, D, V = any> = JetElementCustomEvent<ojSelectOne<K, D, V>["labelledBy"]>;
        // tslint:disable-next-line interface-over-type-literal
        type maximumResultCountChanged<K, D, V = any> = JetElementCustomEvent<ojSelectOne<K, D, V>["maximumResultCount"]>;
        // tslint:disable-next-line interface-over-type-literal
        type minimumResultsForSearchChanged<K, D, V = any> = JetElementCustomEvent<ojSelectOne<K, D, V>["minimumResultsForSearch"]>;
        // tslint:disable-next-line interface-over-type-literal
        type optionRendererChanged<K, D, V = any> = JetElementCustomEvent<ojSelectOne<K, D, V>["optionRenderer"]>;
        // tslint:disable-next-line interface-over-type-literal
        type optionsChanged<K, D, V = any> = JetElementCustomEvent<ojSelectOne<K, D, V>["options"]>;
        // tslint:disable-next-line interface-over-type-literal
        type optionsKeysChanged<K, D, V = any> = JetElementCustomEvent<ojSelectOne<K, D, V>["optionsKeys"]>;
        // tslint:disable-next-line interface-over-type-literal
        type pickerAttributesChanged<K, D, V = any> = JetElementCustomEvent<ojSelectOne<K, D, V>["pickerAttributes"]>;
        // tslint:disable-next-line interface-over-type-literal
        type placeholderChanged<K, D, V = any> = JetElementCustomEvent<ojSelectOne<K, D, V>["placeholder"]>;
        // tslint:disable-next-line interface-over-type-literal
        type readonlyChanged<K, D, V = any> = JetElementCustomEvent<ojSelectOne<K, D, V>["readonly"]>;
        // tslint:disable-next-line interface-over-type-literal
        type renderModeChanged<K, D, V = any> = JetElementCustomEvent<ojSelectOne<K, D, V>["renderMode"]>;
        // tslint:disable-next-line interface-over-type-literal
        type requiredChanged<K, D, V = any> = JetElementCustomEvent<ojSelectOne<K, D, V>["required"]>;
        // tslint:disable-next-line interface-over-type-literal
        type valueChanged<K, D, V = any> = JetElementCustomEvent<ojSelectOne<K, D, V>["value"]>;
        // tslint:disable-next-line interface-over-type-literal
        type valueOptionChanged<K, D, V = any> = JetElementCustomEvent<ojSelectOne<K, D, V>["valueOption"]>;
        //------------------------------------------------------------
        // Start: generated events for inherited properties
        //------------------------------------------------------------
        // tslint:disable-next-line interface-over-type-literal
        type describedByChanged<K, D, V = any> = ojSelect.describedByChanged<V, ojSelectOneSettableProperties<K, D, V>>;
        // tslint:disable-next-line interface-over-type-literal
        type disabledChanged<K, D, V = any> = ojSelect.disabledChanged<V, ojSelectOneSettableProperties<K, D, V>>;
        // tslint:disable-next-line interface-over-type-literal
        type displayOptionsChanged<K, D, V = any> = ojSelect.displayOptionsChanged<V, ojSelectOneSettableProperties<K, D, V>>;
        // tslint:disable-next-line interface-over-type-literal
        type helpChanged<K, D, V = any> = ojSelect.helpChanged<V, ojSelectOneSettableProperties<K, D, V>>;
        // tslint:disable-next-line interface-over-type-literal
        type helpHintsChanged<K, D, V = any> = ojSelect.helpHintsChanged<V, ojSelectOneSettableProperties<K, D, V>>;
        // tslint:disable-next-line interface-over-type-literal
        type labelEdgeChanged<K, D, V = any> = ojSelect.labelEdgeChanged<V, ojSelectOneSettableProperties<K, D, V>>;
        // tslint:disable-next-line interface-over-type-literal
        type labelHintChanged<K, D, V = any> = ojSelect.labelHintChanged<V, ojSelectOneSettableProperties<K, D, V>>;
        // tslint:disable-next-line interface-over-type-literal
        type messagesCustomChanged<K, D, V = any> = ojSelect.messagesCustomChanged<V, ojSelectOneSettableProperties<K, D, V>>;
        // tslint:disable-next-line interface-over-type-literal
        type userAssistanceDensityChanged<K, D, V = any> = ojSelect.userAssistanceDensityChanged<V, ojSelectOneSettableProperties<K, D, V>>;
        // tslint:disable-next-line interface-over-type-literal
        type validChanged<K, D, V = any> = ojSelect.validChanged<V, ojSelectOneSettableProperties<K, D, V>>;
        //------------------------------------------------------------
        // End: generated events for inherited properties
        //------------------------------------------------------------
    }
    /**
     * Event map for ojSelectOne: one property-change event per own and inherited
     * property, plus the animation CustomEvents. Used to type addEventListener on
     * the element.
     */
    export interface ojSelectOneEventMap<K, D, V = any> extends ojSelectEventMap<V, ojSelectOneSettableProperties<K, D, V>> {
        'ojAnimateEnd': ojSelectOne.ojAnimateEnd;
        'ojAnimateStart': ojSelectOne.ojAnimateStart;
        'labelledByChanged': JetElementCustomEvent<ojSelectOne<K, D, V>["labelledBy"]>;
        'maximumResultCountChanged': JetElementCustomEvent<ojSelectOne<K, D, V>["maximumResultCount"]>;
        'minimumResultsForSearchChanged': JetElementCustomEvent<ojSelectOne<K, D, V>["minimumResultsForSearch"]>;
        'optionRendererChanged': JetElementCustomEvent<ojSelectOne<K, D, V>["optionRenderer"]>;
        'optionsChanged': JetElementCustomEvent<ojSelectOne<K, D, V>["options"]>;
        'optionsKeysChanged': JetElementCustomEvent<ojSelectOne<K, D, V>["optionsKeys"]>;
        'pickerAttributesChanged': JetElementCustomEvent<ojSelectOne<K, D, V>["pickerAttributes"]>;
        'placeholderChanged': JetElementCustomEvent<ojSelectOne<K, D, V>["placeholder"]>;
        'readonlyChanged': JetElementCustomEvent<ojSelectOne<K, D, V>["readonly"]>;
        'renderModeChanged': JetElementCustomEvent<ojSelectOne<K, D, V>["renderMode"]>;
        'requiredChanged': JetElementCustomEvent<ojSelectOne<K, D, V>["required"]>;
        'valueChanged': JetElementCustomEvent<ojSelectOne<K, D, V>["value"]>;
        'valueOptionChanged': JetElementCustomEvent<ojSelectOne<K, D, V>["valueOption"]>;
        'describedByChanged': JetElementCustomEvent<ojSelectOne<K, D, V>["describedBy"]>;
        'disabledChanged': JetElementCustomEvent<ojSelectOne<K, D, V>["disabled"]>;
        'displayOptionsChanged': JetElementCustomEvent<ojSelectOne<K, D, V>["displayOptions"]>;
        'helpChanged': JetElementCustomEvent<ojSelectOne<K, D, V>["help"]>;
        'helpHintsChanged': JetElementCustomEvent<ojSelectOne<K, D, V>["helpHints"]>;
        'labelEdgeChanged': JetElementCustomEvent<ojSelectOne<K, D, V>["labelEdge"]>;
        'labelHintChanged': JetElementCustomEvent<ojSelectOne<K, D, V>["labelHint"]>;
        'messagesCustomChanged': JetElementCustomEvent<ojSelectOne<K, D, V>["messagesCustom"]>;
        'userAssistanceDensityChanged': JetElementCustomEvent<ojSelectOne<K, D, V>["userAssistanceDensity"]>;
        'validChanged': JetElementCustomEvent<ojSelectOne<K, D, V>["valid"]>;
    }
    /**
     * Settable properties for ojSelectOne; mirrors the property shapes declared
     * on the ojSelectOne interface (scalar value and single valueOption).
     */
    export interface ojSelectOneSettableProperties<K, D, V = any> extends ojSelectSettableProperties<V> {
        labelledBy: string | null;
        maximumResultCount: number;
        minimumResultsForSearch: number;
        optionRenderer?: ((param0: ojSelect.OptionContext<D>) => Element) | null;
        options: Array<ojSelect.Option | ojSelect.Optgroup> | DataProvider<K, D> | null;
        optionsKeys: ojSelect.OptionsKeys | null;
        pickerAttributes: {
            class?: string;
            style?: string;
        };
        placeholder: string | null;
        readonly: boolean;
        renderMode: 'jet' | 'native';
        required: boolean;
        value: V | null;
        valueOption: {
            label?: string;
            value: V | null;
        };
        translations: {
            filterFurther?: string;
            moreMatchesFound?: string;
            noMatchesFound?: string;
            oneMatchesFound?: string;
            required?: {
                hint?: string;
                messageDetail?: string;
                messageSummary?: string;
            };
            searchField?: string;
        };
    }
    /**
     * Lenient variant of ojSelectOneSettableProperties: all properties optional
     * with an open string index, for partial setProperties payloads.
     */
    export interface ojSelectOneSettablePropertiesLenient<K, D, V = any> extends Partial<ojSelectOneSettableProperties<K, D, V>> {
        [key: string]: any;
    }
    /**
     * An option group: a labelled, optionally disabled collection of options
     * and/or nested groups, for use in an inline `options` array.
     */
    export interface Optgroup {
        children: Array<(Option | Optgroup)>;
        disabled?: boolean;
        label: string;
    }
    /**
     * A single selectable option for an inline `options` array. `value` is the
     * required selection value; `label` is the optional display text.
     */
    export interface Option {
        disabled?: boolean;
        label?: string;
        value: object;
    }
    // Element-name aliases: map the custom-element style names (e.g. SelectOneElement)
    // onto the corresponding ojCombobox*/ojSelect* interfaces, preserving type parameters.
    export type ComboboxElement<V, SP extends ojComboboxSettableProperties<V, SV, RV>, SV = V, RV = V> = ojCombobox<V, SP, SV, RV>;
    export type ComboboxManyElement<K, D, V = any> = ojComboboxMany<K, D, V>;
    export type ComboboxOneElement<K, D, V = any> = ojComboboxOne<K, D, V>;
    export type SelectElement<V, SP extends ojSelectSettableProperties<V, SV>, SV = V> = ojSelect<V, SP, SV>;
    export type SelectManyElement<K, D, V = any> = ojSelectMany<K, D, V>;
    export type SelectOneElement<K, D, V = any> = ojSelectOne<K, D, V>;
    /**
     * Companion namespace for the ComboboxElement alias: re-declares the
     * animation events, the displayOptionsChanged event alias, the generated
     * inherited-property event aliases (delegating to editableValue), and the
     * Optgroup/OptionContext helper types (delegating to ojCombobox).
     */
    export namespace ComboboxElement {
        interface ojAnimateEnd extends CustomEvent<{
            action: string;
            element: Element;
            [propName: string]: any;
        }> {
        }
        interface ojAnimateStart extends CustomEvent<{
            action: string;
            element: Element;
            endCallback: (() => void);
            [propName: string]: any;
        }> {
        }
        // tslint:disable-next-line interface-over-type-literal
        type displayOptionsChanged<V, SP extends ojComboboxSettableProperties<V, SV, RV>, SV = V, RV = V> = JetElementCustomEvent<ojCombobox<V, SP, SV, RV>["displayOptions"]>;
        //------------------------------------------------------------
        // Start: generated events for inherited properties
        //------------------------------------------------------------
        // tslint:disable-next-line interface-over-type-literal
        type describedByChanged<V, SP extends ojComboboxSettableProperties<V, SV, RV>, SV = V, RV = V> = editableValue.describedByChanged<V, SP, SV, RV>;
        // tslint:disable-next-line interface-over-type-literal
        type disabledChanged<V, SP extends ojComboboxSettableProperties<V, SV, RV>, SV = V, RV = V> = editableValue.disabledChanged<V, SP, SV, RV>;
        // tslint:disable-next-line interface-over-type-literal
        type helpChanged<V, SP extends ojComboboxSettableProperties<V, SV, RV>, SV = V, RV = V> = editableValue.helpChanged<V, SP, SV, RV>;
        // tslint:disable-next-line interface-over-type-literal
        type helpHintsChanged<V, SP extends ojComboboxSettableProperties<V, SV, RV>, SV = V, RV = V> = editableValue.helpHintsChanged<V, SP, SV, RV>;
        // tslint:disable-next-line interface-over-type-literal
        type labelEdgeChanged<V, SP extends ojComboboxSettableProperties<V, SV, RV>, SV = V, RV = V> = editableValue.labelEdgeChanged<V, SP, SV, RV>;
        // tslint:disable-next-line interface-over-type-literal
        type labelHintChanged<V, SP extends ojComboboxSettableProperties<V, SV, RV>, SV = V, RV = V> = editableValue.labelHintChanged<V, SP, SV, RV>;
        // tslint:disable-next-line interface-over-type-literal
        type messagesCustomChanged<V, SP extends ojComboboxSettableProperties<V, SV, RV>, SV = V, RV = V> = editableValue.messagesCustomChanged<V, SP, SV, RV>;
        // tslint:disable-next-line interface-over-type-literal
        type userAssistanceDensityChanged<V, SP extends ojComboboxSettableProperties<V, SV, RV>, SV = V, RV = V> = editableValue.userAssistanceDensityChanged<V, SP, SV, RV>;
        // tslint:disable-next-line interface-over-type-literal
        type validChanged<V, SP extends ojComboboxSettableProperties<V, SV, RV>, SV = V, RV = V> = editableValue.validChanged<V, SP, SV, RV>;
        // tslint:disable-next-line interface-over-type-literal
        type valueChanged<V, SP extends ojComboboxSettableProperties<V, SV, RV>, SV = V, RV = V> = editableValue.valueChanged<V, SP, SV, RV>;
        // tslint:disable-next-line interface-over-type-literal
        type Optgroup = {
            children: Array<ojCombobox.Option | ojCombobox.Optgroup>;
            disabled?: boolean;
            label: string;
        };
        // tslint:disable-next-line interface-over-type-literal
        type OptionContext<D = any> = {
            componentElement: Element;
            data: D;
            depth: number;
            index: number;
            leaf: boolean;
            parent: Element;
            parentElement: Element;
        };
    }
    /**
     * Companion namespace for the ComboboxManyElement alias: animation events plus
     * one `<prop>Changed` event type alias per ojComboboxMany property. The
     * generated inherited-property aliases delegate to ojCombobox, instantiated
     * with value type V[] and raw-value type string[].
     */
    export namespace ComboboxManyElement {
        interface ojAnimateEnd extends CustomEvent<{
            action: string;
            element: Element;
            [propName: string]: any;
        }> {
        }
        interface ojAnimateStart extends CustomEvent<{
            action: string;
            element: Element;
            endCallback: (() => void);
            [propName: string]: any;
        }> {
        }
        // tslint:disable-next-line interface-over-type-literal
        type asyncValidatorsChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxMany<K, D, V>["asyncValidators"]>;
        // tslint:disable-next-line interface-over-type-literal
        type converterChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxMany<K, D, V>["converter"]>;
        // tslint:disable-next-line interface-over-type-literal
        type labelledByChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxMany<K, D, V>["labelledBy"]>;
        // tslint:disable-next-line interface-over-type-literal
        type maximumResultCountChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxMany<K, D, V>["maximumResultCount"]>;
        // tslint:disable-next-line interface-over-type-literal
        type minLengthChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxMany<K, D, V>["minLength"]>;
        // tslint:disable-next-line interface-over-type-literal
        type optionRendererChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxMany<K, D, V>["optionRenderer"]>;
        // tslint:disable-next-line interface-over-type-literal
        type optionsChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxMany<K, D, V>["options"]>;
        // tslint:disable-next-line interface-over-type-literal
        type optionsKeysChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxMany<K, D, V>["optionsKeys"]>;
        // tslint:disable-next-line interface-over-type-literal
        type pickerAttributesChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxMany<K, D, V>["pickerAttributes"]>;
        // tslint:disable-next-line interface-over-type-literal
        type placeholderChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxMany<K, D, V>["placeholder"]>;
        // tslint:disable-next-line interface-over-type-literal
        type rawValueChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxMany<K, D, V>["rawValue"]>;
        // tslint:disable-next-line interface-over-type-literal
        type readonlyChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxMany<K, D, V>["readonly"]>;
        // tslint:disable-next-line interface-over-type-literal
        type requiredChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxMany<K, D, V>["required"]>;
        // tslint:disable-next-line interface-over-type-literal
        type validatorsChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxMany<K, D, V>["validators"]>;
        // tslint:disable-next-line interface-over-type-literal
        type valueChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxMany<K, D, V>["value"]>;
        // tslint:disable-next-line interface-over-type-literal
        type valueOptionsChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxMany<K, D, V>["valueOptions"]>;
        //------------------------------------------------------------
        // Start: generated events for inherited properties
        //------------------------------------------------------------
        // tslint:disable-next-line interface-over-type-literal
        type describedByChanged<K, D, V = any> = ojCombobox.describedByChanged<V[], ojComboboxManySettableProperties<K, D, V>, V[], string[]>;
        // tslint:disable-next-line interface-over-type-literal
        type disabledChanged<K, D, V = any> = ojCombobox.disabledChanged<V[], ojComboboxManySettableProperties<K, D, V>, V[], string[]>;
        // tslint:disable-next-line interface-over-type-literal
        type displayOptionsChanged<K, D, V = any> = ojCombobox.displayOptionsChanged<V[], ojComboboxManySettableProperties<K, D, V>, V[], string[]>;
        // tslint:disable-next-line interface-over-type-literal
        type helpChanged<K, D, V = any> = ojCombobox.helpChanged<V[], ojComboboxManySettableProperties<K, D, V>, V[], string[]>;
        // tslint:disable-next-line interface-over-type-literal
        type helpHintsChanged<K, D, V = any> = ojCombobox.helpHintsChanged<V[], ojComboboxManySettableProperties<K, D, V>, V[], string[]>;
        // tslint:disable-next-line interface-over-type-literal
        type labelEdgeChanged<K, D, V = any> = ojCombobox.labelEdgeChanged<V[], ojComboboxManySettableProperties<K, D, V>, V[], string[]>;
        // tslint:disable-next-line interface-over-type-literal
        type labelHintChanged<K, D, V = any> = ojCombobox.labelHintChanged<V[], ojComboboxManySettableProperties<K, D, V>, V[], string[]>;
        // tslint:disable-next-line interface-over-type-literal
        type messagesCustomChanged<K, D, V = any> = ojCombobox.messagesCustomChanged<V[], ojComboboxManySettableProperties<K, D, V>, V[], string[]>;
        // tslint:disable-next-line interface-over-type-literal
        type userAssistanceDensityChanged<K, D, V = any> = ojCombobox.userAssistanceDensityChanged<V[], ojComboboxManySettableProperties<K, D, V>, V[], string[]>;
        // tslint:disable-next-line interface-over-type-literal
        type validChanged<K, D, V = any> = ojCombobox.validChanged<V[], ojComboboxManySettableProperties<K, D, V>, V[], string[]>;
        //------------------------------------------------------------
        // End: generated events for inherited properties
        //------------------------------------------------------------
    }
    /**
     * Companion namespace for the ComboboxOneElement alias: animation events, the
     * ojValueUpdated CustomEvent (carrying previous and current value), and one
     * `<prop>Changed` event type alias per ojComboboxOne property. The generated
     * inherited-property aliases delegate to ojCombobox, instantiated with the
     * scalar value type V and raw-value type string.
     */
    export namespace ComboboxOneElement {
        interface ojAnimateEnd extends CustomEvent<{
            action: string;
            element: Element;
            [propName: string]: any;
        }> {
        }
        interface ojAnimateStart extends CustomEvent<{
            action: string;
            element: Element;
            endCallback: (() => void);
            [propName: string]: any;
        }> {
        }
        interface ojValueUpdated extends CustomEvent<{
            previousValue: any;
            value: any;
            [propName: string]: any;
        }> {
        }
        // tslint:disable-next-line interface-over-type-literal
        type asyncValidatorsChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxOne<K, D, V>["asyncValidators"]>;
        // tslint:disable-next-line interface-over-type-literal
        type converterChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxOne<K, D, V>["converter"]>;
        // tslint:disable-next-line interface-over-type-literal
        type filterOnOpenChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxOne<K, D, V>["filterOnOpen"]>;
        // tslint:disable-next-line interface-over-type-literal
        type labelledByChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxOne<K, D, V>["labelledBy"]>;
        // tslint:disable-next-line interface-over-type-literal
        type maximumResultCountChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxOne<K, D, V>["maximumResultCount"]>;
        // tslint:disable-next-line interface-over-type-literal
        type minLengthChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxOne<K, D, V>["minLength"]>;
        // tslint:disable-next-line interface-over-type-literal
        type optionRendererChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxOne<K, D, V>["optionRenderer"]>;
        // tslint:disable-next-line interface-over-type-literal
        type optionsChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxOne<K, D, V>["options"]>;
        // tslint:disable-next-line interface-over-type-literal
        type optionsKeysChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxOne<K, D, V>["optionsKeys"]>;
        // tslint:disable-next-line interface-over-type-literal
        type pickerAttributesChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxOne<K, D, V>["pickerAttributes"]>;
        // tslint:disable-next-line interface-over-type-literal
        type placeholderChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxOne<K, D, V>["placeholder"]>;
        // tslint:disable-next-line interface-over-type-literal
        type rawValueChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxOne<K, D, V>["rawValue"]>;
        // tslint:disable-next-line interface-over-type-literal
        type readonlyChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxOne<K, D, V>["readonly"]>;
        // tslint:disable-next-line interface-over-type-literal
        type requiredChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxOne<K, D, V>["required"]>;
        // tslint:disable-next-line interface-over-type-literal
        type validatorsChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxOne<K, D, V>["validators"]>;
        // tslint:disable-next-line interface-over-type-literal
        type valueChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxOne<K, D, V>["value"]>;
        // tslint:disable-next-line interface-over-type-literal
        type valueOptionChanged<K, D, V = any> = JetElementCustomEvent<ojComboboxOne<K, D, V>["valueOption"]>;
        //------------------------------------------------------------
        // Start: generated events for inherited properties
        //------------------------------------------------------------
        // tslint:disable-next-line interface-over-type-literal
        type describedByChanged<K, D, V = any> = ojCombobox.describedByChanged<V, ojComboboxOneSettableProperties<K, D, V>, V, string>;
        // tslint:disable-next-line interface-over-type-literal
        type disabledChanged<K, D, V = any> = ojCombobox.disabledChanged<V, ojComboboxOneSettableProperties<K, D, V>, V, string>;
        // tslint:disable-next-line interface-over-type-literal
        type displayOptionsChanged<K, D, V = any> = ojCombobox.displayOptionsChanged<V, ojComboboxOneSettableProperties<K, D, V>, V, string>;
        // tslint:disable-next-line interface-over-type-literal
        type helpChanged<K, D, V = any> = ojCombobox.helpChanged<V, ojComboboxOneSettableProperties<K, D, V>, V, string>;
        // tslint:disable-next-line interface-over-type-literal
        type helpHintsChanged<K, D, V = any> = ojCombobox.helpHintsChanged<V, ojComboboxOneSettableProperties<K, D, V>, V, string>;
        // tslint:disable-next-line interface-over-type-literal
        type labelEdgeChanged<K, D, V = any> = ojCombobox.labelEdgeChanged<V, ojComboboxOneSettableProperties<K, D, V>, V, string>;
        // tslint:disable-next-line interface-over-type-literal
        type labelHintChanged<K, D, V = any> = ojCombobox.labelHintChanged<V, ojComboboxOneSettableProperties<K, D, V>, V, string>;
        // tslint:disable-next-line interface-over-type-literal
        type messagesCustomChanged<K, D, V = any> = ojCombobox.messagesCustomChanged<V, ojComboboxOneSettableProperties<K, D, V>, V, string>;
        // tslint:disable-next-line interface-over-type-literal
        type userAssistanceDensityChanged<K, D, V = any> = ojCombobox.userAssistanceDensityChanged<V, ojComboboxOneSettableProperties<K, D, V>, V, string>;
        // tslint:disable-next-line interface-over-type-literal
        type validChanged<K, D, V = any> = ojCombobox.validChanged<V, ojComboboxOneSettableProperties<K, D, V>, V, string>;
        //------------------------------------------------------------
        // End: generated events for inherited properties
        //------------------------------------------------------------
    }
export namespace SelectElement {
interface ojAnimateEnd extends CustomEvent<{
action: string;
element: Element;
[propName: string]: any;
}> {
}
interface ojAnimateStart extends CustomEvent<{
action: string;
element: Element;
endCallback: (() => void);
[propName: string]: any;
}> {
}
// tslint:disable-next-line interface-over-type-literal
type displayOptionsChanged<V, SP extends ojSelectSettableProperties<V, SV>, SV = V> = JetElementCustomEvent<ojSelect<V, SP, SV>["displayOptions"]>;
// tslint:disable-next-line interface-over-type-literal
type labelledByChanged<V, SP extends ojSelectSettableProperties<V, SV>, SV = V> = JetElementCustomEvent<ojSelect<V, SP, SV>["labelledBy"]>;
//------------------------------------------------------------
// Start: generated events for inherited properties
//------------------------------------------------------------
// tslint:disable-next-line interface-over-type-literal
type describedByChanged<V, SP extends ojSelectSettableProperties<V, SV>, SV = V> = editableValue.describedByChanged<V, SP, SV>;
// tslint:disable-next-line interface-over-type-literal
type disabledChanged<V, SP extends ojSelectSettableProperties<V, SV>, SV = V> = editableValue.disabledChanged<V, SP, SV>;
// tslint:disable-next-line interface-over-type-literal
type helpChanged<V, SP extends ojSelectSettableProperties<V, SV>, SV = V> = editableValue.helpChanged<V, SP, SV>;
// tslint:disable-next-line interface-over-type-literal
type helpHintsChanged<V, SP extends ojSelectSettableProperties<V, SV>, SV = V> = editableValue.helpHintsChanged<V, SP, SV>;
// tslint:disable-next-line interface-over-type-literal
type labelEdgeChanged<V, SP extends ojSelectSettableProperties<V, SV>, SV = V> = editableValue.labelEdgeChanged<V, SP, SV>;
// tslint:disable-next-line interface-over-type-literal
type labelHintChanged<V, SP extends ojSelectSettableProperties<V, SV>, SV = V> = editableValue.labelHintChanged<V, SP, SV>;
// tslint:disable-next-line interface-over-type-literal
type messagesCustomChanged<V, SP extends ojSelectSettableProperties<V, SV>, SV = V> = editableValue.messagesCustomChanged<V, SP, SV>;
// tslint:disable-next-line interface-over-type-literal
type userAssistanceDensityChanged<V, SP extends ojSelectSettableProperties<V, SV>, SV = V> = editableValue.userAssistanceDensityChanged<V, SP, SV>;
// tslint:disable-next-line interface-over-type-literal
type validChanged<V, SP extends ojSelectSettableProperties<V, SV>, SV = V> = editableValue.validChanged<V, SP, SV>;
// tslint:disable-next-line interface-over-type-literal
type valueChanged<V, SP extends ojSelectSettableProperties<V, SV>, SV = V> = editableValue.valueChanged<V, SP, SV>;
// tslint:disable-next-line interface-over-type-literal
type Optgroup = {
children: Array<(ojSelect.Option | ojSelect.Optgroup)>;
disabled?: boolean;
label: string;
};
// tslint:disable-next-line interface-over-type-literal
type OptionContext<D = any> = {
componentElement: Element;
data: D;
depth: number;
index: number;
leaf: boolean;
parent: Element;
parentElement: Element;
};
}
export namespace SelectManyElement {
interface ojAnimateEnd extends CustomEvent<{
action: string;
element: Element;
[propName: string]: any;
}> {
}
interface ojAnimateStart extends CustomEvent<{
action: string;
element: Element;
endCallback: (() => void);
[propName: string]: any;
}> {
}
// tslint:disable-next-line interface-over-type-literal
type labelledByChanged<K, D, V = any> = JetElementCustomEvent<ojSelectMany<K, D, V>["labelledBy"]>;
// tslint:disable-next-line interface-over-type-literal
type maximumResultCountChanged<K, D, V = any> = JetElementCustomEvent<ojSelectMany<K, D, V>["maximumResultCount"]>;
// tslint:disable-next-line interface-over-type-literal
type minimumResultsForSearchChanged<K, D, V = any> = JetElementCustomEvent<ojSelectMany<K, D, V>["minimumResultsForSearch"]>;
// tslint:disable-next-line interface-over-type-literal
type optionRendererChanged<K, D, V = any> = JetElementCustomEvent<ojSelectMany<K, D, V>["optionRenderer"]>;
// tslint:disable-next-line interface-over-type-literal
type optionsChanged<K, D, V = any> = JetElementCustomEvent<ojSelectMany<K, D, V>["options"]>;
// tslint:disable-next-line interface-over-type-literal
type optionsKeysChanged<K, D, V = any> = JetElementCustomEvent<ojSelectMany<K, D, V>["optionsKeys"]>;
// tslint:disable-next-line interface-over-type-literal
type pickerAttributesChanged<K, D, V = any> = JetElementCustomEvent<ojSelectMany<K, D, V>["pickerAttributes"]>;
// tslint:disable-next-line interface-over-type-literal
type placeholderChanged<K, D, V = any> = JetElementCustomEvent<ojSelectMany<K, D, V>["placeholder"]>;
// tslint:disable-next-line interface-over-type-literal
type readonlyChanged<K, D, V = any> = JetElementCustomEvent<ojSelectMany<K, D, V>["readonly"]>;
// tslint:disable-next-line interface-over-type-literal
type renderModeChanged<K, D, V = any> = JetElementCustomEvent<ojSelectMany<K, D, V>["renderMode"]>;
// tslint:disable-next-line interface-over-type-literal
type requiredChanged<K, D, V = any> = JetElementCustomEvent<ojSelectMany<K, D, V>["required"]>;
// tslint:disable-next-line interface-over-type-literal
type valueChanged<K, D, V = any> = JetElementCustomEvent<ojSelectMany<K, D, V>["value"]>;
// tslint:disable-next-line interface-over-type-literal
type valueOptionsChanged<K, D, V = any> = JetElementCustomEvent<ojSelectMany<K, D, V>["valueOptions"]>;
//------------------------------------------------------------
// Start: generated events for inherited properties
//------------------------------------------------------------
// tslint:disable-next-line interface-over-type-literal
type describedByChanged<K, D, V = any> = ojSelect.describedByChanged<V[], ojSelectManySettableProperties<K, D, V>>;
// tslint:disable-next-line interface-over-type-literal
type disabledChanged<K, D, V = any> = ojSelect.disabledChanged<V[], ojSelectManySettableProperties<K, D, V>>;
// tslint:disable-next-line interface-over-type-literal
type displayOptionsChanged<K, D, V = any> = ojSelect.displayOptionsChanged<V[], ojSelectManySettableProperties<K, D, V>>;
// tslint:disable-next-line interface-over-type-literal
type helpChanged<K, D, V = any> = ojSelect.helpChanged<V[], ojSelectManySettableProperties<K, D, V>>;
// tslint:disable-next-line interface-over-type-literal
type helpHintsChanged<K, D, V = any> = ojSelect.helpHintsChanged<V[], ojSelectManySettableProperties<K, D, V>>;
// tslint:disable-next-line interface-over-type-literal
type labelEdgeChanged<K, D, V = any> = ojSelect.labelEdgeChanged<V[], ojSelectManySettableProperties<K, D, V>>;
// tslint:disable-next-line interface-over-type-literal
type labelHintChanged<K, D, V = any> = ojSelect.labelHintChanged<V[], ojSelectManySettableProperties<K, D, V>>;
// tslint:disable-next-line interface-over-type-literal
type messagesCustomChanged<K, D, V = any> = ojSelect.messagesCustomChanged<V[], ojSelectManySettableProperties<K, D, V>>;
// tslint:disable-next-line interface-over-type-literal
type userAssistanceDensityChanged<K, D, V = any> = ojSelect.userAssistanceDensityChanged<V[], ojSelectManySettableProperties<K, D, V>>;
// tslint:disable-next-line interface-over-type-literal
type validChanged<K, D, V = any> = ojSelect.validChanged<V[], ojSelectManySettableProperties<K, D, V>>;
//------------------------------------------------------------
// End: generated events for inherited properties
//------------------------------------------------------------
}
export namespace SelectOneElement {
interface ojAnimateEnd extends CustomEvent<{
action: string;
element: Element;
[propName: string]: any;
}> {
}
interface ojAnimateStart extends CustomEvent<{
action: string;
element: Element;
endCallback: (() => void);
[propName: string]: any;
}> {
}
// tslint:disable-next-line interface-over-type-literal
type labelledByChanged<K, D, V = any> = JetElementCustomEvent<ojSelectOne<K, D, V>["labelledBy"]>;
// tslint:disable-next-line interface-over-type-literal
type maximumResultCountChanged<K, D, V = any> = JetElementCustomEvent<ojSelectOne<K, D, V>["maximumResultCount"]>;
// tslint:disable-next-line interface-over-type-literal
type minimumResultsForSearchChanged<K, D, V = any> = JetElementCustomEvent<ojSelectOne<K, D, V>["minimumResultsForSearch"]>;
// tslint:disable-next-line interface-over-type-literal
type optionRendererChanged<K, D, V = any> = JetElementCustomEvent<ojSelectOne<K, D, V>["optionRenderer"]>;
// tslint:disable-next-line interface-over-type-literal
type optionsChanged<K, D, V = any> = JetElementCustomEvent<ojSelectOne<K, D, V>["options"]>;
// tslint:disable-next-line interface-over-type-literal
type optionsKeysChanged<K, D, V = any> = JetElementCustomEvent<ojSelectOne<K, D, V>["optionsKeys"]>;
// tslint:disable-next-line interface-over-type-literal
type pickerAttributesChanged<K, D, V = any> = JetElementCustomEvent<ojSelectOne<K, D, V>["pickerAttributes"]>;
// tslint:disable-next-line interface-over-type-literal
type placeholderChanged<K, D, V = any> = JetElementCustomEvent<ojSelectOne<K, D, V>["placeholder"]>;
// tslint:disable-next-line interface-over-type-literal
type readonlyChanged<K, D, V = any> = JetElementCustomEvent<ojSelectOne<K, D, V>["readonly"]>;
// tslint:disable-next-line interface-over-type-literal
type renderModeChanged<K, D, V = any> = JetElementCustomEvent<ojSelectOne<K, D, V>["renderMode"]>;
// tslint:disable-next-line interface-over-type-literal
type requiredChanged<K, D, V = any> = JetElementCustomEvent<ojSelectOne<K, D, V>["required"]>;
// tslint:disable-next-line interface-over-type-literal
type valueChanged<K, D, V = any> = JetElementCustomEvent<ojSelectOne<K, D, V>["value"]>;
// tslint:disable-next-line interface-over-type-literal
type valueOptionChanged<K, D, V = any> = JetElementCustomEvent<ojSelectOne<K, D, V>["valueOption"]>;
//------------------------------------------------------------
// Start: generated events for inherited properties
//------------------------------------------------------------
// tslint:disable-next-line interface-over-type-literal
type describedByChanged<K, D, V = any> = ojSelect.describedByChanged<V, ojSelectOneSettableProperties<K, D, V>>;
// tslint:disable-next-line interface-over-type-literal
type disabledChanged<K, D, V = any> = ojSelect.disabledChanged<V, ojSelectOneSettableProperties<K, D, V>>;
// tslint:disable-next-line interface-over-type-literal
type displayOptionsChanged<K, D, V = any> = ojSelect.displayOptionsChanged<V, ojSelectOneSettableProperties<K, D, V>>;
// tslint:disable-next-line interface-over-type-literal
type helpChanged<K, D, V = any> = ojSelect.helpChanged<V, ojSelectOneSettableProperties<K, D, V>>;
// tslint:disable-next-line interface-over-type-literal
type helpHintsChanged<K, D, V = any> = ojSelect.helpHintsChanged<V, ojSelectOneSettableProperties<K, D, V>>;
// tslint:disable-next-line interface-over-type-literal
type labelEdgeChanged<K, D, V = any> = ojSelect.labelEdgeChanged<V, ojSelectOneSettableProperties<K, D, V>>;
// tslint:disable-next-line interface-over-type-literal
type labelHintChanged<K, D, V = any> = ojSelect.labelHintChanged<V, ojSelectOneSettableProperties<K, D, V>>;
// tslint:disable-next-line interface-over-type-literal
type messagesCustomChanged<K, D, V = any> = ojSelect.messagesCustomChanged<V, ojSelectOneSettableProperties<K, D, V>>;
// tslint:disable-next-line interface-over-type-literal
type userAssistanceDensityChanged<K, D, V = any> = ojSelect.userAssistanceDensityChanged<V, ojSelectOneSettableProperties<K, D, V>>;
// tslint:disable-next-line interface-over-type-literal
type validChanged<K, D, V = any> = ojSelect.validChanged<V, ojSelectOneSettableProperties<K, D, V>>;
//------------------------------------------------------------
// End: generated events for inherited properties
//------------------------------------------------------------
}
export interface ComboboxManyIntrinsicProps extends Partial<Readonly<ojComboboxManySettableProperties<any, any, any>>>, GlobalProps, Pick<preact.JSX.HTMLAttributes, 'ref' | 'key'> {
onojAnimateEnd?: (value: ojComboboxManyEventMap<any, any, any>['ojAnimateEnd']) => void;
onojAnimateStart?: (value: ojComboboxManyEventMap<any, any, any>['ojAnimateStart']) => void;
onasyncValidatorsChanged?: (value: ojComboboxManyEventMap<any, any, any>['asyncValidatorsChanged']) => void;
onconverterChanged?: (value: ojComboboxManyEventMap<any, any, any>['converterChanged']) => void;
onlabelledByChanged?: (value: ojComboboxManyEventMap<any, any, any>['labelledByChanged']) => void;
onmaximumResultCountChanged?: (value: ojComboboxManyEventMap<any, any, any>['maximumResultCountChanged']) => void;
onminLengthChanged?: (value: ojComboboxManyEventMap<any, any, any>['minLengthChanged']) => void;
onoptionRendererChanged?: (value: ojComboboxManyEventMap<any, any, any>['optionRendererChanged']) => void;
onoptionsChanged?: (value: ojComboboxManyEventMap<any, any, any>['optionsChanged']) => void;
onoptionsKeysChanged?: (value: ojComboboxManyEventMap<any, any, any>['optionsKeysChanged']) => void;
onpickerAttributesChanged?: (value: ojComboboxManyEventMap<any, any, any>['pickerAttributesChanged']) => void;
onplaceholderChanged?: (value: ojComboboxManyEventMap<any, any, any>['placeholderChanged']) => void;
onrawValueChanged?: (value: ojComboboxManyEventMap<any, any, any>['rawValueChanged']) => void;
onreadonlyChanged?: (value: ojComboboxManyEventMap<any, any, any>['readonlyChanged']) => void;
onrequiredChanged?: (value: ojComboboxManyEventMap<any, any, any>['requiredChanged']) => void;
onvalidatorsChanged?: (value: ojComboboxManyEventMap<any, any, any>['validatorsChanged']) => void;
onvalueChanged?: (value: ojComboboxManyEventMap<any, any, any>['valueChanged']) => void;
onvalueOptionsChanged?: (value: ojComboboxManyEventMap<any, any, any>['valueOptionsChanged']) => void;
ondescribedByChanged?: (value: ojComboboxManyEventMap<any, any, any>['describedByChanged']) => void;
ondisabledChanged?: (value: ojComboboxManyEventMap<any, any, any>['disabledChanged']) => void;
ondisplayOptionsChanged?: (value: ojComboboxManyEventMap<any, any, any>['displayOptionsChanged']) => void;
onhelpChanged?: (value: ojComboboxManyEventMap<any, any, any>['helpChanged']) => void;
onhelpHintsChanged?: (value: ojComboboxManyEventMap<any, any, any>['helpHintsChanged']) => void;
onlabelEdgeChanged?: (value: ojComboboxManyEventMap<any, any, any>['labelEdgeChanged']) => void;
onlabelHintChanged?: (value: ojComboboxManyEventMap<any, any, any>['labelHintChanged']) => void;
onmessagesCustomChanged?: (value: ojComboboxManyEventMap<any, any, any>['messagesCustomChanged']) => void;
onuserAssistanceDensityChanged?: (value: ojComboboxManyEventMap<any, any, any>['userAssistanceDensityChanged']) => void;
onvalidChanged?: (value: ojComboboxManyEventMap<any, any, any>['validChanged']) => void;
children?: ComponentChildren;
}
export interface ComboboxOneIntrinsicProps extends Partial<Readonly<ojComboboxOneSettableProperties<any, any, any>>>, GlobalProps, Pick<preact.JSX.HTMLAttributes, 'ref' | 'key'> {
onojAnimateEnd?: (value: ojComboboxOneEventMap<any, any, any>['ojAnimateEnd']) => void;
onojAnimateStart?: (value: ojComboboxOneEventMap<any, any, any>['ojAnimateStart']) => void;
onojValueUpdated?: (value: ojComboboxOneEventMap<any, any, any>['ojValueUpdated']) => void;
onasyncValidatorsChanged?: (value: ojComboboxOneEventMap<any, any, any>['asyncValidatorsChanged']) => void;
onconverterChanged?: (value: ojComboboxOneEventMap<any, any, any>['converterChanged']) => void;
onfilterOnOpenChanged?: (value: ojComboboxOneEventMap<any, any, any>['filterOnOpenChanged']) => void;
onlabelledByChanged?: (value: ojComboboxOneEventMap<any, any, any>['labelledByChanged']) => void;
onmaximumResultCountChanged?: (value: ojComboboxOneEventMap<any, any, any>['maximumResultCountChanged']) => void;
onminLengthChanged?: (value: ojComboboxOneEventMap<any, any, any>['minLengthChanged']) => void;
onoptionRendererChanged?: (value: ojComboboxOneEventMap<any, any, any>['optionRendererChanged']) => void;
onoptionsChanged?: (value: ojComboboxOneEventMap<any, any, any>['optionsChanged']) => void;
onoptionsKeysChanged?: (value: ojComboboxOneEventMap<any, any, any>['optionsKeysChanged']) => void;
onpickerAttributesChanged?: (value: ojComboboxOneEventMap<any, any, any>['pickerAttributesChanged']) => void;
onplaceholderChanged?: (value: ojComboboxOneEventMap<any, any, any>['placeholderChanged']) => void;
onrawValueChanged?: (value: ojComboboxOneEventMap<any, any, any>['rawValueChanged']) => void;
onreadonlyChanged?: (value: ojComboboxOneEventMap<any, any, any>['readonlyChanged']) => void;
onrequiredChanged?: (value: ojComboboxOneEventMap<any, any, any>['requiredChanged']) => void;
onvalidatorsChanged?: (value: ojComboboxOneEventMap<any, any, any>['validatorsChanged']) => void;
onvalueChanged?: (value: ojComboboxOneEventMap<any, any, any>['valueChanged']) => void;
onvalueOptionChanged?: (value: ojComboboxOneEventMap<any, any, any>['valueOptionChanged']) => void;
ondescribedByChanged?: (value: ojComboboxOneEventMap<any, any, any>['describedByChanged']) => void;
ondisabledChanged?: (value: ojComboboxOneEventMap<any, any, any>['disabledChanged']) => void;
ondisplayOptionsChanged?: (value: ojComboboxOneEventMap<any, any, any>['displayOptionsChanged']) => void;
onhelpChanged?: (value: ojComboboxOneEventMap<any, any, any>['helpChanged']) => void;
onhelpHintsChanged?: (value: ojComboboxOneEventMap<any, any, any>['helpHintsChanged']) => void;
onlabelEdgeChanged?: (value: ojComboboxOneEventMap<any, any, any>['labelEdgeChanged']) => void;
onlabelHintChanged?: (value: ojComboboxOneEventMap<any, any, any>['labelHintChanged']) => void;
onmessagesCustomChanged?: (value: ojComboboxOneEventMap<any, any, any>['messagesCustomChanged']) => void;
onuserAssistanceDensityChanged?: (value: ojComboboxOneEventMap<any, any, any>['userAssistanceDensityChanged']) => void;
onvalidChanged?: (value: ojComboboxOneEventMap<any, any, any>['validChanged']) => void;
children?: ComponentChildren;
}
export interface SelectManyIntrinsicProps extends Partial<Readonly<ojSelectManySettableProperties<any, any, any>>>, GlobalProps, Pick<preact.JSX.HTMLAttributes, 'ref' | 'key'> {
onojAnimateEnd?: (value: ojSelectManyEventMap<any, any, any>['ojAnimateEnd']) => void;
onojAnimateStart?: (value: ojSelectManyEventMap<any, any, any>['ojAnimateStart']) => void;
onlabelledByChanged?: (value: ojSelectManyEventMap<any, any, any>['labelledByChanged']) => void;
onmaximumResultCountChanged?: (value: ojSelectManyEventMap<any, any, any>['maximumResultCountChanged']) => void;
onminimumResultsForSearchChanged?: (value: ojSelectManyEventMap<any, any, any>['minimumResultsForSearchChanged']) => void;
onoptionRendererChanged?: (value: ojSelectManyEventMap<any, any, any>['optionRendererChanged']) => void;
onoptionsChanged?: (value: ojSelectManyEventMap<any, any, any>['optionsChanged']) => void;
onoptionsKeysChanged?: (value: ojSelectManyEventMap<any, any, any>['optionsKeysChanged']) => void;
onpickerAttributesChanged?: (value: ojSelectManyEventMap<any, any, any>['pickerAttributesChanged']) => void;
onplaceholderChanged?: (value: ojSelectManyEventMap<any, any, any>['placeholderChanged']) => void;
onreadonlyChanged?: (value: ojSelectManyEventMap<any, any, any>['readonlyChanged']) => void;
onrenderModeChanged?: (value: ojSelectManyEventMap<any, any, any>['renderModeChanged']) => void;
onrequiredChanged?: (value: ojSelectManyEventMap<any, any, any>['requiredChanged']) => void;
onvalueChanged?: (value: ojSelectManyEventMap<any, any, any>['valueChanged']) => void;
onvalueOptionsChanged?: (value: ojSelectManyEventMap<any, any, any>['valueOptionsChanged']) => void;
ondescribedByChanged?: (value: ojSelectManyEventMap<any, any, any>['describedByChanged']) => void;
ondisabledChanged?: (value: ojSelectManyEventMap<any, any, any>['disabledChanged']) => void;
ondisplayOptionsChanged?: (value: ojSelectManyEventMap<any, any, any>['displayOptionsChanged']) => void;
onhelpChanged?: (value: ojSelectManyEventMap<any, any, any>['helpChanged']) => void;
onhelpHintsChanged?: (value: ojSelectManyEventMap<any, any, any>['helpHintsChanged']) => void;
onlabelEdgeChanged?: (value: ojSelectManyEventMap<any, any, any>['labelEdgeChanged']) => void;
onlabelHintChanged?: (value: ojSelectManyEventMap<any, any, any>['labelHintChanged']) => void;
onmessagesCustomChanged?: (value: ojSelectManyEventMap<any, any, any>['messagesCustomChanged']) => void;
onuserAssistanceDensityChanged?: (value: ojSelectManyEventMap<any, any, any>['userAssistanceDensityChanged']) => void;
onvalidChanged?: (value: ojSelectManyEventMap<any, any, any>['validChanged']) => void;
children?: ComponentChildren;
}
export interface SelectOneIntrinsicProps extends Partial<Readonly<ojSelectOneSettableProperties<any, any, any>>>, GlobalProps, Pick<preact.JSX.HTMLAttributes, 'ref' | 'key'> {
onojAnimateEnd?: (value: ojSelectOneEventMap<any, any, any>['ojAnimateEnd']) => void;
onojAnimateStart?: (value: ojSelectOneEventMap<any, any, any>['ojAnimateStart']) => void;
onlabelledByChanged?: (value: ojSelectOneEventMap<any, any, any>['labelledByChanged']) => void;
onmaximumResultCountChanged?: (value: ojSelectOneEventMap<any, any, any>['maximumResultCountChanged']) => void;
onminimumResultsForSearchChanged?: (value: ojSelectOneEventMap<any, any, any>['minimumResultsForSearchChanged']) => void;
onoptionRendererChanged?: (value: ojSelectOneEventMap<any, any, any>['optionRendererChanged']) => void;
onoptionsChanged?: (value: ojSelectOneEventMap<any, any, any>['optionsChanged']) => void;
onoptionsKeysChanged?: (value: ojSelectOneEventMap<any, any, any>['optionsKeysChanged']) => void;
onpickerAttributesChanged?: (value: ojSelectOneEventMap<any, any, any>['pickerAttributesChanged']) => void;
onplaceholderChanged?: (value: ojSelectOneEventMap<any, any, any>['placeholderChanged']) => void;
onreadonlyChanged?: (value: ojSelectOneEventMap<any, any, any>['readonlyChanged']) => void;
onrenderModeChanged?: (value: ojSelectOneEventMap<any, any, any>['renderModeChanged']) => void;
onrequiredChanged?: (value: ojSelectOneEventMap<any, any, any>['requiredChanged']) => void;
onvalueChanged?: (value: ojSelectOneEventMap<any, any, any>['valueChanged']) => void;
onvalueOptionChanged?: (value: ojSelectOneEventMap<any, any, any>['valueOptionChanged']) => void;
ondescribedByChanged?: (value: ojSelectOneEventMap<any, any, any>['describedByChanged']) => void;
ondisabledChanged?: (value: ojSelectOneEventMap<any, any, any>['disabledChanged']) => void;
ondisplayOptionsChanged?: (value: ojSelectOneEventMap<any, any, any>['displayOptionsChanged']) => void;
onhelpChanged?: (value: ojSelectOneEventMap<any, any, any>['helpChanged']) => void;
onhelpHintsChanged?: (value: ojSelectOneEventMap<any, any, any>['helpHintsChanged']) => void;
onlabelEdgeChanged?: (value: ojSelectOneEventMap<any, any, any>['labelEdgeChanged']) => void;
onlabelHintChanged?: (value: ojSelectOneEventMap<any, any, any>['labelHintChanged']) => void;
onmessagesCustomChanged?: (value: ojSelectOneEventMap<any, any, any>['messagesCustomChanged']) => void;
onuserAssistanceDensityChanged?: (value: ojSelectOneEventMap<any, any, any>['userAssistanceDensityChanged']) => void;
onvalidChanged?: (value: ojSelectOneEventMap<any, any, any>['validChanged']) => void;
children?: ComponentChildren;
}
declare global {
namespace preact.JSX {
interface IntrinsicElements {
"oj-combobox-many": ComboboxManyIntrinsicProps;
"oj-combobox-one": ComboboxOneIntrinsicProps;
"oj-select-many": SelectManyIntrinsicProps;
"oj-select-one": SelectOneIntrinsicProps;
}
}
} | the_stack |
import { getSharedSecret as secpGetSharedSecret, utils, getPublicKey, Point, CURVE } from 'noble-secp256k1';
import { computeAddress, hexlify, hexZeroPad, isHexString, sha256, BigNumber } from '../ethers';
import { RandomNumber } from './RandomNumber';
import { assertValidPoint, lengths, recoverPublicKeyFromTransaction } from '../utils/utils';
import { CompressedPublicKey, EncryptedPayload, EthersProvider } from '../types';
// List of private or public keys that we disallow initializing a KeyPair instance with, since they will lead to
// unrecoverable funds.
const blockedKeys = [
'0x0000000000000000000000000000000000000000000000000000000000000000', // private key of all zeros
'0x0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000', // public key of all zeroes
];
/**
* @notice Private helper method to return the shared secret for a given private key and public key
* @param privateKey Private key as hex string with 0x prefix
* @param publicKey Uncompressed public key as hex string with 0x04 prefix
* @returns 32-byte shared secret as 66 character hex string
*/
function getSharedSecret(privateKey: string, publicKey: string) {
assertValidPoint(publicKey);
if (privateKey.length !== lengths.privateKey || !isHexString(privateKey)) throw new Error('Invalid private key');
if (publicKey.length !== lengths.publicKey || !isHexString(publicKey)) throw new Error('Invalid public key');
// We use sharedSecret.slice(2) to ensure the shared secret is not dependent on the prefix, which enables
// us to uncompress ephemeralPublicKey from Umbra.sol logs as explained in comments of getUncompressedFromX.
// Note that a shared secret is really just a point on the curve, so it's an uncompressed public key
const sharedSecret = secpGetSharedSecret(privateKey.slice(2), publicKey.slice(2), true) as string; // has 04 prefix but not 0x
return sha256(`0x${sharedSecret.slice(2)}`);
}
export class KeyPair {
readonly publicKeyHex: string; // Public key as hex string with 0x04 prefix
readonly privateKeyHex: string | null = null; // Private key as hex string with 0x prefix, or null if not provided
/**
 * @notice Creates new instance from a public key or private key
 * @param key Can be either (1) hex public key with 0x04 prefix, or (2) hex private key with 0x prefix
 */
constructor(key: string) {
  // A key must arrive as a 0x-prefixed hex string.
  if (typeof key !== 'string' || !isHexString(key)) {
    throw new Error('Key must be a string in hex format with 0x prefix');
  }
  // Refuse known-bad keys (e.g. all zeros) that would lead to unrecoverable funds.
  if (blockedKeys.includes(key)) {
    throw new Error('Cannot initialize KeyPair with the provided key');
  }
  // The key type is distinguished purely by its length.
  if (key.length === lengths.privateKey) {
    // Private key provided: retain it and derive the matching public key.
    this.privateKeyHex = key;
    const derivedPublicKey = getPublicKey(this.privateKeyHexSlim as string); // hex without 0x prefix but with 04 prefix
    this.publicKeyHex = `0x${derivedPublicKey}`; // save the 0x-prefixed form; other forms computed as getters
  } else if (key.length === lengths.publicKey) {
    // Public key provided: verify it is a valid curve point before accepting it.
    assertValidPoint(key); // throw if point is not on curve
    this.publicKeyHex = key; // other forms are computed as getters
  } else {
    throw new Error('Key must be a 66 character hex private key or a 132 character hex public key');
  }
}
// ===================================================== GETTERS =====================================================
/**
* @notice Returns the private key as a hex string without the 0x prefix
*/
get privateKeyHexSlim() {
return this.privateKeyHex ? this.privateKeyHex.slice(2) : null;
}
/**
* @notice Returns the uncompressed public key as a hex string without the 0x prefix
*/
get publicKeyHexSlim() {
return this.publicKeyHex.slice(2);
}
/**
* @notice Returns checksum address derived from this key
*/
get address() {
return computeAddress(this.publicKeyHex);
}
// ============================================= ENCRYPTION / DECRYPTION =============================================
/**
* @notice Encrypt a number with the instance's public key
* @param randomNumber Number as instance of RandomNumber class
* @returns Hex strings of uncompressed 65 byte public key and 32 byte ciphertext
*/
encrypt(number: RandomNumber): EncryptedPayload {
if (!(number instanceof RandomNumber)) {
throw new Error('Must provide instance of RandomNumber');
}
// Get shared secret to use as encryption key
const ephemeralPrivateKey = hexlify(utils.randomPrivateKey()); // private key as hex with 0x prefix
const ephemeralPublicKey = `0x${getPublicKey(ephemeralPrivateKey.slice(2))}`; // public key as hex with 0x prefix
const sharedSecret = getSharedSecret(ephemeralPrivateKey, this.publicKeyHex);
// XOR random number with shared secret to get encrypted value
const ciphertextBN = number.value.xor(sharedSecret);
const ciphertext = hexZeroPad(ciphertextBN.toHexString(), 32); // 32 byte hex string with 0x prefix
return { ephemeralPublicKey, ciphertext };
}
/**
* @notice Decrypt a random number with the instance's private key and return the plaintext
* @param output Output from the encrypt method, which can be constructed from on-chain events
* @returns Decrypted ciphertext as hex string
*/
decrypt(output: EncryptedPayload) {
const { ephemeralPublicKey, ciphertext } = output;
if (!ephemeralPublicKey || !ciphertext) {
throw new Error('Input must be of type EncryptedPayload to decrypt');
}
if (!this.privateKeyHex) {
throw new Error('KeyPair has no associated private key to decrypt with');
}
assertValidPoint(ephemeralPublicKey); // throw if point is not on curve
// Get shared secret to use as decryption key, then decrypt with XOR
const sharedSecret = getSharedSecret(this.privateKeyHex, ephemeralPublicKey);
const plaintext = BigNumber.from(ciphertext).xor(sharedSecret);
return hexZeroPad(plaintext.toHexString(), 32);
}
// =============================================== ELLIPTIC CURVE MATH ===============================================
/**
* @notice Returns new KeyPair instance after multiplying this public key by some value
* @param value number to multiply by, as RandomNumber or hex string with 0x prefix
*/
mulPublicKey(value: RandomNumber | string) {
if (!(value instanceof RandomNumber) && typeof value !== 'string') {
throw new Error('Input must be instance of RandomNumber or string');
}
if (typeof value === 'string' && !value.startsWith('0x')) {
throw new Error('Strings must be in hex form with 0x prefix');
}
// Parse number based on input type
const number = isHexString(value)
? BigInt(value as string) // provided a valid hex string
: BigInt((value as RandomNumber).asHex); // provided RandomNumber
// Perform the multiplication and return new KeyPair instance
const publicKey = Point.fromHex(this.publicKeyHexSlim).multiply(number);
return new KeyPair(`0x${publicKey.toHex()}`);
}
/**
 * @notice Returns a new KeyPair instance after multiplying this private key by some value
 * @param value number to multiply by, as class RandomNumber or hex string with 0x prefix
 */
mulPrivateKey(value: RandomNumber | string) {
  if (!(value instanceof RandomNumber) && typeof value !== 'string') {
    throw new Error('Input must be instance of RandomNumber or string');
  }
  if (typeof value === 'string' && !isHexString(value)) {
    throw new Error('Strings must be in hex form with 0x prefix');
  }
  if (!this.privateKeyHex) {
    throw new Error('KeyPair has no associated private key');
  }
  // Resolve the multiplier to a bigint, accepting either input form
  const multiplier = isHexString(value)
    ? BigInt(value as string) // valid hex string
    : BigInt((value as RandomNumber).asHex); // RandomNumber instance
  // The raw product can exceed the secp256k1 curve order, so reduce it mod n
  // to land back in the valid private key range
  const productModN = (BigInt(this.privateKeyHex) * multiplier) % CURVE.n;
  // Left-pad to a 32 byte hex string and wrap in a new KeyPair instance
  const privateKey = hexZeroPad(BigNumber.from(productModN).toHexString(), 32);
  return new KeyPair(privateKey);
}
// ================================================= STATIC METHODS ==================================================
/**
 * @notice Generate KeyPair instance asynchronously from a transaction hash
 * @param txHash Transaction hash to recover public key from
 * @param provider ethers provider to use
 */
static async instanceFromTransaction(txHash: string, provider: EthersProvider) {
  // Reject anything that is not a string of the expected transaction hash length
  const isValidHash = typeof txHash === 'string' && txHash.length === lengths.txHash;
  if (!isValidHash) {
    throw new Error('Invalid transaction hash provided');
  }
  // Recover the sender's public key from the transaction signature and wrap it in a KeyPair
  const publicKeyHex = await recoverPublicKeyFromTransaction(txHash, provider);
  return new KeyPair(publicKeyHex);
}
/**
 * @notice Takes an uncompressed public key and returns the compressed public key
 * @param publicKey Uncompressed public key, as hex string starting with 0x
 * @returns Object containing the prefix as an integer and compressed public key as hex, as separate parameters
 */
static compressPublicKey(publicKey: string): CompressedPublicKey {
  assertValidPoint(publicKey);
  // Compress the point; the result has no 0x prefix, so the parity prefix byte is '02' or '03'
  // and its significant digit sits at string index 1
  const compressed = Point.fromHex(publicKey.slice(2)).toHex(true);
  const prefix = Number(compressed[1]);
  const pubKeyXCoordinate = `0x${compressed.slice(2)}`;
  return { prefix, pubKeyXCoordinate };
}
/**
 * @notice Given the x-coordinate of a public key, without the identifying prefix bit, returns
 * the uncompressed public key assuming the identifying bit is 02
 * @dev We don't know if the identifying bit is 02 or 03 when uncompressing for the scanning use case, but it
 * doesn't actually matter since we are not deriving an address from the public key. We use the public key to
 * compute the shared secret to decrypt the random number, and since that involves multiplying this public key
 * by a private key, we can ensure the result is the same shared secret regardless of whether we assume the 02 or
 * 03 prefix by using the compressed form of the hex shared secret and ignoring the prefix. Therefore if no prefix
 * is provided, we can assume 02, and it's up to the user to make sure they are using this method safely. This is
 * done because it saves gas in the Umbra contract
 * @param pkx x-coordinate of compressed public key, as BigNumber or hex string
 * @param prefix Prefix bit, must be 2 or 3
 */
static getUncompressedFromX(pkx: BigNumber | string, prefix: number | string | undefined = undefined) {
  if (!(pkx instanceof BigNumber) && typeof pkx !== 'string') {
    throw new Error('Compressed public key must be a BigNumber or string');
  }
  // pkx as a 32 byte hex string without the 0x prefix
  const pkxHex = hexZeroPad(BigNumber.from(pkx).toHexString(), 32).slice(2);
  if (!prefix) {
    // Only safe to use this branch when the uncompressed key is used for scanning your funds;
    // see the @dev note above for why assuming the 02 prefix is acceptable in that case
    return `0x${Point.fromHex(`02${pkxHex}`).toHex()}`;
  }
  return `0x${Point.fromHex(`0${Number(prefix)}${pkxHex}`).toHex()}`;
}
}
import assert from "assert";
import Blockchain from "../../src/blockchain";
import { Tipset } from "../../src/things/tipset";
import IpfsHttpClient from "ipfs-http-client";
import { StartDealParams } from "../../src/things/start-deal-params";
import { StorageMarketDataRef } from "../../src/things/storage-market-data-ref";
import { RootCID } from "../../src/things/root-cid";
import {
dealIsInProcess,
StorageDealStatus
} from "../../src/types/storage-deal-status";
import { FilecoinOptionsConfig } from "@ganache/filecoin-options";
describe("Blockchain", () => {
// Tests that share two long-lived Blockchain instances (default config and a
// customized one), created once in before() and torn down in after().
describe("general", () => {
  let blockchain: Blockchain;
  let blockchain2: Blockchain;
  before(async () => {
    // Default options; the logger is silenced to keep test output clean
    blockchain = new Blockchain(
      FilecoinOptionsConfig.normalize({
        logging: {
          logger: {
            log: () => {}
          }
        }
      })
    );
    // Second chain on a different IPFS port with only 2 accounts
    blockchain2 = new Blockchain(
      FilecoinOptionsConfig.normalize({
        chain: {
          ipfsPort: 5002
        },
        wallet: {
          totalAccounts: 2
        },
        logging: {
          logger: {
            log: () => {}
          }
        }
      })
    );
    await blockchain.initialize();
    await blockchain2.initialize();
  });
  after(async () => {
    if (blockchain) {
      await blockchain.stop();
    }
    if (blockchain2) {
      await blockchain2.stop();
    }
  });
  it("creates multiple accounts", async () => {
    const accounts = await blockchain.accountManager.getControllableAccounts();
    // The default wallet configuration provides 10 distinct accounts
    assert.strictEqual(accounts.length, 10);
    assert.notStrictEqual(accounts[0].address, accounts[1].address);
  });
  it("creates a configurable amount of accounts", async () => {
    // blockchain2 was configured with wallet.totalAccounts: 2
    const accounts = await blockchain2.accountManager.getControllableAccounts();
    assert.strictEqual(accounts.length, 2);
  });
  it("creates new tipset with one block on creation", async () => {
    const genesis: Tipset = blockchain.genesisTipset();
    assert.strictEqual(genesis.height, 0);
    assert.strictEqual(genesis.blocks.length, 1);
  });
  it("mines a new tipset and creates parent/child relationship between blocks", async () => {
    await blockchain.mineTipset();
    const genesis: Tipset = blockchain.genesisTipset();
    const latest: Tipset = blockchain.latestTipset();
    assert.strictEqual(latest.height, 1, "Incorrect height!");
    assert(
      latest.blocks[0].parents[0].equals(genesis.cids[0]),
      "block in latest tipset should have genesis tipset as parent"
    );
  });
});
// Verifies that a miner configured with a blockTime mines on a wall-clock interval.
describe("interval mining", () => {
  it("will mine blocks on an interval", async function () {
    this.timeout(10000);
    const blockchain = new Blockchain(
      FilecoinOptionsConfig.normalize({
        miner: {
          blockTime: 0.1
        },
        logging: {
          logger: {
            log: () => {}
          }
        }
      })
    );
    try {
      await blockchain.initialize();
      // After 0.5 seconds, we should have at least 3 blocks and no more than 10 blocks
      // Github CI is so unpredictable with their burstable cpus
      await new Promise(resolve => setTimeout(resolve, 500));
      const latest: Tipset = blockchain.latestTipset();
      // Fix: the original condition used `||`, which is true for every possible
      // height and therefore could never fail; both bounds must hold.
      assert(
        latest.height >= 3 && latest.height <= 10,
        `Expected between 3 and 10 blocks to be mined, but got ${latest.height}`
      );
    } finally {
      // Fix: await the shutdown so the interval miner doesn't keep running
      // (and mining) while later tests execute.
      await blockchain.stop();
    }
  });
});
// Verifies that Blockchain.initialize() starts a working embedded IPFS server.
describe("ipfs server", () => {
  it("creates an ipfs server", async () => {
    const blockchain = new Blockchain(
      FilecoinOptionsConfig.normalize({
        logging: {
          logger: {
            log: () => {}
          }
        }
      })
    );
    try {
      await blockchain.initialize();
      // Connect a real IPFS HTTP client to the embedded server
      const ipfs = IpfsHttpClient({
        host: "localhost",
        port: blockchain.options.chain.ipfsPort,
        protocol: "http",
        apiPath: "/api/v0"
      });
      const testData = "this is some data!";
      const result = await ipfs.add({
        content: testData
      });
      const cid = result.path;
      // This is the exact CID expected from the test data.
      assert.strictEqual(
        cid,
        "QmRjSaq4CDRg4Rbj3wXXeuVVfVE1H3UeQzMt2WKjArh6V9"
      );
    } finally {
      await blockchain.stop();
    }
  });
});
// Walks a storage deal through its lifecycle, both with manual mining (one
// state transition per mined tipset) and with automining.
describe("deal state progression", () => {
  let blockchain: Blockchain;
  afterEach(async () => {
    await blockchain.stop();
  });
  it("advances state of in process deals on every block", async () => {
    // Manual mining (mine: false) so each state transition can be observed per tipset
    blockchain = new Blockchain(
      FilecoinOptionsConfig.normalize({
        miner: {
          mine: false
        },
        logging: {
          logger: {
            log: () => {}
          }
        }
      })
    );
    await blockchain.initialize();
    // Seed the embedded IPFS node with content for the deal to reference
    const result = await blockchain.ipfs!.add({
      content: "some data"
    });
    const accounts = await blockchain.accountManager.getControllableAccounts();
    const proposal = new StartDealParams({
      data: new StorageMarketDataRef({
        transferType: "graphsync",
        root: new RootCID({
          "/": result.path
        }),
        pieceSize: 0
      }),
      wallet: accounts[0].address,
      miner: blockchain.miner,
      epochPrice: 2500n,
      minBlocksDuration: 300
    });
    const { root: proposalCid } = await blockchain.startDeal(proposal);
    let currentDeal = await blockchain.dealInfoManager!.get(
      proposalCid.value
    );
    // First state should be validating
    assert.strictEqual(currentDeal.state, StorageDealStatus.Validating);
    await blockchain.mineTipset();
    currentDeal = await blockchain.dealInfoManager!.get(proposalCid.value);
    // Next state should be Staged
    assert.strictEqual(currentDeal.state, StorageDealStatus.Staged);
    await blockchain.mineTipset();
    currentDeal = await blockchain.dealInfoManager!.get(proposalCid.value);
    // Next state should be ReserveProviderFunds
    assert.strictEqual(
      currentDeal.state,
      StorageDealStatus.ReserveProviderFunds
    );
    // ... and on and on
    // Let's mine all the way to the Sealing state
    while (currentDeal.state != StorageDealStatus.Sealing) {
      await blockchain.mineTipset();
      currentDeal = await blockchain.dealInfoManager!.get(proposalCid.value);
    }
    // The deal should still be considered in process, since it's still sealing
    let deals = await blockchain.dealInfoManager.getDeals();
    let inProcessDeals = deals.filter(deal => dealIsInProcess(deal.state));
    assert.strictEqual(inProcessDeals.length, 1);
    assert.strictEqual(
      inProcessDeals[0].proposalCid.root.value,
      proposalCid.value
    );
    // Now let's mine the final tipset, making it active, and check to see that
    // the deal was pulled out of the in process array.
    await blockchain.mineTipset();
    currentDeal = await blockchain.dealInfoManager!.get(proposalCid.value);
    assert.strictEqual(currentDeal.state, StorageDealStatus.Active);
    deals = await blockchain.dealInfoManager.getDeals();
    inProcessDeals = deals.filter(deal => dealIsInProcess(deal.state));
    assert.strictEqual(inProcessDeals.length, 0);
  });
  it("fully advances the state of in process deals when automining", async () => {
    // blockTime: 0 enables automining: blocks are mined as needed, so the deal
    // runs through its whole lifecycle as part of startDeal
    blockchain = new Blockchain(
      FilecoinOptionsConfig.normalize({
        miner: {
          blockTime: 0
        },
        logging: {
          logger: {
            log: () => {}
          }
        }
      })
    );
    await blockchain.initialize();
    const result = await blockchain.ipfs!.add({
      content: "some data"
    });
    const accounts = await blockchain.accountManager.getControllableAccounts();
    const proposal = new StartDealParams({
      data: new StorageMarketDataRef({
        transferType: "graphsync",
        root: new RootCID({
          "/": result.path
        }),
        pieceSize: 0
      }),
      wallet: accounts[0].address,
      miner: blockchain.miner,
      epochPrice: 2500n,
      minBlocksDuration: 300
    });
    const { root: proposalCid } = await blockchain.startDeal(proposal);
    const deal = await blockchain.dealInfoManager!.get(proposalCid.value);
    // Since we're automining, starting the deal will trigger
    // the state to be set to active.
    assert.strictEqual(deal.state, StorageDealStatus.Active);
    // We create 1 tipset per state change. Let's make sure that occurred.
    assert.strictEqual(blockchain.tipsetManager.latest.height, 11);
  });
});
// Verifies that wallet account derivation is deterministic in the seed.
describe("determinism", () => {
  let blockchain: Blockchain;
  // Address that key derivation must produce for the exact seed "tim is a swell guy"
  const expectedAddress =
    "t3qdqduswwvsvq72iwppn2vytvq2mt7qi5nensswvawpdkmudnzxooi45edyflgnohrfvijy77pn66247nttzq";
  afterEach(async () => {
    if (blockchain) {
      await blockchain.stop();
    }
  });
  it("creates the expected address from seed", async () => {
    blockchain = new Blockchain(
      FilecoinOptionsConfig.normalize({
        wallet: {
          seed: "tim is a swell guy"
        },
        logging: {
          logger: {
            log: () => {}
          }
        }
      })
    );
    await blockchain.initialize();
    const accounts = await blockchain.accountManager.getControllableAccounts();
    assert.strictEqual(accounts[0].address.value, expectedAddress);
  });
  it("uses the seed to create a different level of determinism", async () => {
    // A different seed must yield a different first address
    blockchain = new Blockchain(
      FilecoinOptionsConfig.normalize({
        wallet: {
          seed: "tim is a swell person"
        },
        logging: {
          logger: {
            log: () => {}
          }
        }
      })
    );
    await blockchain.initialize();
    const accounts = await blockchain.accountManager.getControllableAccounts();
    assert.notStrictEqual(accounts[0].address.value, expectedAddress);
  });
});
});
import {
isNamedNode,
resolveLocalIri,
internal_isValidUrl,
} from "../datatypes";
import {
UrlString,
Url,
SolidClientError,
Thing,
ThingLocal,
ThingPersisted,
SolidDataset,
WithChangeLog,
IriString,
hasServerResourceInfo,
} from "../interfaces";
import { DataFactory, subjectToRdfJsQuads } from "../rdfjs.internal";
import { getSourceUrl } from "../resource/resource";
import {
internal_addAdditionsToChangeLog,
internal_addDeletionsToChangeLog,
internal_getReadableValue,
} from "./thing.internal";
import {
freeze,
getLocalNodeIri,
getLocalNodeName,
isBlankNodeId,
isLocalNodeIri,
LocalNodeIri,
} from "../rdf.internal";
import { internal_toIriString } from "../interfaces.internal";
import { getTermAll } from "./get";
/**
* @hidden Scopes are not yet consistently used in Solid and hence not properly implemented in this library yet (the add*() and set*() functions do not respect it yet), so we're not exposing these to developers at this point in time.
*/
export interface GetThingOptions {
/**
* Which Named Graph to extract the Thing from.
*
* If not specified, the Thing will include Quads from all Named Graphs in the given
* [[SolidDataset]].
**/
scope?: Url | UrlString;
}
export function getThing(
  solidDataset: SolidDataset,
  thingUrl: UrlString | Url,
  options?: GetThingOptions
): ThingPersisted | null;
export function getThing(
  solidDataset: SolidDataset,
  thingUrl: LocalNodeIri,
  options?: GetThingOptions
): ThingLocal | null;
export function getThing(
  solidDataset: SolidDataset,
  thingUrl: UrlString | Url | LocalNodeIri,
  options?: GetThingOptions
): Thing | null;
/**
 * Extract Quads with a given Subject from a [[SolidDataset]] into a [[Thing]].
 *
 * @param solidDataset The [[SolidDataset]] to extract the [[Thing]] from.
 * @param thingUrl The URL of the desired [[Thing]].
 * @param options Not yet implemented.
 */
export function getThing(
  solidDataset: SolidDataset,
  thingUrl: UrlString | Url | LocalNodeIri,
  options: GetThingOptions = {}
): Thing | null {
  if (!internal_isValidUrl(thingUrl)) {
    throw new ValidThingUrlExpectedError(thingUrl);
  }
  // Look in the default graph unless a scope was explicitly provided
  const graphName =
    typeof options.scope === "undefined"
      ? "default"
      : internal_toIriString(options.scope);
  const thingsByIri = solidDataset.graphs[graphName] ?? {};
  const thingIri = internal_toIriString(thingUrl);
  // A local (not-yet-persisted) IRI resolves against the dataset's source URL when known
  const resolvedThingIri =
    isLocalNodeIri(thingIri) && hasServerResourceInfo(solidDataset)
      ? resolveLocalIri(getLocalNodeName(thingIri), getSourceUrl(solidDataset))
      : thingIri;
  return thingsByIri[resolvedThingIri] ?? null;
}
/**
 * Get all [[Thing]]s about which a [[SolidDataset]] contains Quads.
 *
 * @param solidDataset The [[SolidDataset]] to extract the [[Thing]]s from.
 * @param options Not yet implemented.
 */
export function getThingAll(
  solidDataset: SolidDataset,
  options: GetThingOptions & {
    /**
     * Can Things local to the current dataset, and having no IRI, be returned ?
     */
    acceptBlankNodes?: boolean;
  } = { acceptBlankNodes: false }
): Thing[] {
  // Look in the default graph unless a scope was explicitly provided
  const graphName =
    typeof options.scope === "undefined"
      ? "default"
      : internal_toIriString(options.scope);
  const things = Object.values(solidDataset.graphs[graphName] ?? {});
  // Blank-node subjects are only included when the caller opted in
  return things.filter(
    (thing) => options.acceptBlankNodes || !isBlankNodeId(thing.url)
  );
}
/**
 * Insert a [[Thing]] into a [[SolidDataset]], replacing previous instances of that Thing.
 *
 * @param solidDataset The SolidDataset to insert a Thing into.
 * @param thing The Thing to insert into the given SolidDataset.
 * @returns A new SolidDataset equal to the given SolidDataset, but with the given Thing.
 */
export function setThing<Dataset extends SolidDataset>(
  solidDataset: Dataset,
  thing: Thing
): Dataset & WithChangeLog {
  // Resolve a local (not-yet-persisted) Thing IRI against the dataset's source URL when known
  const thingIri =
    isThingLocal(thing) && hasServerResourceInfo(solidDataset)
      ? resolveLocalIri(getLocalNodeName(thing.url), getSourceUrl(solidDataset))
      : thing.url;
  // Replace the Thing in an immutable copy of the default graph
  const updatedDataset = freeze({
    ...solidDataset,
    graphs: freeze({
      ...solidDataset.graphs,
      default: freeze({
        ...solidDataset.graphs.default,
        [thingIri]: freeze({ ...thing, url: thingIri }),
      }),
    }),
  });
  // Change log: the previous Quads for this subject (if any) become deletions,
  // the new Thing's Quads become additions.
  const subjectNode = DataFactory.namedNode(thingIri);
  const previousPredicates = solidDataset.graphs.default[thingIri]?.predicates;
  const deletions =
    typeof previousPredicates === "undefined"
      ? []
      : subjectToRdfJsQuads(
          previousPredicates,
          subjectNode,
          DataFactory.defaultGraph()
        );
  const additions = subjectToRdfJsQuads(
    thing.predicates,
    subjectNode,
    DataFactory.defaultGraph()
  );
  return internal_addAdditionsToChangeLog(
    internal_addDeletionsToChangeLog(updatedDataset, deletions),
    additions
  );
}
/**
 * Remove a Thing from a SolidDataset.
 *
 * @param solidDataset The SolidDataset to remove a Thing from.
 * @param thing The Thing to remove from `solidDataset`.
 * @returns A new [[SolidDataset]] equal to the input SolidDataset, excluding the given Thing.
 */
export function removeThing<Dataset extends SolidDataset>(
  solidDataset: Dataset,
  thing: UrlString | Url | Thing
): Dataset & WithChangeLog {
  // Normalise the input (NamedNode, plain string, ThingLocal or ThingPersisted) to an IRI string
  let thingIri: IriString;
  if (isNamedNode(thing)) {
    thingIri = thing.value;
  } else if (typeof thing === "string") {
    thingIri =
      isLocalNodeIri(thing) && hasServerResourceInfo(solidDataset)
        ? resolveLocalIri(getLocalNodeName(thing), getSourceUrl(solidDataset))
        : thing;
  } else if (isThingLocal(thing)) {
    thingIri = thing.url;
  } else {
    thingIri = asIri(thing);
  }
  // Drop the Thing from an immutable copy of the default graph
  const remainingThings = { ...solidDataset.graphs.default };
  delete remainingThings[thingIri];
  const updatedDataset = freeze({
    ...solidDataset,
    graphs: freeze({
      ...solidDataset.graphs,
      default: freeze(remainingThings),
    }),
  });
  // Record the removed Quads (if the Thing existed) as deletions in the change log
  const subjectNode = DataFactory.namedNode(thingIri);
  const removedPredicates = solidDataset.graphs.default[thingIri]?.predicates;
  const deletions =
    typeof removedPredicates === "undefined"
      ? []
      : subjectToRdfJsQuads(
          removedPredicates,
          subjectNode,
          DataFactory.defaultGraph()
        );
  return internal_addDeletionsToChangeLog(updatedDataset, deletions);
}
/** Pass these options to [[createThing]] to initialise a new [[Thing]] whose URL will be determined when it is saved. */
export type CreateThingLocalOptions = {
/**
* The name that should be used for this [[Thing]] when constructing its URL.
*
* If not provided, a random one will be generated.
*/
name?: string;
};
/** Pass these options to [[createThing]] to initialise a new [[Thing]] whose URL is already known. */
export type CreateThingPersistedOptions = {
/**
* The URL of the newly created [[Thing]].
*/
url: UrlString;
};
/** The options you pass to [[createThing]].
* - To specify the URL for the initialised Thing, pass [[CreateThingPersistedOptions]].
* - To have the URL determined during the save, pass [[CreateThingLocalOptions]].
*/
export type CreateThingOptions =
| CreateThingLocalOptions
| CreateThingPersistedOptions;
/**
 * Initialise a new [[Thing]] in memory with a given URL.
 *
 * @param options See [[CreateThingPersistedOptions]] for how to specify the new [[Thing]]'s URL.
 */
export function createThing(
  options: CreateThingPersistedOptions
): ThingPersisted;
/**
 * Initialise a new [[Thing]] in memory.
 *
 * @param options Optional parameters that affect the final URL of this [[Thing]] when saved.
 */
export function createThing(options?: CreateThingLocalOptions): ThingLocal;
export function createThing(options?: CreateThingOptions): Thing;
export function createThing(options: CreateThingOptions = {}): Thing {
  const persistedUrl = (options as CreateThingPersistedOptions).url;
  if (typeof persistedUrl !== "undefined") {
    // A URL was given up front: validate it and return a persisted Thing
    if (!internal_isValidUrl(persistedUrl)) {
      throw new ValidThingUrlExpectedError(persistedUrl);
    }
    const persistedThing: ThingPersisted = freeze({
      type: "Subject",
      predicates: freeze({}),
      url: persistedUrl,
    });
    return persistedThing;
  }
  // No URL yet: mint a local node IRI from the given (or a freshly generated) name
  const name = (options as CreateThingLocalOptions).name ?? generateName();
  const localThing: ThingLocal = freeze({
    type: "Subject",
    predicates: freeze({}),
    url: getLocalNodeIri(name),
  });
  return localThing;
}
/**
 * @param input An value that might be a [[Thing]].
 * @returns Whether `input` is a Thing.
 * @since 0.2.0
 */
export function isThing<X>(input: X | Thing): input is Thing {
  // A Thing is a non-null object whose `type` property is the literal string "Subject"
  if (typeof input !== "object" || input === null) {
    return false;
  }
  return (input as Thing).type === "Subject";
}
type IsNotThingLocal<T extends Thing> = T extends ThingLocal ? never : T;
/**
 * Get the URL to a given [[Thing]].
 *
 * @param thing The [[Thing]] you want to obtain the URL from.
 * @param baseUrl If `thing` is not persisted yet, the base URL that should be used to construct this [[Thing]]'s URL.
 */
export function asUrl(thing: ThingLocal, baseUrl: UrlString): UrlString;
export function asUrl<T extends ThingPersisted>(
  thing: T & IsNotThingLocal<T>
): UrlString;
export function asUrl(thing: Thing, baseUrl: UrlString): UrlString;
export function asUrl(thing: Thing, baseUrl?: UrlString): UrlString {
  // A persisted Thing already knows its URL
  if (!isThingLocal(thing)) {
    return thing.url;
  }
  // A local Thing only has a name; a base URL is required to resolve it
  if (typeof baseUrl === "undefined") {
    throw new Error(
      "The URL of a Thing that has not been persisted cannot be determined without a base URL."
    );
  }
  return resolveLocalIri(getLocalNodeName(thing.url), baseUrl);
}
/** @hidden Alias of [[asUrl]] for those who prefer IRI terminology. */
export const asIri = asUrl;
/**
 * Gets a human-readable representation of the given Thing to aid debugging.
 *
 * Note that changes to the exact format of the return value are not considered a breaking change;
 * it is intended to aid in debugging, not as a serialisation method that can be reliably parsed.
 *
 * @param thing The Thing to get a human-readable representation of.
 * @since 0.3.0
 */
export function thingAsMarkdown(thing: Thing): string {
  const lines: string[] = [];
  // Heading: either the persisted URL or the local identifier
  if (isThingLocal(thing)) {
    lines.push(
      `## Thing (no URL yet — identifier: \`#${getLocalNodeName(thing.url)}\`)\n`
    );
  } else {
    lines.push(`## Thing: ${thing.url}\n`);
  }
  const predicateIris = Object.keys(thing.predicates);
  if (predicateIris.length === 0) {
    lines.push("\n<empty>\n");
  } else {
    // One section per property, with one bullet per value
    for (const predicate of predicateIris) {
      lines.push(`\nProperty: ${predicate}\n`);
      for (const value of getTermAll(thing, predicate)) {
        lines.push(`- ${internal_getReadableValue(value)}\n`);
      }
    }
  }
  return lines.join("");
}
/**
 * @param thing The [[Thing]] of which a URL might or might not be known.
 * @return `true` if `thing` has no known URL yet.
 * @since 1.7.0
 */
export function isThingLocal(
  thing: ThingPersisted | ThingLocal
): thing is ThingLocal {
  // A Thing is "local" exactly when its URL is a local-node IRI placeholder
  const { url } = thing;
  return isLocalNodeIri(url);
}
/**
 * This error is thrown when a function expected to receive a [[Thing]] but received something else.
 * @since 1.2.0
 */
export class ThingExpectedError extends SolidClientError {
  /** The value that was passed where a Thing was expected. */
  public readonly receivedValue: unknown;

  constructor(receivedValue: unknown) {
    super(`Expected a Thing, but received: [${receivedValue}].`);
    this.receivedValue = receivedValue;
  }
}
/**
 * This error is thrown when a function expected to receive a valid URL to identify a property but received something else.
 */
export class ValidPropertyUrlExpectedError extends SolidClientError {
  /** The invalid value passed as a property URL (a NamedNode is unwrapped to its IRI). */
  public readonly receivedProperty: unknown;

  constructor(receivedValue: unknown) {
    // Report the IRI rather than the NamedNode wrapper in the message
    const value = isNamedNode(receivedValue)
      ? receivedValue.value
      : receivedValue;
    super(`Expected a valid URL to identify a property, but received: [${value}].`);
    this.receivedProperty = value;
  }
}
/**
 * This error is thrown when a function expected to receive a valid URL value but received something else.
 */
export class ValidValueUrlExpectedError extends SolidClientError {
  /** The invalid value passed as a URL value (a NamedNode is unwrapped to its IRI). */
  public readonly receivedValue: unknown;

  constructor(receivedValue: unknown) {
    // Report the IRI rather than the NamedNode wrapper in the message
    const value = isNamedNode(receivedValue)
      ? receivedValue.value
      : receivedValue;
    super(`Expected a valid URL value, but received: [${value}].`);
    this.receivedValue = value;
  }
}
/**
 * This error is thrown when a function expected to receive a valid URL to identify a [[Thing]] but received something else.
 */
export class ValidThingUrlExpectedError extends SolidClientError {
  /** The invalid value passed as a Thing URL (a NamedNode is unwrapped to its IRI). */
  public readonly receivedValue: unknown;

  constructor(receivedValue: unknown) {
    // Report the IRI rather than the NamedNode wrapper in the message
    const value = isNamedNode(receivedValue)
      ? receivedValue.value
      : receivedValue;
    super(`Expected a valid URL to identify a Thing, but received: [${value}].`);
    this.receivedValue = value;
  }
}
/**
* Generate a string that can be used as the unique identifier for a Thing
*
* This function works by starting with a date string (so that Things can be
* sorted chronologically), followed by a random number generated by taking a
* random number between 0 and 1, and cutting off the `0.`.
*
* @internal
* @returns An string that's likely to be unique
*/
const generateName = () => {
return (
Date.now().toString() + Math.random().toString().substring("0.".length)
);
}; | the_stack |
import { Action, ActionFunctionAny, createActions, handleActions } from 'redux-actions';
import DetailState from './SpanDetail/DetailState';
import { TNil } from '../../../types';
import { Log, Span, Trace } from '../../../types/trace';
import TTraceTimeline from '../../../types/TTraceTimeline';
import filterSpans from '../../../utils/filter-spans';
import generateActionTypes from '../../../utils/generate-action-types';
import guardReducer from '../../../utils/guardReducer';
import spanAncestorIds from '../../../utils/span-ancestor-ids';
// payloads
export type TSpanIdLogValue = { logItem: Log; spanID: string };
export type TSpanIdValue = { spanID: string };
type TSpansValue = { spans: Span[] };
type TTraceUiFindValue = { trace: Trace; uiFind: string | TNil; allowHide?: boolean };
export type TWidthValue = { width: number };
export type TActionTypes =
| TSpanIdLogValue
| TSpanIdValue
| TSpansValue
| TTraceUiFindValue
| TWidthValue
| {};
type TTimelineViewerActions = {
[actionName: string]: ActionFunctionAny<Action<TActionTypes>>;
};
// Collapse actions are no-ops when every span that has children is already hidden.
function shouldDisableCollapse(allSpans: Span[], hiddenSpansIds: Set<string>) {
  const parentCount = allSpans.filter(span => span.hasChildren).length;
  return parentCount === hiddenSpansIds.size;
}
/**
 * Fresh timeline-viewer UI state: nothing collapsed, no detail rows open,
 * default span-name column width, and no trace loaded yet.
 */
export function newInitialState(): TTraceTimeline {
  const initialState: TTraceTimeline = {
    childrenHiddenIDs: new Set(),
    detailStates: new Map(),
    hoverIndentGuideIds: new Set(),
    shouldScrollToFirstUiFindMatch: false,
    spanNameColumnWidth: 0.25,
    traceID: null,
  };
  return initialState;
}
// Action type strings, namespaced under '@jaeger-ui/trace-timeline-viewer' by
// the generateActionTypes helper.
export const actionTypes = generateActionTypes('@jaeger-ui/trace-timeline-viewer', [
  'ADD_HOVER_INDENT_GUIDE_ID',
  'CHILDREN_TOGGLE',
  'CLEAR_SHOULD_SCROLL_TO_FIRST_UI_FIND_MATCH',
  'COLLAPSE_ALL',
  'COLLAPSE_ONE',
  'DETAIL_TOGGLE',
  'DETAIL_TAGS_TOGGLE',
  'DETAIL_PROCESS_TOGGLE',
  'DETAIL_LOGS_TOGGLE',
  'DETAIL_LOG_ITEM_TOGGLE',
  'DETAIL_WARNINGS_TOGGLE',
  'DETAIL_REFERENCES_TOGGLE',
  'EXPAND_ALL',
  'EXPAND_ONE',
  'FOCUS_UI_FIND_MATCHES',
  'REMOVE_HOVER_INDENT_GUIDE_ID',
  'SET_SPAN_NAME_COLUMN_WIDTH',
  'SET_TRACE',
]);
// Action creators keyed by the namespaced action types; each creator maps its
// arguments onto the corresponding payload shape from TActionTypes.
const fullActions = createActions<TActionTypes>({
  [actionTypes.ADD_HOVER_INDENT_GUIDE_ID]: (spanID: string) => ({ spanID }),
  [actionTypes.CHILDREN_TOGGLE]: (spanID: string) => ({ spanID }),
  [actionTypes.CLEAR_SHOULD_SCROLL_TO_FIRST_UI_FIND_MATCH]: () => ({}),
  [actionTypes.COLLAPSE_ALL]: (spans: Span[]) => ({ spans }),
  [actionTypes.COLLAPSE_ONE]: (spans: Span[]) => ({ spans }),
  [actionTypes.DETAIL_LOG_ITEM_TOGGLE]: (spanID: string, logItem: Log) => ({ logItem, spanID }),
  [actionTypes.DETAIL_LOGS_TOGGLE]: (spanID: string) => ({ spanID }),
  [actionTypes.EXPAND_ALL]: () => ({}),
  [actionTypes.EXPAND_ONE]: (spans: Span[]) => ({ spans }),
  [actionTypes.DETAIL_PROCESS_TOGGLE]: (spanID: string) => ({ spanID }),
  [actionTypes.DETAIL_WARNINGS_TOGGLE]: (spanID: string) => ({ spanID }),
  [actionTypes.DETAIL_REFERENCES_TOGGLE]: (spanID: string) => ({ spanID }),
  [actionTypes.DETAIL_TAGS_TOGGLE]: (spanID: string) => ({ spanID }),
  [actionTypes.DETAIL_TOGGLE]: (spanID: string) => ({ spanID }),
  [actionTypes.FOCUS_UI_FIND_MATCHES]: (trace: Trace, uiFind: string | TNil, allowHide?: boolean) => ({
    trace,
    uiFind,
    allowHide,
  }),
  [actionTypes.REMOVE_HOVER_INDENT_GUIDE_ID]: (spanID: string) => ({ spanID }),
  [actionTypes.SET_SPAN_NAME_COLUMN_WIDTH]: (width: number) => ({ width }),
  [actionTypes.SET_TRACE]: (trace: Trace, uiFind: string | TNil) => ({ trace, uiFind }),
});
// createActions nests creators by namespace; unwrap to the flat map of creators.
export const actions = (fullActions as any).jaegerUi.traceTimelineViewer as TTimelineViewerActions;
// Given a uiFind filter, compute row states that make every match visible:
// optionally start fully collapsed, then un-collapse each match's ancestors and
// open a detail row for the match itself.
function calculateFocusedFindRowStates(uiFind: string, spans: Span[], allowHide: boolean = true) {
  const spansById = new Map();
  const childrenHiddenIDs: Set<string> = new Set();
  const detailStates: Map<string, DetailState> = new Map();
  let shouldScrollToFirstUiFindMatch: boolean = false;
  for (const span of spans) {
    spansById.set(span.spanID, span);
    // Start from a fully collapsed timeline unless hiding is disallowed
    if (allowHide) {
      childrenHiddenIDs.add(span.spanID);
    }
  }
  const matchedSpanIds = filterSpans(uiFind, spans);
  if (matchedSpanIds && matchedSpanIds.size) {
    matchedSpanIds.forEach(spanID => {
      const matchedSpan = spansById.get(spanID);
      detailStates.set(spanID, new DetailState());
      // Un-collapse every ancestor so the matching row is actually visible
      spanAncestorIds(matchedSpan).forEach(ancestorID => childrenHiddenIDs.delete(ancestorID));
    });
    shouldScrollToFirstUiFindMatch = true;
  }
  return {
    childrenHiddenIDs,
    detailStates,
    shouldScrollToFirstUiFindMatch,
  };
}
// Apply find-match focusing to the current state; no-op when there is no search term.
function focusUiFindMatches(state: TTraceTimeline, { uiFind, trace, allowHide }: TTraceUiFindValue) {
  if (!uiFind) {
    return state;
  }
  const focusedRowStates = calculateFocusedFindRowStates(uiFind, trace.spans, allowHide);
  return { ...state, ...focusedRowStates };
}
// Reset the scroll-to-match flag, preserving referential identity when already cleared.
function clearShouldScrollToFirstUiFindMatch(state: TTraceTimeline) {
  if (!state.shouldScrollToFirstUiFindMatch) {
    return state;
  }
  return { ...state, shouldScrollToFirstUiFindMatch: false };
}
// Install a new trace: reset everything except the user's column width, and
// pre-focus any uiFind matches. Re-setting the same trace is a no-op.
function setTrace(state: TTraceTimeline, { uiFind, trace }: TTraceUiFindValue) {
  const { traceID, spans } = trace;
  if (traceID === state.traceID) {
    return state;
  }
  const base = { ...newInitialState(), spanNameColumnWidth: state.spanNameColumnWidth, traceID };
  return uiFind ? Object.assign(base, calculateFocusedFindRowStates(uiFind, spans)) : base;
}
// Persist the dragged span-name column width (a fraction of the total width).
function setColumnWidth(state: TTraceTimeline, { width }: TWidthValue): TTraceTimeline {
  const nextState = { ...state };
  nextState.spanNameColumnWidth = width;
  return nextState;
}
// Toggle whether the given span's children are collapsed.
function childrenToggle(state: TTraceTimeline, { spanID }: TSpanIdValue): TTraceTimeline {
  const childrenHiddenIDs = new Set(state.childrenHiddenIDs);
  // Set.delete returns false when the ID wasn't present, meaning we should hide it
  const wasHidden = childrenHiddenIDs.delete(spanID);
  if (!wasHidden) {
    childrenHiddenIDs.add(spanID);
  }
  return { ...state, childrenHiddenIDs };
}
// Expand every span by replacing the hidden-children set with an empty one.
export function expandAll(state: TTraceTimeline): TTraceTimeline {
  return { ...state, childrenHiddenIDs: new Set<string>() };
}
// Collapse every span that has children; no-op when everything is already collapsed.
export function collapseAll(state: TTraceTimeline, { spans }: TSpansValue) {
  if (shouldDisableCollapse(spans, state.childrenHiddenIDs)) {
    return state;
  }
  const childrenHiddenIDs = new Set<string>();
  for (const span of spans) {
    if (span.hasChildren) {
      childrenHiddenIDs.add(span.spanID);
    }
  }
  return { ...state, childrenHiddenIDs };
}
// Collapse one level: hide the children of the deepest currently-expanded parent
// in each subtree. Relies on `spans` being in depth-first render order.
export function collapseOne(state: TTraceTimeline, { spans }: TSpansValue) {
  if (shouldDisableCollapse(spans, state.childrenHiddenIDs)) {
    return state;
  }
  // The parent span most recently chosen as a collapse candidate in the current subtree
  let nearestCollapsedAncestor: Span | undefined;
  const childrenHiddenIDs = spans.reduce((res, curSpan) => {
    if (nearestCollapsedAncestor && curSpan.depth <= nearestCollapsedAncestor.depth) {
      // We've left the candidate's subtree: commit it as collapsed, and start
      // tracking the new subtree's parent (if this span is one)
      res.add(nearestCollapsedAncestor.spanID);
      if (curSpan.hasChildren) {
        nearestCollapsedAncestor = curSpan;
      }
    } else if (curSpan.hasChildren && !res.has(curSpan.spanID)) {
      // A deeper, not-yet-collapsed parent becomes the new candidate
      nearestCollapsedAncestor = curSpan;
    }
    return res;
  }, new Set(state.childrenHiddenIDs));
  // The last one
  if (nearestCollapsedAncestor) {
    childrenHiddenIDs.add(nearestCollapsedAncestor.spanID);
  }
  return { ...state, childrenHiddenIDs };
}
// Expand one level: un-hide the first (shallowest) hidden span encountered in
// each collapsed subtree. Relies on `spans` being in depth-first render order.
export function expandOne(state: TTraceTimeline, { spans }: TSpansValue) {
  if (state.childrenHiddenIDs.size === 0) {
    return state;
  }
  const childrenHiddenIDs = new Set(state.childrenHiddenIDs);
  let prevExpandedDepth = -1;
  let expandNextHiddenSpan = true;
  for (const span of spans) {
    // Returning to a shallower-or-equal depth means we've entered a new subtree
    if (span.depth <= prevExpandedDepth) {
      expandNextHiddenSpan = true;
    }
    if (expandNextHiddenSpan && childrenHiddenIDs.has(span.spanID)) {
      childrenHiddenIDs.delete(span.spanID);
      expandNextHiddenSpan = false;
      prevExpandedDepth = span.depth;
    }
  }
  return { ...state, childrenHiddenIDs };
}
// Open a span's detail row if it's closed, or close it if it's open.
function detailToggle(state: TTraceTimeline, { spanID }: TSpanIdValue) {
  const detailStates = new Map(state.detailStates);
  // Map.delete returns false when no detail row was open, so open a fresh one
  if (!detailStates.delete(spanID)) {
    detailStates.set(spanID, new DetailState());
  }
  return { ...state, detailStates };
}
// Toggle one collapsible subsection of an already-open detail row; no-op when
// the detail row for the span is not open.
function detailSubsectionToggle(
  subSection: 'tags' | 'process' | 'logs' | 'warnings' | 'references',
  state: TTraceTimeline,
  { spanID }: TSpanIdValue
) {
  const old = state.detailStates.get(spanID);
  if (!old) {
    return state;
  }
  let detailState;
  switch (subSection) {
    case 'tags':
      detailState = old.toggleTags();
      break;
    case 'process':
      detailState = old.toggleProcess();
      break;
    case 'warnings':
      detailState = old.toggleWarnings();
      break;
    case 'references':
      detailState = old.toggleReferences();
      break;
    default:
      // 'logs' (and, matching the original's fallback, anything else)
      detailState = old.toggleLogs();
  }
  const detailStates = new Map(state.detailStates);
  detailStates.set(spanID, detailState);
  return { ...state, detailStates };
}
const detailTagsToggle = detailSubsectionToggle.bind(null, 'tags');
const detailProcessToggle = detailSubsectionToggle.bind(null, 'process');
const detailLogsToggle = detailSubsectionToggle.bind(null, 'logs');
const detailWarningsToggle = detailSubsectionToggle.bind(null, 'warnings');
const detailReferencesToggle = detailSubsectionToggle.bind(null, 'references');
/**
 * Toggle a single log item within a span's detail panel. No-op when the
 * span's detail panel is not open.
 */
function detailLogItemToggle(state: TTraceTimeline, { spanID, logItem }: TSpanIdLogValue) {
  const current = state.detailStates.get(spanID);
  if (!current) {
    return state;
  }
  const detailStates = new Map(state.detailStates);
  detailStates.set(spanID, current.toggleLogItem(logItem));
  return { ...state, detailStates };
}
/** Add a span ID to the set of hovered indent guides (copy-on-write). */
function addHoverIndentGuideId(state: TTraceTimeline, { spanID }: TSpanIdValue) {
  const hoverIndentGuideIds = new Set(state.hoverIndentGuideIds);
  hoverIndentGuideIds.add(spanID);
  return { ...state, hoverIndentGuideIds };
}
/** Remove a span ID from the set of hovered indent guides (copy-on-write). */
function removeHoverIndentGuideId(state: TTraceTimeline, { spanID }: TSpanIdValue) {
  const hoverIndentGuideIds = new Set(state.hoverIndentGuideIds);
  hoverIndentGuideIds.delete(spanID);
  return { ...state, hoverIndentGuideIds };
}
// Root reducer for the trace-timeline slice: maps each action type to its
// reducer, starting from `newInitialState()`. Every reducer is wrapped in
// `guardReducer` — presumably a payload guard/validator; confirm against its
// definition elsewhere in this file.
export default handleActions(
  {
    [actionTypes.ADD_HOVER_INDENT_GUIDE_ID]: guardReducer(addHoverIndentGuideId),
    [actionTypes.CHILDREN_TOGGLE]: guardReducer(childrenToggle),
    [actionTypes.CLEAR_SHOULD_SCROLL_TO_FIRST_UI_FIND_MATCH]: guardReducer(
      clearShouldScrollToFirstUiFindMatch
    ),
    [actionTypes.COLLAPSE_ALL]: guardReducer(collapseAll),
    [actionTypes.COLLAPSE_ONE]: guardReducer(collapseOne),
    [actionTypes.DETAIL_LOGS_TOGGLE]: guardReducer(detailLogsToggle),
    [actionTypes.DETAIL_LOG_ITEM_TOGGLE]: guardReducer(detailLogItemToggle),
    [actionTypes.DETAIL_PROCESS_TOGGLE]: guardReducer(detailProcessToggle),
    [actionTypes.DETAIL_WARNINGS_TOGGLE]: guardReducer(detailWarningsToggle),
    [actionTypes.DETAIL_REFERENCES_TOGGLE]: guardReducer(detailReferencesToggle),
    [actionTypes.DETAIL_TAGS_TOGGLE]: guardReducer(detailTagsToggle),
    [actionTypes.DETAIL_TOGGLE]: guardReducer(detailToggle),
    [actionTypes.EXPAND_ALL]: guardReducer(expandAll),
    [actionTypes.EXPAND_ONE]: guardReducer(expandOne),
    [actionTypes.FOCUS_UI_FIND_MATCHES]: guardReducer(focusUiFindMatches),
    [actionTypes.REMOVE_HOVER_INDENT_GUIDE_ID]: guardReducer(removeHoverIndentGuideId),
    [actionTypes.SET_SPAN_NAME_COLUMN_WIDTH]: guardReducer(setColumnWidth),
    [actionTypes.SET_TRACE]: guardReducer(setTrace),
  },
  newInitialState()
);
import { DataGrid, SelectionModel } from '@lumino/datagrid';
import { LabIcon, addIcon } from '@jupyterlab/ui-components';
import { EditorModel } from './model';
/**
 * A DataGrid that additionally paints a "ghost" row and "ghost" column
 * (affordances for appending data) plus per-column datatype icons in the
 * column headers.
 */
export class PaintedGrid extends DataGrid {
  constructor(options: PaintedGrid.IOptions) {
    super(options);
    this._extraStyle = options.extraStyle || Private.defaultExtraStyle;
  }
  /**
   * Get the extra styles of the PaintedGrid.
   */
  get extraStyle(): PaintedGrid.ExtraStyle {
    return this._extraStyle;
  }
  /**
   * Set the styles of the painted grid and schedule a repaint.
   */
  set extraStyle(value: PaintedGrid.ExtraStyle) {
    // Bail if the style does not change.
    if (this._extraStyle === value) {
      return;
    }
    // Update the internal style (shallow copy so later caller mutations
    // of `value` do not leak into the grid).
    this._extraStyle = { ...value };
    // Schedule a repaint of the content.
    this.repaintContent();
    // Schedule a repaint of the overlay.
    this.repaintOverlay();
  }
  /**
   * Get the ghost row height.
   */
  get ghostRowHeight(): number {
    return this.defaultSizes.rowHeight;
  }
  /**
   * Get the ghost column width.
   */
  get ghostColumnWidth(): number {
    return this.defaultSizes.columnWidth;
  }
  /**
   * Select a rectangular range of cells via the selection model.
   *
   * @param selection The (r1, r2, c1, c2) cell range to select. The cursor
   * is placed at (r1, c1) and any prior selection is cleared.
   */
  selectCells(selection: SelectionModel.Selection): void {
    // Bail if no selection.
    if (!selection) {
      return;
    }
    const { r1, r2, c1, c2 } = selection;
    const select: SelectionModel.SelectArgs = {
      r1,
      r2,
      c1,
      c2,
      cursorRow: r1,
      cursorColumn: c1,
      clear: 'all'
    };
    this.selectionModel.select(select);
  }
  /**
   * @override paints on the ghost row and column as well after painting the other regions.
   * Paint the grid content for the given dirty rect.
   *
   * The rect should be expressed in valid viewport coordinates.
   *
   * This is the primary paint entry point. The individual `_draw*`
   * methods should not be invoked directly. This method dispatches
   * to the drawing methods in the correct order.
   */
  paintContent(rx: number, ry: number, rw: number, rh: number): void {
    // Paint in the background, rows, columns, and cells first.
    super.paintContent(rx, ry, rw, rh);
    // Paint addons.
    this._paintAddons(rx, ry, rw, rh);
  }
  /**
   * Primary entry point for painting additional graphics on top of
   * the base data grid graphics.
   */
  private _paintAddons(rx: number, ry: number, rw: number, rh: number): void {
    // Draw the ghost row.
    this._drawGhostRow(rx, ry, rw, rh);
    // Draw the header region for the ghost row.
    this._drawGhostRowHeader(rx, ry, rw, rh);
    // Draw the ghost column.
    this._drawGhostColumn(rx, ry, rw, rh);
    // Draw the header region for the ghost column.
    this._drawGhostColumnHeader(rx, ry, rw, rh);
    // Draw over the corner to hide it from view.
    this._drawOverCorner(rx, ry, rw, rh);
    // Draw the datatype icons once the model reports formatted data.
    const model = this.dataModel as EditorModel;
    if (model && model.isDataFormatted) {
      this._paintDatatypeIcons(rx, ry, rw, rh);
      this.drawCornerHeaderRegion(0, 0, this.headerWidth, this.headerHeight);
    }
  }
  /**
   * Draw the ghost row over the last body row.
   *
   * @param rx - Dirty rect x origin (viewport coordinates).
   * @param ry - Dirty rect y origin.
   * @param rw - Dirty rect width.
   * @param rh - Dirty rect height.
   */
  private _drawGhostRow(rx: number, ry: number, rw: number, rh: number): void {
    // Get the visible content dimensions.
    const contentW = this.bodyWidth - this.scrollX;
    const contentH = this.defaultSizes.rowHeight;
    // Bail if there is no content to draw.
    if (contentW <= 0 || contentH <= 0) {
      return;
    }
    // Get the visible content origin.
    const contentX = this.headerWidth;
    const contentY =
      this.headerHeight + this.bodyHeight - contentH - this.scrollY;
    // Bail if the dirty rect does not intersect the content area.
    if (rx + rw <= contentX) {
      return;
    }
    if (ry + rh <= contentY) {
      return;
    }
    if (rx >= contentX + contentW) {
      return;
    }
    if (ry >= contentY + contentH) {
      return;
    }
    // Get the upper and lower bounds of the dirty content area.
    const x1 = Math.max(rx, contentX);
    const y1 = Math.max(ry, contentY);
    const x2 = Math.min(rx + rw - 1, contentX + contentW - 1);
    const y2 = Math.min(ry + rh - 1, contentY + contentH - 1);
    // Fill the region with the specified color.
    this.canvasGC.fillStyle = this._extraStyle.ghostRowColor;
    this.canvasGC.fillRect(x1, y1, x2 - x1 + 1, y2 - y1 + 1);
  }
  /**
   * Draw the ghost column over the last body column.
   *
   * @param rx - Dirty rect x origin (viewport coordinates).
   * @param ry - Dirty rect y origin.
   * @param rw - Dirty rect width.
   * @param rh - Dirty rect height.
   */
  private _drawGhostColumn(
    rx: number,
    ry: number,
    rw: number,
    rh: number
  ): void {
    // Get the visible content dimensions.
    const contentW = this.defaultSizes.columnWidth;
    const contentH = this.bodyHeight - this.scrollY;
    // Bail if there is no content to draw.
    if (contentW <= 0 || contentH <= 0) {
      return;
    }
    // Get the visible content origin.
    const contentX =
      this.headerWidth + this.bodyWidth - contentW - this.scrollX;
    const contentY = this.headerHeight;
    // Bail if the dirty rect does not intersect the content area.
    if (rx + rw <= contentX) {
      return;
    }
    if (ry + rh <= contentY) {
      return;
    }
    if (rx >= contentX + contentW) {
      return;
    }
    if (ry >= contentY + contentH) {
      return;
    }
    // Get the upper and lower bounds of the dirty content area.
    const x1 = Math.max(rx, contentX);
    const y1 = Math.max(ry, contentY);
    const x2 = Math.min(rx + rw - 1, contentX + contentW - 1);
    const y2 = Math.min(ry + rh - 1, contentY + contentH - 1);
    // Fill the region with the specified color.
    this.canvasGC.fillStyle = this._extraStyle.ghostColumnColor;
    this.canvasGC.fillRect(x1, y1, x2 - x1 + 1, y2 - y1 + 1);
  }
  /**
   * Draw the row-header segment of the ghost row.
   *
   * @param rx - Dirty rect x origin (viewport coordinates).
   * @param ry - Dirty rect y origin.
   * @param rw - Dirty rect width.
   * @param rh - Dirty rect height.
   */
  private _drawGhostRowHeader(
    rx: number,
    ry: number,
    rw: number,
    rh: number
  ): void {
    // Get the visible content dimensions.
    const contentW = this.headerWidth;
    const contentH = this.defaultSizes.rowHeight;
    // Bail if there is no content to draw.
    if (contentW <= 0 || contentH <= 0) {
      return;
    }
    // Get the visible content origin.
    const contentX = 0;
    const contentY =
      this.headerHeight + this.bodyHeight - contentH - this.scrollY;
    // Bail if the dirty rect does not intersect the content area.
    if (rx + rw <= contentX) {
      return;
    }
    if (ry + rh <= contentY) {
      return;
    }
    if (rx >= contentX + contentW) {
      return;
    }
    if (ry >= contentY + contentH) {
      return;
    }
    // Get the upper and lower bounds of the dirty content area.
    // (contentX is 0, so clamping against it matches the sibling methods.)
    const x1 = Math.max(rx, contentX);
    const y1 = Math.max(ry, contentY);
    const x2 = Math.min(rx + rw - 1, contentX + contentW - 1);
    const y2 = Math.min(ry + rh - 1, contentY + contentH - 1);
    // Fill the region with the specified color.
    this.canvasGC.fillStyle = this._extraStyle.ghostRowColor;
    this.canvasGC.fillRect(x1, y1, x2 - x1 + 1, y2 - y1 + 1);
    this._paintGhostRowIcon();
  }
  /**
   * Draw the column-header segment of the ghost column.
   *
   * @param rx - Dirty rect x origin (viewport coordinates).
   * @param ry - Dirty rect y origin.
   * @param rw - Dirty rect width.
   * @param rh - Dirty rect height.
   */
  private _drawGhostColumnHeader(
    rx: number,
    ry: number,
    rw: number,
    rh: number
  ): void {
    // Get the visible content dimensions.
    const contentW = this.defaultSizes.columnWidth;
    const contentH = this.headerHeight;
    // Bail if there is no content to draw.
    if (contentW <= 0 || contentH <= 0) {
      return;
    }
    // Get the visible content origin.
    const contentX =
      this.headerWidth + this.bodyWidth - contentW - this.scrollX;
    const contentY = 0;
    // Bail if the dirty rect does not intersect the content area.
    if (rx + rw <= contentX) {
      return;
    }
    if (ry + rh <= contentY) {
      return;
    }
    if (rx >= contentX + contentW) {
      return;
    }
    if (ry >= contentY + contentH) {
      return;
    }
    // Get the upper and lower bounds of the dirty content area.
    const x1 = Math.max(rx, contentX);
    const y1 = Math.max(ry, contentY);
    const x2 = Math.min(rx + rw - 1, contentX + contentW - 1);
    const y2 = Math.min(ry + rh - 1, contentY + contentH - 1);
    // Fill the region with the specified color.
    this.canvasGC.fillStyle = this._extraStyle.ghostColumnColor;
    this.canvasGC.fillRect(x1, y1, x2 - x1 + 1, y2 - y1 + 1);
    this._paintGhostColumnIcon();
  }
  /**
   * Paint the icon for the ghost row (typically an "add row" glyph).
   */
  private _paintGhostRowIcon(): void {
    // Get the dimensions for the cell.
    const cellH = this.defaultSizes.rowHeight;
    // Get the icon arguments; `icons` itself is optional in ExtraStyle.
    const iconArgs = this._extraStyle.icons?.['ghost-row'];
    // Bail early if there are no icon arguments.
    if (!iconArgs) {
      return;
    }
    // Unpack the icon arguments, defaulting the optional offsets to 0 so an
    // unspecified offset cannot produce a NaN drawing position.
    const { icon, color, size, top = 0, left = 0 } = iconArgs;
    // Calculate the y position for the icon.
    const y = this.headerHeight + this.bodyHeight - cellH + top - this.scrollY;
    this._drawIcon(left, y, size, color, icon.svgstr);
  }
  /**
   * Paint the icon for the ghost column (typically an "add column" glyph).
   */
  private _paintGhostColumnIcon(): void {
    // Get the dimensions for the cell.
    const cellW = this.defaultSizes.columnWidth;
    // Get the icon arguments; `icons` itself is optional in ExtraStyle.
    const iconArgs = this._extraStyle.icons?.['ghost-column'];
    // Bail early if there are no icon arguments.
    if (!iconArgs) {
      return;
    }
    // Unpack the icon arguments, defaulting the optional offsets to 0 so an
    // unspecified offset cannot produce a NaN drawing position.
    const { icon, color, size, left = 0, top = 0 } = iconArgs;
    // Calculate x position for the icon.
    const x = this.headerWidth + this.bodyWidth - cellW + left - this.scrollX;
    this._drawIcon(x, top, size, color, icon.svgstr);
  }
  /**
   * Paint the void color over the intersection of the ghost row and ghost
   * column so the corner does not show both fills.
   */
  private _drawOverCorner(
    rx: number,
    ry: number,
    rw: number,
    rh: number
  ): void {
    // Get the visible content dimensions.
    const contentW = this.defaultSizes.columnWidth;
    const contentH = this.defaultSizes.rowHeight;
    // Bail if there is no content to draw.
    if (contentW <= 0 || contentH <= 0) {
      return;
    }
    // Get the visible content origin.
    const contentX =
      this.headerWidth + this.bodyWidth - contentW - this.scrollX;
    const contentY =
      this.headerHeight + this.bodyHeight - contentH - this.scrollY;
    // Bail if the dirty rect does not intersect the content area.
    if (rx + rw <= contentX) {
      return;
    }
    if (ry + rh <= contentY) {
      return;
    }
    if (rx >= contentX + contentW) {
      return;
    }
    if (ry >= contentY + contentH) {
      return;
    }
    // Get the upper and lower bounds of the dirty content area.
    const x1 = Math.max(rx, contentX);
    const y1 = Math.max(ry, contentY);
    const x2 = Math.min(rx + rw - 1, contentX + contentW - 1);
    const y2 = Math.min(ry + rh - 1, contentY + contentH - 1);
    // Fill the region with the specified color.
    this.canvasGC.fillStyle = this.style.voidColor;
    this.canvasGC.fillRect(x1, y1, x2 - x1 + 1, y2 - y1 + 1);
  }
  /**
   * Paint the datatype icons (string, number, boolean, date) in the
   * column-header region.
   *
   * @param rx - Dirty rect x origin (viewport coordinates).
   * @param ry - Dirty rect y origin.
   * @param rw - Dirty rect width.
   * @param rh - Dirty rect height.
   */
  private _paintDatatypeIcons(
    rx: number,
    ry: number,
    rw: number,
    rh: number
  ): void {
    // Get the visible content dimensions.
    const contentW = this.bodyWidth - this.scrollX;
    const contentH = this.headerHeight;
    // Bail if there is no content to draw.
    if (contentW <= 0 || contentH <= 0) {
      return;
    }
    // Get the visible content origin.
    const contentX = this.headerWidth;
    const contentY = 0;
    // Bail if the dirty rect does not intersect the content area.
    if (rx + rw <= contentX) {
      return;
    }
    if (ry + rh <= contentY) {
      return;
    }
    if (rx >= contentX + contentW) {
      return;
    }
    if (ry >= contentY + contentH) {
      return;
    }
    // Fetch the geometry.
    const bw = this.bodyWidth;
    const pw = this.pageWidth;
    // Get the upper and lower bounds of the dirty content area.
    const x1 = Math.max(rx, contentX);
    const y1 = ry;
    let x2 = Math.min(rx + rw - 1, contentX + contentW - 1);
    const y2 = Math.min(ry + rh - 1, contentY + contentH - 1);
    // Convert the dirty content bounds into cell bounds.
    const r1 = this.columnHeaderSections.indexOf(y1);
    const c1 = this.columnSections.indexOf(x1 - contentX + this.scrollX);
    let r2 = this.columnHeaderSections.indexOf(y2);
    let c2 = this.columnSections.indexOf(x2 - contentX + this.scrollX);
    // Fetch the max row and column.
    const maxRow = this.columnHeaderSections.count - 1;
    const maxColumn = this.columnSections.count - 1;
    // Handle a dirty content area larger than the cell count.
    if (r2 < 0) {
      r2 = maxRow;
    }
    if (c2 < 0) {
      c2 = maxColumn;
    }
    // Convert the cell bounds back to visible coordinates.
    const x = this.columnSections.offsetOf(c1) + contentX - this.scrollX;
    const y = this.columnHeaderSections.offsetOf(r1);
    // Set up the paint region size variables.
    let width = 0;
    let height = 0;
    // Allocate the section sizes arrays.
    const rowSizes = new Array<number>(r2 - r1 + 1);
    const columnSizes = new Array<number>(c2 - c1 + 1);
    // Get the row sizes for the region.
    for (let j = r1; j <= r2; ++j) {
      const size = this.columnHeaderSections.sizeOf(j);
      rowSizes[j - r1] = size;
      height += size;
    }
    // Get the column sizes for the region.
    for (let i = c1; i <= c2; ++i) {
      const size = this.columnSections.sizeOf(i);
      columnSizes[i - c1] = size;
      width += size;
    }
    // Adjust the geometry if the last column is stretched.
    if (this.stretchLastColumn && pw > bw && c2 === maxColumn) {
      const dw = this.pageWidth - this.bodyWidth;
      columnSizes[columnSizes.length - 1] += dw;
      width += dw;
      x2 += dw;
    }
    // Create the paint region object.
    const rgn = {
      region: 'column-header',
      xMin: x1,
      yMin: y1,
      xMax: x2,
      yMax: y2,
      x,
      y,
      width,
      height,
      row: r1,
      column: c1,
      rowSizes,
      columnSizes
    };
    // Fetch the model once; it does not change per column.
    const model = this.dataModel as EditorModel;
    for (let x = rgn.x, i = 0, n = rgn.columnSizes.length; i < n; ++i) {
      // Fetch the size of the column.
      const columnSize = rgn.columnSizes[i];
      // Bail if we are on the last column.
      if (rgn.column + i + 1 === this.dataModel.columnCount('body')) {
        return;
      }
      // Skip zero sized columns (advancing x by 0 is a no-op).
      if (columnSize === 0) {
        continue;
      }
      // Fetch the data type for the column and its icon spec, if any.
      const metadata = model.dataTypes[rgn.column + i];
      const iconArgs = metadata && this._extraStyle.icons?.[metadata.type];
      if (iconArgs) {
        // Unpack the icon arguments, defaulting offsets to 0.
        const { icon, color, size, left = 0, top = 0 } = iconArgs;
        this._drawIcon(x + left, y + top, size, color, icon.svgstr);
      }
      // Always advance past the column, even when no icon was drawn, so
      // subsequent icons line up with their columns. (Previously a
      // `continue` on missing icon args skipped this increment, shifting
      // every later icon left by the skipped column's width.)
      x += columnSize;
    }
  }
  /**
   * Utilizes the canvas GC to draw the icon in the correct position with the correct styles.
   *
   * @param x The horizontal position of the icon
   * @param y The vertical position of the icon
   * @param size The desired rendered size of the icon
   * @param color The fill color for the icon
   * @param svgstr A string containing the raw contents of the svg file
   */
  private _drawIcon(
    x: number,
    y: number,
    size: number,
    color: string,
    svgstr: string
  ): void {
    // Parse the icon path from the icon string.
    const { defaultSize, path } = Private.parseSVG(svgstr);
    // Solve for the scaling factor using the provided width or the default.
    const scale = size / defaultSize;
    // Create a path 2d object from the path string.
    const canvasPath = new Path2D(path);
    // Get the current transform state.
    const transform = this.canvasGC.getTransform();
    // Orient to the desired origin for the icon.
    this.canvasGC.translate(x, y);
    // Scale the canvas.
    this.canvasGC.scale(scale, scale);
    // Set the canvas fill style.
    this.canvasGC.fillStyle = color;
    // Draw the icon.
    this.canvasGC.fill(canvasPath, 'nonzero');
    // Reset the transform to the initial state.
    this.canvasGC.setTransform(transform);
  }
  // The extra (ghost row/column + icon) style currently in effect.
  private _extraStyle: PaintedGrid.ExtraStyle;
}
/**
* Namespace for class statics.
*/
export namespace PaintedGrid {
  /**
   * The options for creating a new PaintedGrid.
   */
  export interface IOptions extends DataGrid.IOptions {
    // Optional extra styling (ghost row/column colors and icons). When
    // omitted, the grid falls back to `Private.defaultExtraStyle`.
    extraStyle?: ExtraStyle;
  }
  /**
   * The extra styling options for a painted grid.
   */
  export type ExtraStyle = {
    /**
     * The color of the ghost row.
     *
     * NOTE: This is painted on top of the last row and so
     * in most cases an opaque color is chosen.
     */
    ghostRowColor?: string;
    /**
     * The color of the ghost column.
     *
     * NOTE: This is painted on top of the last column and so
     * in most cases an opaque color is chosen.
     */
    ghostColumnColor?: string;
    /**
     * An object mapping data types (plus the special keys 'ghost-row' and
     * 'ghost-column') to icons.
     */
    icons?: { [key: string]: IIconArgs };
  };
  export type IIconArgs = {
    /**
     * The icon to paint on the grid.
     */
    icon: LabIcon;
    /**
     * The fill color for the icon.
     */
    color: string;
    /**
     * Distance right in pixels from the left cell boundary.
     */
    left?: number;
    /**
     * Distance down in pixels from the top cell boundary.
     */
    top?: number;
    /**
     * Size of icon in pixels.
     */
    size?: number;
  };
}
/**
* Namespace for module implementation details.
*/
namespace Private {
export interface ISVGInfo {
defaultSize: number;
path: string;
}
/**
* Parse an svg string into a standard form.
*/
export function parseSVG(svgstr: string): ISVGInfo {
// Set up a regular expression to get the size.
let regex = /viewBox="(.+?)"/;
const viewBox = svgstr
.match(regex)[1]
.split(' ')
.map(digit => parseInt(digit));
const defaultSize = viewBox[2];
// Redefine the regular expression to get the path string.
regex = /path d="(.+?)"/;
// Fetch the path string.
const path = svgstr.match(regex)[1];
return { defaultSize, path };
}
export const defaultExtraStyle = {
ghostRowColor: 'rgba(243, 243, 243, 0.80)',
ghostColumnColor: 'rgba(243, 243, 243, 0.80)',
icons: {
'ghost-column': {
icon: addIcon,
color: '#616161',
size: 18,
left: 63,
top: 9
},
'ghost-row': {
icon: addIcon,
color: '#bdbdbd',
size: 12,
left: 26,
top: 6
}
}
};
} | the_stack |
import { defineComponent, getCurrentInstance, ref, h, nextTick, watch, onUnmounted, WatchStopHandle, VNode } from 'vue'
import { AnyObject, VcComponentInternalInstance, VcComponentPublicInstance } from '@vue-cesium/utils/types'
import { useCommon } from '@vue-cesium/composables'
import { VcCollectionPoint, VcCollectionPrimitive } from '@vue-cesium/components/primitive-collections'
import { DrawStatus } from '@vue-cesium/shared'
import defaultProps from './defaultProps'
import { VcOverlayHtml } from '@vue-cesium/components/overlays'
import { t } from '@vue-cesium/locale'
import { VcBtn, VcTooltip } from '@vue-cesium/components/ui'
import { PointDrawing } from '../drawing.types'
import useTimeout from '@vue-cesium/composables/private/use-timeout'
import useCustomUpdate from '@vue-cesium/composables/private/use-custom-update'
export default defineComponent({
name: 'VcDrawingPoint',
props: defaultProps,
emits: ['beforeLoad', 'ready', 'destroyed', 'drawEvt', 'editorEvt', 'mouseEvt'],
setup(props, ctx) {
// state
const instance = getCurrentInstance() as VcComponentInternalInstance
instance.cesiumClass = 'VcDrawingPoint'
instance.cesiumEvents = []
const commonState = useCommon(props, ctx, instance)
if (commonState === void 0) {
return
}
const { $services } = commonState
const { emit } = ctx
const points = ref<Array<PointDrawing>>([])
const drawStatus = ref(DrawStatus.BeforeDraw)
const canShowDrawTip = ref(false)
const drawTipPosition = ref<Array<number> | Cesium.Cartesian3>([0, 0, 0])
const drawTip = ref('')
const showEditor = ref(false)
const editorPosition = ref<Array<number> | Cesium.Cartesian3>([0, 0, 0])
const mouseoverPoint = ref<any>(null)
const editingPoint = ref<any>(null)
const primitiveCollectionRef = ref<VcComponentPublicInstance | null>(null)
let restorePoint
let unwatchFns: Array<WatchStopHandle> = []
let editorType = ''
const { registerTimeout, removeTimeout } = useTimeout()
const { onVcCollectionPointReady } = useCustomUpdate()
// watch
unwatchFns.push(
watch(
() => props.editable,
val => {
const { drawingVm, selectedDrawingOption } = $services
if (val && selectedDrawingOption?.name === 'point') {
;(drawingVm?.proxy as any).toggleAction(selectedDrawingOption)
}
}
)
)
// methods
instance.createCesiumObject = async () => {
return primitiveCollectionRef
}
const startNew = () => {
const { Cartesian3 } = Cesium
const point: PointDrawing = {
drawStatus: DrawStatus.Drawing,
show: false,
position: new Cartesian3()
}
points.value.push(point)
drawStatus.value = DrawStatus.Drawing
canShowDrawTip.value = true
drawTip.value = props.drawtip?.drawingTip1 || t('vc.drawing.point.drawTip1')
}
const stop = () => {
if (drawStatus.value === DrawStatus.Drawing) {
points.value.pop()
}
drawStatus.value = DrawStatus.BeforeDraw
canShowDrawTip.value = false
drawTipPosition.value = [0, 0, 0]
}
const handleMouseClick = (movement, options?) => {
const { viewer, drawingVm, getWorldPosition, selectedDrawingOption } = $services
if (options.button === 2 && options.ctrl) {
const drawingsOption = (drawingVm?.proxy as any).drawingsOptions.find(v => v.name === 'point')
;(drawingVm?.proxy as any).toggleAction(drawingsOption)
return
}
// if (drawStatus.value === DrawStatus.AfterDraw) {
// startNew()
// }
const index = editingPoint.value ? editingPoint.value._vcPolylineIndx : points.value.length - 1
const point: PointDrawing = points.value[index]
if (options.button === 2 && editingPoint.value) {
;(drawingVm?.proxy as any).editingDrawingName = undefined
points.value[index] = restorePoint
drawStatus.value = DrawStatus.AfterDraw
points.value[index].drawStatus = DrawStatus.AfterDraw
editingPoint.value = undefined
drawTip.value = props.drawtip?.drawTip1 || t('vc.drawing.point.drawTip1')
return
}
if (options.button !== 0) {
return
}
const { defined } = Cesium
let type = 'new'
if (drawStatus.value === DrawStatus.BeforeDraw) {
const scene = viewer.scene
const position = getWorldPosition(scene, movement, {} as any)
if (!defined(position)) {
return
}
point.position = position
point.show = true
point.drawStatus = DrawStatus.AfterDraw
drawStatus.value = DrawStatus.AfterDraw
drawTip.value = props.drawtip?.drawingTip1 || t('vc.drawing.point.drawTip1')
nextTick(() => {
emit(
'drawEvt',
{
index: index,
points: points,
name: 'point',
finished: true,
position: position,
windowPoistion: movement,
type: type
},
viewer
)
})
} else {
drawStatus.value = DrawStatus.AfterDraw
point.drawStatus = DrawStatus.AfterDraw
if (editingPoint.value) {
editingPoint.value = undefined
;(drawingVm?.proxy as any).editingDrawingName = undefined
canShowDrawTip.value = false
type = editorType
} else {
if (props.mode === 1) {
;(drawingVm?.proxy as any).toggleAction(selectedDrawingOption)
}
}
if (selectedDrawingOption) {
drawTip.value = t('vc.drawing.point.drawTip1')
canShowDrawTip.value = true
}
nextTick(() => {
emit(
'drawEvt',
{
index: index,
points: points,
name: 'point',
finished: true,
position: points.value[index].position,
windowPoistion: movement,
type: type
},
viewer
)
})
}
}
const handleMouseMove = movement => {
if (!canShowDrawTip.value) {
return
}
const { viewer, getWorldPosition } = $services
const scene = viewer.scene
const { defined, SceneMode } = Cesium
if (scene.mode !== SceneMode.MORPHING) {
const position = getWorldPosition(scene, movement, {} as any)
if (!defined(position)) {
return
}
drawTipPosition.value = position
if (drawStatus.value === DrawStatus.AfterDraw) {
startNew()
}
if (drawStatus.value !== DrawStatus.Drawing) {
return
}
const index = editingPoint.value ? editingPoint.value._vcPolylineIndx : points.value.length - 1
const point: PointDrawing = points.value[index]
const type = editingPoint.value ? editorType : 'new'
point.position = position
point.show = true
nextTick(() => {
emit(
'drawEvt',
{
index: index,
points: point,
name: 'point',
finished: false,
position: position,
windowPoistion: movement,
type: type
},
viewer
)
})
}
}
const onMouseoverPoints = e => {
const { drawingHandlerActive, viewer } = $services
if (props.editable && drawStatus.value !== DrawStatus.Drawing && drawingHandlerActive) {
e.pickedFeature.primitive.pixelSize = props.pointOpts?.pixelSize * 1.5
removeTimeout()
registerTimeout(() => {
mouseoverPoint.value = e.pickedFeature.primitive
editorPosition.value = e.pickedFeature.primitive.position
showEditor.value = true
canShowDrawTip.value = false
drawTipPosition.value = [0, 0, 0]
}, props.editorOpts?.delay)
}
emit(
'mouseEvt',
{
type: e.type,
name: 'point',
target: e
},
viewer
)
}
const onMouseoutPoints = e => {
const { viewer, selectedDrawingOption } = $services
if (props.editable) {
e.pickedFeature.primitive.pixelSize = props.pointOpts?.pixelSize * 1.0
removeTimeout()
registerTimeout(() => {
editorPosition.value = [0, 0, 0]
mouseoverPoint.value = undefined
showEditor.value = false
}, props.editorOpts?.hideDelay)
selectedDrawingOption && (canShowDrawTip.value = true)
}
emit(
'mouseEvt',
{
type: e.type,
name: 'point',
target: e
},
viewer
)
}
const onMouseenterEditor = evt => {
removeTimeout()
}
const onMouseleaveEditor = evt => {
removeTimeout()
registerTimeout(() => {
editorPosition.value = [0, 0, 0]
mouseoverPoint.value.pixelSize = props.pointOpts?.pixelSize * 1.0
mouseoverPoint.value = undefined
showEditor.value = false
}, props.editorOpts?.hideDelay)
}
const onEditorClick = e => {
editorPosition.value = [0, 0, 0]
showEditor.value = false
if (!props.editable) {
return
}
editorType = e
const { viewer, drawingVm } = $services
if (e === 'move') {
drawTip.value = t('vc.drawing.point.drawTip3')
drawStatus.value = DrawStatus.Drawing
editingPoint.value = mouseoverPoint.value
canShowDrawTip.value = true
restorePoint = Object.assign({}, points.value[editingPoint.value._vcPolylineIndx])
;(drawingVm?.proxy as any).editingDrawingName = 'point'
} else if (e === 'remove') {
const index = mouseoverPoint.value._vcPolylineIndx
points.value.splice(index, 1)
}
emit(
'editorEvt',
{
type: e,
name: 'point',
points: points,
index: mouseoverPoint.value._vcPolylineIndx
},
viewer
)
}
const clear = () => {
points.value = []
stop()
}
const onPrimitiveCollectionReady = ({ cesiumObject }) => {
cesiumObject._vcId = 'VcDrawingPoint'
}
// life cycle
onUnmounted(() => {
unwatchFns.forEach(item => item())
unwatchFns = []
})
// expose public methods
const publicMethods = { points, startNew, stop, clear, handleMouseClick, handleMouseMove }
Object.assign(instance.proxy, publicMethods)
return () => {
const { createGuid } = Cesium
const children: Array<VNode> = []
const pointsRender: Array<AnyObject> = []
points.value.forEach((point, index) => {
pointsRender.push({
show: point.show,
position: point.position,
id: createGuid(),
_vcPolylineIndx: index, // for editor
...props.pointOpts
})
})
children.push(
h(VcCollectionPoint, {
enableMouseEvent: props.enableMouseEvent,
points: pointsRender,
onMouseover: onMouseoverPoints,
onMouseout: onMouseoutPoints,
onReady: onVcCollectionPointReady
})
)
if (props.drawtip?.show && canShowDrawTip.value) {
const { viewer } = $services
children.push(
h(
VcOverlayHtml,
{
position: drawTipPosition.value,
pixelOffset: props.drawtip?.pixelOffset,
teleport: {
to: viewer.container
}
},
() =>
h(
'div',
{
class: 'vc-drawtip vc-tooltip--style'
},
drawTip.value
)
)
)
}
if (showEditor.value) {
const buttons: Array<VNode> = []
if (mouseoverPoint.value) {
const editorOpts = props.editorOpts
for (const key in editorOpts) {
if (!Array.isArray(editorOpts[key]) && typeof editorOpts[key] !== 'number') {
const opts = {
...editorOpts[key]
}
delete opts.color
buttons.push(
h(
VcBtn,
{
style: { color: editorOpts[key].color, background: editorOpts[key].background },
...opts,
onclick: onEditorClick.bind(undefined, key)
},
() =>
h(
VcTooltip,
{
...editorOpts[key].tooltip
},
() => h('strong', null, editorOpts[key].tooltip?.tip || t(`vc.drawing.editor.${key}`))
)
)
)
}
}
}
const { viewer } = $services
children.push(
h(
VcOverlayHtml,
{
position: editorPosition.value,
pixelOffset: props.editorOpts?.pixelOffset,
teleport: {
to: viewer.container
},
onMouseenter: onMouseenterEditor,
onMouseleave: onMouseleaveEditor
},
() =>
h(
'div',
{
class: 'vc-editor'
},
buttons
)
)
)
}
return h(
VcCollectionPrimitive,
{
ref: primitiveCollectionRef,
show: props.show,
onReady: onPrimitiveCollectionReady
},
() => children
)
}
}
}) | the_stack |
import * as d3 from 'd3';
// import { Area } from 'd3';
// Configuration contract for the liquid fill gauge. Default values (with
// fuller explanations of each option) are produced by
// `liquidFillGaugeDefaultSettings` below.
interface ConfigI {
  minValue: number; // The gauge minimum value.
  maxValue: number; // The gauge maximum value.
  circleThickness: number; // Outer circle thickness as a fraction of its radius.
  circleFillGap: number; // Gap between outer circle and wave circle, as a fraction of the outer radius.
  circleColor: string; // The color of the outer circle.
  waveHeight: number; // Wave height as a fraction of the wave circle's radius.
  waveCount: number; // Number of full waves per width of the wave circle.
  waveRiseTime: number; // Milliseconds for the wave to rise from 0 to its final height.
  waveAnimateTime: number; // Milliseconds for a full wave to enter the wave circle.
  waveRise : boolean; // Whether the wave rises from 0 or starts at full height.
  waveHeightScaling: boolean; // Scale wave size down near 0% and 100% fill.
  waveAnimate: boolean; // Whether the wave scrolls or is static.
  waveColor: string; // The color of the fill wave.
  waveOffset: number; // Initial wave offset: 0 = none, 1 = one full wave.
  textVertPosition: number; // Vertical text position: 0 = bottom, 1 = top.
  textSize: number; // Relative height of the displayed text (1 = 50%).
  valueCountUp: boolean; // Whether the value counts up from 0 on load.
  displayPercent: boolean; // Whether a % symbol is displayed after the value.
  textColor: string; // Text color when the wave does not overlap it.
  waveTextColor: string; // Text color when the wave overlaps it.
}
/**
 * Build the default configuration for a liquid fill gauge.
 *
 * Sizes are fractions of the relevant radius, times are in milliseconds,
 * and colors are CSS color strings. Callers may mutate the returned object
 * freely — a fresh object is created on every call.
 *
 * @returns A fully-populated {@link ConfigI} with the library defaults.
 */
export function liquidFillGaugeDefaultSettings(): ConfigI {
  const defaults: ConfigI = {
    // Value range.
    minValue: 0,
    maxValue: 100,
    // Outer circle: thickness and gap are fractions of the outer radius.
    circleThickness: 0.05,
    circleFillGap: 0.05,
    circleColor: "#178BCA",
    // Wave geometry: height is a fraction of the wave circle's radius;
    // count is full waves per circle width; offset of 1 = one full wave.
    waveHeight: 0.05,
    waveCount: 1,
    waveOffset: 0,
    // Wave animation: rise time is 0 → final height; animate time is one
    // full wave entering the circle. `waveHeightScaling` shrinks the wave
    // near 0% and 100% fill so the circle never looks falsely full/empty.
    waveRiseTime: 1000,
    waveAnimateTime: 18000,
    waveRise: true,
    waveHeightScaling: true,
    waveAnimate: true,
    waveColor: "#178BCA",
    // Text: vertical position is 0 (bottom) to 1 (top); size 1 = 50% of
    // the radius; `valueCountUp` animates the number from 0 on load.
    textVertPosition: .5,
    textSize: 1,
    valueCountUp: true,
    displayPercent: true,
    // Text colors outside vs. under the wave.
    textColor: "#045681",
    waveTextColor: "#A4DBf8"
  };
  return defaults;
}
export function loadLiquidFillGauge(elementId: string, value:any, config:ConfigI, post:number) {
if (config === undefined) { config = liquidFillGaugeDefaultSettings(); }
const gauge = d3.select("#" + elementId);
const radius = Math.min(parseInt(gauge.style("width")), parseInt(gauge.style("height"))) / 2;
const locationX = parseInt(gauge.style("width")) / 2 - radius;
const locationY = parseInt(gauge.style("height")) / 2 - radius;
const fillPercent = value / config.maxValue;
let waveHeightScale : d3.ScaleLinear<number, number>;
if (config.waveHeightScaling) {
waveHeightScale = d3.scaleLinear()
.range([0, config.waveHeight, 0])
.domain([0, 50, 100]);
} else {
waveHeightScale = d3.scaleLinear()
.range([config.waveHeight, config.waveHeight])
.domain([0, 100]);
}
const textPixels = (config.textSize * radius / 2);
const textFinalValue = parseFloat(value).toFixed(2);
const textStartValue = config.valueCountUp ? config.minValue : textFinalValue;
const percentText = config.displayPercent ? "" : "";
const circleThickness = config.circleThickness * radius;
const circleFillGap = config.circleFillGap * radius;
const fillCircleMargin = circleThickness + circleFillGap;
const fillCircleRadius = radius - fillCircleMargin;
const waveHeight = fillCircleRadius * waveHeightScale(fillPercent * 100);
const waveLength = fillCircleRadius * 2 / config.waveCount;
const waveClipCount = 1 + config.waveCount;
const waveClipWidth = waveLength * waveClipCount;
// Rounding functions so that the correct number of decimal places is always displayed
// as the value counts up.
const format = d3.format(".0f");
// Data for building the clip wave area.
const data = [];
for (let i = 0; i <= 40 * waveClipCount; i++) {
data.push({x: i / (40 * waveClipCount), y: (i / (40))});
}
// Scales for drawing the outer circle.
const gaugeCircleX = d3.scaleLinear().range([0, 2 * Math.PI]).domain([0, 1]);
const gaugeCircleY = d3.scaleLinear().range([0, radius]).domain([0, radius]);
// Scales for controlling the size of the clipping path.
const waveScaleX = d3.scaleLinear().range([0, waveClipWidth]).domain([0, 1]);
const waveScaleY = d3.scaleLinear().range([0, waveHeight]).domain([0, 1]);
// Scales for controlling the position of the clipping path.
const waveRiseScale = d3.scaleLinear()
// The clipping area size is the height of the fill circle + the wave height,
// so we position the clip wave such that the it will overlap the fill circle
// at all when at 0%, and will totally cover the fill circle at 100%.
.range([(fillCircleMargin + fillCircleRadius * 2 + waveHeight), (fillCircleMargin - waveHeight)])
.domain([0, 1]);
const waveAnimateScale = d3.scaleLinear()
.range([0, waveClipWidth - fillCircleRadius * 2]) // Push the clip area one full wave then snap back.
.domain([0, 1]);
// Scale for controlling the position of the text within the gauge.
const textRiseScaleY = d3.scaleLinear()
.range([fillCircleMargin + fillCircleRadius * 2,(fillCircleMargin + textPixels * 0.7)])
.domain([0, 1]);
// Center the gauge within the parent SVG.
const gaugeGroup = gauge.append("g")
.attr('transform','translate(' + locationX + ',' + locationY + ')');
// console.log('GAUAGE GROUP: ', locationX);
// Draw the outer circle.
const gaugeCircleArc = d3.arc()
.startAngle(gaugeCircleX(0))
.endAngle(gaugeCircleX(1))
.outerRadius(gaugeCircleY(radius))
.innerRadius(gaugeCircleY(radius - circleThickness));
gaugeGroup.append("path")
.attr("d", <any>gaugeCircleArc)
.style("fill", config.circleColor)
.attr('transform','translate(' + radius + ',' + radius + ')');
// console.log('gaugeGroup append PATH: ', radius);
// Text where the wave does not overlap.
gaugeGroup.append("text")
.text(format(<any>textStartValue) + percentText)
.attr("class", "liquidFillGaugeText")
.attr("text-anchor", "middle")
.attr("font-size", textPixels + "px")
.style("fill", config.textColor)
.attr('transform','translate(' + radius + ',' + textRiseScaleY(config.textVertPosition) + ')');
// The clipping wave area.
const clipArea = d3.area()
.x(function(d:any) { return waveScaleX(d.x); })
.y0(function(d:any) { return waveScaleY(Math.sin(Math.PI * 2 * config.waveOffset * -1 + Math.PI * 2 * (1 - config.waveCount) + d.y * 2 * Math.PI));})
.y1(function(d) { return (fillCircleRadius *2 + waveHeight); });
const waveGroup = gaugeGroup.append("defs")
.append("clipPath")
.attr("id", "clipWave" + elementId);
const wave = waveGroup.append("path")
.datum(data)
.attr("d", <any>clipArea)
.attr("T", 0);
// The inner circle with the clipping wave attached.
const fillCircleGroup = gaugeGroup.append("g")
.attr("clip-path", "url(" + location.href + "#clipWave" + elementId + ")");
fillCircleGroup.append("circle")
.attr("cx", radius)
.attr("cy", radius)
.attr("r", fillCircleRadius)
.style("fill", config.waveColor);
// Text where the wave does overlap.
fillCircleGroup.append("text")
.text(format(<any>textStartValue))
.attr("class", "liquidFillGaugeText")
.attr("text-anchor", "middle")
.attr("font-size", textPixels + "px")
.style("fill", config.waveTextColor)
.attr('transform','translate(' + radius + ',' + textRiseScaleY(config.textVertPosition) + ')');
// Make the value count up.
if (config.valueCountUp) {
gaugeGroup.selectAll("text.liquidFillGaugeText").transition()
.duration(config.waveRiseTime)
.tween("text", function(d) {
var that = d3.select(this);
var i = d3.interpolateNumber(<any>that.text().replace("", ""), <any>textFinalValue);
return function(t) { that.text(format(i(t)) + percentText); };
});
}
// Make the wave rise. wave and waveGroup are separate so that horizontal and vertical movement
// can be controlled independently.
const waveGroupXPosition = fillCircleMargin + fillCircleRadius * 2 - waveClipWidth;
if (config.waveRise) {
waveGroup.attr('transform','translate(' + waveGroupXPosition + ',' + waveRiseScale(0) + ')')
.transition()
.duration(config.waveRiseTime)
.attr('transform','translate(' + waveGroupXPosition + ',' + waveRiseScale(fillPercent) + ')')
.on("start", function() { wave.attr('transform','translate(1,0)'); });
// This transform is necessary to get the clip wave positioned correctly when
// waveRise=true and waveAnimate=false. The wave will not position correctly without
// this, but it's not clear why this is actually necessary.
} else {
waveGroup.attr('transform','translate(' + waveGroupXPosition + ',' + waveRiseScale(fillPercent) + ')');
}
if(config.waveAnimate) { animateWave(); }
function animateWave() {
wave.attr('transform','translate(' + waveAnimateScale(<any>wave.attr('T')) + ',0)');
wave.transition()
.duration(config.waveAnimateTime * (1 - <any>wave.attr('T')))
.ease(d3.easeLinear)
.attr('transform','translate(' + waveAnimateScale(1) + ',0)')
.attr('T', 1)
.on('end', function() {
wave.attr('T', 0);
if (config.waveAnimate) { animateWave(); }
});
}
class GaugeUpdater {
constructor(){
}
setWaveAnimate(value:boolean) {
// Note: must call update after setting value
config.waveAnimate = value;
}
update (value:number) {
gaugeGroup.selectAll("text.liquidFillGaugeText").transition()
.duration(config.waveRiseTime)
.tween("text", function(d) {
var that = d3.select(this);
var i = d3.interpolateNumber(<any>that.text().replace("%", ""), value);
return function(t) { that.text(format(i(t)) + percentText); };
});
var fillPercent = Math.max(config.minValue, Math.min(config.maxValue, value)) / config.maxValue;
var waveHeight = fillCircleRadius * waveHeightScale(fillPercent * 100);
var waveRiseScale = d3.scaleLinear()
// The clipping area size is the height of the fill circle + the wave height, so we position
// the clip wave such that the it will overlap the fill circle at all when at 0%, and will
// totally cover the fill circle at 100%.
.range([(fillCircleMargin + fillCircleRadius * 2 + waveHeight), (fillCircleMargin - waveHeight)])
.domain([0,1]);
var newHeight = waveRiseScale(fillPercent);
var waveScaleX = d3.scaleLinear().range([0, waveClipWidth]).domain([0, 1]);
var waveScaleY = d3.scaleLinear().range([0, waveHeight]).domain([0, 1]);
var newClipArea;
if (config.waveHeightScaling) {
newClipArea = d3.area()
.x(function(d:any) { return waveScaleX(d.x); } )
.y0(function(d:any) {
return waveScaleY(Math.sin(
Math.PI * 2 * config.waveOffset * -1 + Math.PI * 2 * (1 - config.waveCount) + d.y * 2 * Math.PI));
})
.y1(function(d) { return (fillCircleRadius * 2 + waveHeight); });
} else {
newClipArea = clipArea;
}
var newWavePosition = config.waveAnimate ? waveAnimateScale(1) : 0;
wave.transition()
.duration(0)
.transition()
.duration(config.waveAnimate ? (config.waveAnimateTime * (1 - <any>wave.attr('T'))) : config.waveRiseTime)
.ease(d3.easeLinear)
.attr('d', <any>newClipArea)
.attr('transform','translate(' + newWavePosition + ',0)')
.attr('T','1')
.on("end", function() {
if (config.waveAnimate) {
wave.attr('transform','translate(' + waveAnimateScale(0) + ',0)');
animateWave();
}
});
waveGroup.transition()
.duration(config.waveRiseTime)
.attr('transform','translate(' + waveGroupXPosition + ',' + newHeight + ')');
}
}
return new GaugeUpdater();
} | the_stack |
import { ComparisonOperator, GraphWidget, MathExpression, Metric, MetricOptions, Statistic, TreatMissingData, IWidget } from '@aws-cdk/aws-cloudwatch';
import { Rule, Schedule } from '@aws-cdk/aws-events';
import { LambdaFunction } from '@aws-cdk/aws-events-targets';
import { Tracing } from '@aws-cdk/aws-lambda';
import { SqsEventSource } from '@aws-cdk/aws-lambda-event-sources';
import { BlockPublicAccess, IBucket } from '@aws-cdk/aws-s3';
import { Queue, QueueEncryption } from '@aws-cdk/aws-sqs';
import { Construct, Duration } from '@aws-cdk/core';
import { lambdaFunctionUrl, s3ObjectUrl, sqsQueueUrl } from '../deep-link';
import { fillMetric } from '../metric-utils';
import { IMonitoring } from '../monitoring/api';
import type { IPackageSource, PackageSourceBindOptions, PackageSourceBindResult } from '../package-source';
import { RUNBOOK_URL } from '../runbook-url';
import { S3StorageFactory } from '../s3/storage';
import { NpmJsPackageCanary } from './npmjs/canary';
import { MARKER_FILE_NAME, METRICS_NAMESPACE, MetricName, S3KeyPrefix } from './npmjs/constants.lambda-shared';
import { NpmJsFollower } from './npmjs/npm-js-follower';
import { StageAndNotify } from './npmjs/stage-and-notify';
/**
* The periodicity at which the NpmJs follower will run. This MUST be a valid
* CloudWatch Metric grain, as this will also be the period of the CloudWatch
 * alarm that monitors the health of the follower.
*/
const FOLLOWER_RUN_RATE = Duration.minutes(5);
/**
 * Configuration options for the `NpmJs` package source.
 */
export interface NpmJsProps {
  /**
   * The bucket to use for staging npm packages.
   *
   * @default - a new bucket will be created.
   */
  readonly stagingBucket?: IBucket;

  /**
   * Registers a package canary, which will track availability of a canary
   * package in ConstructHub, and emit dedicated metrics.
   *
   * @default true
   */
  readonly enableCanary?: boolean;

  /**
   * The package that is monitored by the package canary, if enabled by
   * `enableCanary`.
   *
   * @default 'construct-hub-probe'
   */
  readonly canaryPackage?: string;

  /**
   * The maximum amount of time it is supposed to take for packages to become
   * visible in this ConstructHub instance. If `enableCanary` is enabled, an
   * alarm will trigger if this SLA is breached by the `canaryPackage`.
   *
   * @default Duration.minutes(5)
   */
  readonly canarySla?: Duration;
}
/**
* A package source that gets package data from the npmjs.com package registry.
*/
export class NpmJs implements IPackageSource {
public constructor(private readonly props: NpmJsProps = {}) {}
public bind(
scope: Construct,
{ baseUrl, denyList, ingestion, licenseList, monitoring, queue, repository }: PackageSourceBindOptions,
): PackageSourceBindResult {
repository?.addExternalConnection('public:npmjs');
const storageFactory = S3StorageFactory.getOrCreate(scope);
const bucket = this.props.stagingBucket || storageFactory.newBucket(scope, 'NpmJs/StagingBucket', {
blockPublicAccess: BlockPublicAccess.BLOCK_ALL,
enforceSSL: true,
lifecycleRules: [{ prefix: S3KeyPrefix.STAGED_KEY_PREFIX, expiration: Duration.days(30) }],
});
bucket.grantRead(ingestion);
const stager = new StageAndNotify(scope, 'NpmJs-StageAndNotify', {
deadLetterQueue: new Queue(scope, 'StagerDLQ', {
encryption: QueueEncryption.KMS_MANAGED,
retentionPeriod: Duration.days(14),
visibilityTimeout: Duration.minutes(15),
}),
description: `[${scope.node.path}/NpmJS-StageAndNotify] Stages tarballs to S3 and notifies ConstructHub`,
environment: {
AWS_EMF_ENVIRONMENT: 'Local',
BUCKET_NAME: bucket.bucketName,
QUEUE_URL: queue.queueUrl,
},
memorySize: 10_024, // 10GiB
retryAttempts: 2,
timeout: Duration.minutes(5),
tracing: Tracing.ACTIVE,
});
bucket.grantReadWrite(stager);
denyList?.grantRead(stager);
queue.grantSendMessages(stager);
stager.addEventSource(new SqsEventSource(stager.deadLetterQueue!, { batchSize: 1, enabled: false }));
const follower = new NpmJsFollower(scope, 'NpmJs', {
description: `[${scope.node.path}/NpmJs] Periodically query npmjs.com index for new packages`,
environment: {
AWS_EMF_ENVIRONMENT: 'Local',
BUCKET_NAME: bucket.bucketName,
FUNCTION_NAME: stager.functionName,
},
memorySize: 10_024, // 10 GiB
reservedConcurrentExecutions: 1, // Only one execution at a time, to avoid race conditions on the S3 marker object
timeout: FOLLOWER_RUN_RATE,
tracing: Tracing.ACTIVE,
});
bucket.grantReadWrite(follower, MARKER_FILE_NAME);
denyList?.grantRead(follower);
licenseList.grantRead(follower);
stager.grantInvoke(follower);
const rule = new Rule(scope, 'NpmJs/Schedule', {
description: `${scope.node.path}/NpmJs/Schedule`,
schedule: Schedule.rate(FOLLOWER_RUN_RATE),
targets: [new LambdaFunction(follower)],
});
this.registerAlarms(scope, follower, stager, monitoring, rule);
return {
name: follower.node.path,
links: [
{ name: 'NpmJs Follower', url: lambdaFunctionUrl(follower), primary: true },
{ name: 'Marker Object', url: s3ObjectUrl(bucket, MARKER_FILE_NAME) },
{ name: 'Stager', url: lambdaFunctionUrl(stager) },
{ name: 'Stager DLQ', url: sqsQueueUrl(stager.deadLetterQueue!) },
],
dashboardWidgets: [
[
new GraphWidget({
height: 6,
width: 12,
title: 'Follower Health',
left: [
fillMetric(follower.metricInvocations({ label: 'Invocations' })),
fillMetric(follower.metricErrors({ label: 'Errors' })),
],
leftYAxis: { min: 0 },
right: [
this.metricRemainingTime({ label: 'Remaining Time' }),
],
rightYAxis: { min: 0 },
period: Duration.minutes(5),
}),
new GraphWidget({
height: 6,
width: 12,
title: 'Stager Health',
left: [
fillMetric(stager.metricInvocations({ label: 'Invocations' })),
fillMetric(stager.metricErrors({ label: 'Errors' })),
],
leftYAxis: { min: 0 },
right: [
stager.metricDuration({ label: 'Duration' }),
],
rightYAxis: { min: 0 },
period: Duration.minutes(5),
}),
], [
new GraphWidget({
height: 6,
width: 12,
title: 'CouchDB Follower',
left: [
fillMetric(this.metricChangeCount({ label: 'Change Count' }), 0),
fillMetric(this.metricUnprocessableEntity({ label: 'Unprocessable' }), 0),
],
leftYAxis: { min: 0 },
right: [
fillMetric(this.metricNpmJsChangeAge({ label: 'Lag to npmjs.com' }), 'REPEAT'),
fillMetric(this.metricPackageVersionAge({ label: 'Package Version Age' }), 'REPEAT'),
],
rightYAxis: { label: 'Milliseconds', min: 0, showUnits: false },
period: Duration.minutes(5),
}),
new GraphWidget({
height: 6,
width: 12,
title: 'CouchDB Changes',
left: [
fillMetric(this.metricLastSeq({ label: 'Last Sequence Number' }), 'REPEAT'),
],
period: Duration.minutes(5),
}),
], [
new GraphWidget({
height: 6,
width: 12,
title: 'Stager Dead-Letter Queue',
left: [
fillMetric(stager.deadLetterQueue!.metricApproximateNumberOfMessagesVisible({ label: 'Visible Messages' }), 0),
fillMetric(stager.deadLetterQueue!.metricApproximateNumberOfMessagesNotVisible({ label: 'Invisible Messages' }), 0),
],
leftYAxis: { min: 0 },
right: [
stager.deadLetterQueue!.metricApproximateAgeOfOldestMessage({ label: 'Oldest Message' }),
],
rightYAxis: { min: 0 },
period: Duration.minutes(1),
}),
...((this.props.enableCanary ?? true)
? this.registerCanary(
follower,
this.props.canaryPackage ?? 'construct-hub-probe',
this.props.canarySla ?? Duration.minutes(5),
bucket,
baseUrl,
monitoring,
)
: []),
],
],
};
}
/**
* The average time it took to process a changes batch.
*/
public metricBatchProcessingTime(opts?: MetricOptions): Metric {
return new Metric({
period: Duration.minutes(1),
statistic: Statistic.AVERAGE,
...opts,
metricName: MetricName.BATCH_PROCESSING_TIME,
namespace: METRICS_NAMESPACE,
});
}
/**
* The total count of changes that were processed.
*/
public metricChangeCount(opts?: MetricOptions): Metric {
return new Metric({
period: Duration.minutes(1),
statistic: Statistic.SUM,
...opts,
metricName: MetricName.CHANGE_COUNT,
namespace: METRICS_NAMESPACE,
});
}
/**
* The last sequence number that was processed. This metric can be used to
* discover when a sequence reset has happened in the CouchDB instance.
*/
public metricLastSeq(opts?: MetricOptions): Metric {
return new Metric({
period: Duration.minutes(1),
statistic: Statistic.MAXIMUM,
...opts,
metricName: MetricName.LAST_SEQ,
namespace: METRICS_NAMESPACE,
});
}
public metricNpmJsChangeAge(opts?: MetricOptions): Metric {
return new Metric({
period: Duration.minutes(1),
statistic: Statistic.MINIMUM,
...opts,
metricName: MetricName.NPMJS_CHANGE_AGE,
namespace: METRICS_NAMESPACE,
});
}
/**
* The age of the oldest package version that was processed.
*/
public metricPackageVersionAge(opts?: MetricOptions): Metric {
return new Metric({
period: Duration.minutes(1),
statistic: Statistic.MAXIMUM,
...opts,
metricName: MetricName.PACKAGE_VERSION_AGE,
namespace: METRICS_NAMESPACE,
});
}
/**
* The total count of package versions that were inspected.
*/
public metricPackageVersionCount(opts?: MetricOptions): Metric {
return new Metric({
period: Duration.minutes(1),
statistic: Statistic.SUM,
...opts,
metricName: MetricName.PACKAGE_VERSION_COUNT,
namespace: METRICS_NAMESPACE,
});
}
/**
* The total count of package versions that were deemed relevant.
*/
public metricRelevantPackageVersions(opts?: MetricOptions): Metric {
return new Metric({
period: Duration.minutes(1),
statistic: Statistic.SUM,
...opts,
metricName: MetricName.RELEVANT_PACKAGE_VERSIONS,
namespace: METRICS_NAMESPACE,
});
}
/**
* The amount of time that was remaining when the lambda returned in order to
* avoid hitting a timeout.
*/
public metricRemainingTime(opts?: MetricOptions): Metric {
return new Metric({
period: Duration.minutes(5),
statistic: Statistic.MINIMUM,
...opts,
metricName: MetricName.REMAINING_TIME,
namespace: METRICS_NAMESPACE,
});
}
/**
* The amount of changes that were not processed due to having an invalid
* format.
*/
public metricUnprocessableEntity(opts?: MetricOptions): Metric {
return new Metric({
period: Duration.minutes(1),
statistic: Statistic.SUM,
...opts,
metricName: MetricName.UNPROCESSABLE_ENTITY,
namespace: METRICS_NAMESPACE,
});
}
private registerAlarms(scope: Construct, follower: NpmJsFollower, stager: StageAndNotify, monitoring: IMonitoring, schedule: Rule) {
const failureAlarm = follower.metricErrors().createAlarm(scope, 'NpmJs/Follower/Failures', {
alarmName: `${scope.node.path}/NpmJs/Follower/Failures`,
alarmDescription: [
'The NpmJs follower function failed!',
'',
`RunBook: ${RUNBOOK_URL}`,
'',
`Direct link to Lambda function: ${lambdaFunctionUrl(follower)}`,
].join('\n'),
comparisonOperator: ComparisonOperator.GREATER_THAN_OR_EQUAL_TO_THRESHOLD,
evaluationPeriods: 3,
threshold: 1,
treatMissingData: TreatMissingData.MISSING,
});
monitoring.addHighSeverityAlarm('NpmJs/Follower Failures', failureAlarm);
const notRunningAlarm = follower.metricInvocations({ period: FOLLOWER_RUN_RATE })
.createAlarm(scope, 'NpmJs/Follower/NotRunning', {
alarmName: `${scope.node.path}/NpmJs/Follower/NotRunning`,
alarmDescription: [
'The NpmJs follower function is not running!',
'',
`RunBook: ${RUNBOOK_URL}`,
'',
`Direct link to Lambda function: ${lambdaFunctionUrl(follower)}`,
].join('\n'),
comparisonOperator: ComparisonOperator.LESS_THAN_THRESHOLD,
evaluationPeriods: 2,
threshold: 1,
treatMissingData: TreatMissingData.BREACHING,
});
monitoring.addHighSeverityAlarm('NpmJs/Follower Not Running', notRunningAlarm);
// The period for this alarm needs to match the scheduling interval of the
// follower, otherwise the metric will be too sparse to properly detect
// problems.
const noChangeAlarm = this.metricChangeCount({ period: FOLLOWER_RUN_RATE })
.createAlarm(scope, 'NpmJs/Follower/NoChanges', {
alarmName: `${scope.node.path}/NpmJs/Follower/NoChanges`,
alarmDescription: [
'The NpmJs follower function is no discovering any changes from CouchDB!',
'',
`RunBook: ${RUNBOOK_URL}`,
'',
`Direct link to Lambda function: ${lambdaFunctionUrl(follower)}`,
].join('\n'),
comparisonOperator: ComparisonOperator.LESS_THAN_THRESHOLD,
evaluationPeriods: 2,
threshold: 1,
// If the metric is not emitted, it can be assumed to be zero.
treatMissingData: TreatMissingData.BREACHING,
});
monitoring.addLowSeverityAlarm('Np npmjs.com changes discovered', noChangeAlarm);
const dlqNotEmptyAlarm = new MathExpression({
expression: 'mVisible + mHidden',
usingMetrics: {
mVisible: stager.deadLetterQueue!.metricApproximateNumberOfMessagesVisible({ period: Duration.minutes(1) }),
mHidden: stager.deadLetterQueue!.metricApproximateNumberOfMessagesNotVisible({ period: Duration.minutes(1) }),
},
}).createAlarm(scope, `${scope.node.path}/NpmJs/Stager/DLQNotEmpty`, {
alarmName: `${scope.node.path}/NpmJs/Stager/DLQNotEmpty`,
alarmDescription: [
'The NpmJS package stager is failing - its dead letter queue is not empty',
'',
`Link to the lambda function: ${lambdaFunctionUrl(stager)}`,
`Link to the dead letter queue: ${sqsQueueUrl(stager.deadLetterQueue!)}`,
'',
`Runbook: ${RUNBOOK_URL}`,
].join('/n'),
comparisonOperator: ComparisonOperator.GREATER_THAN_OR_EQUAL_TO_THRESHOLD,
evaluationPeriods: 1,
threshold: 1,
treatMissingData: TreatMissingData.NOT_BREACHING,
});
monitoring.addLowSeverityAlarm('NpmJs/Stager DLQ Not Empty', dlqNotEmptyAlarm);
// Finally - the "not running" alarm depends on the schedule (it won't run until the schedule
// exists!), and the schedule depends on the failure alarm existing (we don't want it to run
// before we can know it is failing). This means the returned `IDependable` effectively ensures
// all alarms have been provisionned already! Isn't it nice!
notRunningAlarm.node.addDependency(schedule);
schedule.node.addDependency(failureAlarm);
}
private registerCanary(
scope: Construct,
packageName: string,
visibilitySla: Duration,
bucket: IBucket,
constructHubBaseUrl: string,
monitoring: IMonitoring,
): IWidget[] {
const canary = new NpmJsPackageCanary(scope, 'Canary', { bucket, constructHubBaseUrl, packageName });
const alarm = new MathExpression({
expression: 'MAX([mDwell, mTTC])',
period: Duration.minutes(1),
usingMetrics: {
mDwell: canary.metricDwellTime(),
mTTC: canary.metricTimeToCatalog(),
},
}).createAlarm(canary, 'Alarm', {
alarmName: `${canary.node.path}/SLA-Breached`,
alarmDescription: [
`New versions of ${packageName} have been published over ${visibilitySla.toHumanString()} ago and are still not visible in construct hub`,
`Runbook: ${RUNBOOK_URL}`,
].join('\n'),
comparisonOperator: ComparisonOperator.GREATER_THAN_THRESHOLD,
evaluationPeriods: 2,
// If there is no data, the canary might not be running, so... *Chuckles* we're in danger!
treatMissingData: TreatMissingData.BREACHING,
threshold: visibilitySla.toSeconds(),
});
monitoring.addHighSeverityAlarm('New version visibility SLA breached', alarm);
return [
new GraphWidget({
height: 6,
width: 12,
title: 'Package Canary',
left: [
canary.metricDwellTime({ label: 'Dwell Time' }),
canary.metricTimeToCatalog({ label: 'Time to Catalog' }),
],
leftAnnotations: [{
color: '#ff0000',
label: `SLA (${visibilitySla.toHumanString()})`,
value: visibilitySla.toSeconds(),
}],
leftYAxis: { min: 0 },
right: [
canary.metricTrackedVersionCount({ label: 'Tracked Version Count' }),
],
rightYAxis: { min: 0 },
}),
];
}
} | the_stack |
import { InMemoryFile as InMemoryProjectFile } from "@atomist/automation-client/lib/project/mem/InMemoryFile";
import { InMemoryProject } from "@atomist/automation-client/lib/project/mem/InMemoryProject";
import { doWithAllMatches, findValues } from "@atomist/automation-client/lib/tree/ast/astUtils";
import * as assert from "power-assert";
import { DockerFileParser } from "../../../../lib/pack/docker/parse/DockerFileParser";
// Tests for DockerFileParser: verifies AST construction and path-expression
// based querying/updating over a set of representative Dockerfile fixtures
// (declared at the bottom of the suite).
describe("Docker file parser", () => {

    // --- Parsing -----------------------------------------------------------

    it("should parse valid", async () => {
        const root = await DockerFileParser.toAst(new InMemoryProjectFile("Dockerfile", nodeDockerfile));
        assert(!!root);
    });

    it("should parse valid with / path", async () => {
        const root = await DockerFileParser.toAst(new InMemoryProjectFile("Dockerfile", dashedImage));
        assert(!!root);
    });

    it("should parse docker image with only name", async () => {
        // Special value to be able to find easily in logs
        const image = `FROM nginx
COPY docker/nginx.conf /etc/nginx/nginx.conf
COPY resources/public /usr/share/nginx/html
EXPOSE 8080`;
        const p = InMemoryProject.of({ path: "docker/Dockerfile", content: image });
        const imageName: string[] = await findValues(p, DockerFileParser, "docker/Dockerfile", "//FROM/image/name");
        assert.deepStrictEqual(imageName, ["nginx"]);
    });

    // --- FROM / EXPOSE queries ----------------------------------------------

    it("should query for image", async () => {
        const p = InMemoryProject.of({ path: "Dockerfile", content: nginxDockerFile });
        const images = await findValues(p, DockerFileParser, "Dockerfile", "//FROM/image");
        assert.strictEqual(images[0], "debian:jessie");
    });

    it("should query for image with /", async () => {
        const p = InMemoryProject.of({ path: "Dockerfile", content: dashedImage });
        const images = await findValues(p, DockerFileParser, "Dockerfile", "//FROM/image");
        assert.strictEqual(images[0], "adoptopenjdk/openjdk8-openj9");
    });

    it("should find single EXPOSE", async () => {
        const p = InMemoryProject.of({ path: "Dockerfile", content: expose1 });
        const exposes = await findValues(p, DockerFileParser, "Dockerfile", "//EXPOSE");
        assert.strictEqual(exposes.length, 1);
        assert.strictEqual(exposes[0], "EXPOSE 8080");
    });

    it("should find multiple EXPOSE", async () => {
        const p = InMemoryProject.of({ path: "Dockerfile", content: dashedImage });
        const exposes = await findValues(p, DockerFileParser, "Dockerfile", "//EXPOSE");
        assert.strictEqual(exposes.length, 2);
        assert.strictEqual(exposes[0], "EXPOSE 8080");
        assert.strictEqual(exposes[1], "EXPOSE 8081");
    });

    it("should find multiple EXPOSE and show ports", async () => {
        const p = InMemoryProject.of({ path: "Dockerfile", content: dashedImage });
        const exposes = await findValues(p, DockerFileParser, "Dockerfile", "//EXPOSE/port");
        assert.strictEqual(exposes.length, 2);
        assert.strictEqual(exposes[0], "8080");
        assert.strictEqual(exposes[1], "8081");
    });

    it("should query for image name", async () => {
        const p = InMemoryProject.of({ path: "Dockerfile", content: nginxDockerFile });
        const images = await findValues(p, DockerFileParser, "Dockerfile", "//FROM/image/name");
        assert.strictEqual(images[0], "debian");
    });

    // --- RUN / MAINTAINER / LABEL queries ------------------------------------

    it("should find RUNs", async () => {
        const p = InMemoryProject.of({ path: "Dockerfile", content: nginxDockerFile });
        const runs = await findValues(p, DockerFileParser, "Dockerfile", "//RUN");
        assert.strictEqual(runs.length, 2);
    });

    it("should find RUNs invoking rm", async () => {
        // Uses a custom predicate ([?removes]) to filter RUN nodes by value.
        const p = InMemoryProject.of({ path: "Dockerfile", content: nginxDockerFile });
        const runs = await findValues(p, DockerFileParser, "Dockerfile", "//RUN[?removes]", {
            removes: n => n.$value.includes("rm "),
        });
        assert.strictEqual(runs.length, 1);
    });

    it("should find MAINTAINER", async () => {
        const p = InMemoryProject.of({ path: "Dockerfile", content: nginxDockerFile });
        const authors = await findValues(p, DockerFileParser, "Dockerfile", "//MAINTAINER");
        assert.strictEqual(authors.length, 1);
        assert.strictEqual(authors[0], `MAINTAINER NGINX Docker Maintainers "docker-maint@nginx.com"`);
    });

    it("should unpack MAINTAINER", async () => {
        const p = InMemoryProject.of({ path: "Dockerfile", content: nginxDockerFile });
        const authors = await findValues(p, DockerFileParser, "Dockerfile", "//MAINTAINER/maintainer");
        assert.strictEqual(authors.length, 1);
        assert.strictEqual(authors[0], `NGINX Docker Maintainers "docker-maint@nginx.com"`);
    });

    it("should return LABELs", async () => {
        const p = InMemoryProject.of({ path: "Dockerfile", content: nodeDockerfile });
        const labels = await findValues(p, DockerFileParser, "Dockerfile", "//LABEL");
        assert.strictEqual(labels.length, 4);
    });

    it("should unpack LABELs", async () => {
        // Drills into LABEL nodes: pair, key, value, and a key-filtered value.
        const p = InMemoryProject.of({ path: "Dockerfile", content: nodeDockerfile });
        const labelPairs = await findValues(p, DockerFileParser, "Dockerfile", "//LABEL/pair");
        assert.strictEqual(labelPairs.length, 4);
        assert.strictEqual(labelPairs[0], `"com.example.vendor"="ACME Incorporated"`);
        const labelKeys = await findValues(p, DockerFileParser, "Dockerfile", "//LABEL/pair/key");
        assert.strictEqual(labelKeys.length, 4);
        assert.strictEqual(labelKeys[0], `com.example.vendor`);
        const labelValues = await findValues(p, DockerFileParser, "Dockerfile", "//LABEL/pair/value");
        assert.strictEqual(labelValues.length, 4);
        assert.strictEqual(labelValues[0], `ACME Incorporated`);
        const knownKeys = await findValues(
            p,
            DockerFileParser,
            "Dockerfile",
            "//LABEL/pair[/key[@value='com.example.vendor']]/value",
        );
        assert.strictEqual(knownKeys.length, 1);
        assert.strictEqual(knownKeys[0], `ACME Incorporated`);
    });

    // --- In-place updates via matches ----------------------------------------

    it("should update LABEL", async () => {
        const p = InMemoryProject.of({ path: "Dockerfile", content: nodeDockerfile });
        await doWithAllMatches(
            p,
            DockerFileParser,
            "Dockerfile",
            "//LABEL/pair[/key[@value='com.example.vendor']]/value",
            n => (n.$value = "A.N. Other"),
        );
        const contentNow = p.findFileSync("Dockerfile").getContentSync();
        assert.strictEqual(contentNow, nodeDockerfile.replace("ACME Incorporated", "A.N. Other"));
    });

    it("should allow path expression and modify", async () => {
        const p = InMemoryProject.of({ path: "Dockerfile", content: nodeDockerfile });
        await doWithAllMatches(p, DockerFileParser, "Dockerfile", "//FROM/image/tag", n => (n.$value = "xenon"));
        const contentNow = p.findFileSync("Dockerfile").getContentSync();
        assert.strictEqual(contentNow, nodeDockerfile.replace("argon", "xenon"));
    });

    it("should parse problematic file", async () => {
        const p = InMemoryProject.of({ path: "Dockerfile", content: weave1 });
        const images = await findValues(p, DockerFileParser, "Dockerfile", "//FROM/image");
        assert.strictEqual(images.length, 1);
        assert.strictEqual(images[0], "weaveworksdemos/msd-java:jre-latest");
    });

    // --- Fixtures -------------------------------------------------------------
    // Note: these are template literals, so "\\" renders a single backslash
    // (Dockerfile line continuation) and "\${...}" renders a literal ${...}.

    const nodeDockerfile = `FROM node:argon
# Create app directory
RUN mkdir -p /usr/src/app
WORKDIR /usr/src/app
# Install app dependencies
COPY package.json /usr/src/app/
RUN npm install
LABEL "com.example.vendor"="ACME Incorporated"
LABEL com.example.label-with-value="foo"
LABEL version="1.0"
LABEL description="This text illustrates \\
that label-values can span multiple lines."
# Bundle app source
COPY . /usr/src/app
EXPOSE 8080
CMD [ "npm", "start" ]`;

    const nginxDockerFile = `FROM debian:jessie
MAINTAINER NGINX Docker Maintainers "docker-maint@nginx.com"
ENV NGINX_VERSION 1.11.7-1~jessie
RUN apt-key adv --keyserver hkp://pgp.mit.edu:80 --recv-keys 573BFD6B3D8FBC641079A6ABABF5BD827BD9BF62 \\
&& echo "deb http://nginx.org/packages/mainline/debian/ jessie nginx" >> /etc/apt/sources.list \\
&& apt-get update \\
&& apt-get install --no-install-recommends --no-install-suggests -y \\
ca-certificates \\
nginx=\${NGINX_VERSION} \\
nginx-module-xslt \\
nginx-module-geoip \\
nginx-module-image-filter \\
nginx-module-perl \\
nginx-module-njs \\
gettext-base \\
&& rm -rf /var/lib/apt/lists/*
# forward request and error logs to docker log collector
RUN ln -sf /dev/stdout /var/log/nginx/access.log \\
&& ln -sf /dev/stderr /var/log/nginx/error.log
EXPOSE 80 443
CMD [ "nginx", "-g", "daemon off;" ]`;

    const dashedImage = `
FROM adoptopenjdk/openjdk8-openj9
EXPOSE 8080
EXPOSE 8081`;

    const expose1 = `
FROM thing
EXPOSE 8080
`;

    const weave1 = `FROM weaveworksdemos/msd-java:jre-latest
WORKDIR /usr/src/app
COPY *.jar ./app.jar
RUN chown -R \${SERVICE_USER}:\${SERVICE_GROUP} ./app.jar
USER \${SERVICE_USER}
ARG BUILD_DATE
ARG BUILD_VERSION
ARG COMMIT
LABEL org.label-schema.vendor="Weaveworks" \\
org.label-schema.build-date="\${BUILD_DATE}" \\
org.label-schema.version="\${BUILD_VERSION}" \\
org.label-schema.name="Socks Shop: Cart" \\
org.label-schema.description="REST API for Cart service" \\
org.label-schema.url="https://github.com/microservices-demo/carts" \\
org.label-schema.vcs-url="github.com:microservices-demo/carts.git" \\
org.label-schema.vcs-ref="\${COMMIT}" \\
org.label-schema.schema-version="1.0"
ENTRYPOINT ["/usr/local/bin/java.sh","-jar","./app.jar", "--port=80"]`;
});
import {BlockFrame} from '@/services/data/graphics/graphics-types';
import {
ConnectedSpace,
ConnectedSpaceBlockGraphics,
ConnectedSpaceBlockGraphicsRect,
ConnectedSpaceGraphics,
ReportGraphics,
SubjectGraphics,
TopicGraphics
} from '@/services/data/tuples/connected-space-types';
import {Report, ReportId} from '@/services/data/tuples/report-types';
import {Subject, SubjectId} from '@/services/data/tuples/subject-types';
import {Topic, TopicId} from '@/services/data/tuples/topic-types';
import {
BLOCK_FULL_PADDING_HORIZONTAL,
BLOCK_FULL_PADDING_VERTICAL,
BLOCK_GAP_HORIZONTAL,
BLOCK_GAP_VERTICAL,
BLOCK_HEIGHT_MIN,
BLOCK_MARGIN_HORIZONTAL,
BLOCK_MARGIN_VERTICAL,
BLOCK_NAME_OFFSET_Y,
BLOCK_WIDTH_MIN,
SELECTION_FULL_GAP,
SELECTION_GAP
} from './constants';
import {
AssembledConnectedSpaceGraphics,
AssembledReportGraphics,
AssembledSubjectGraphics,
AssembledTopicGraphics,
GraphicsRole
} from './types';
// Deep-copy a block rect, substituting spec defaults for any part that is
// absent. Only `undefined` triggers a fallback, matching the semantics of
// destructuring defaults.
const cloneRectData = (rect: ConnectedSpaceBlockGraphicsRect): ConnectedSpaceBlockGraphicsRect => {
    const coordinate = rect.coordinate !== undefined ? rect.coordinate : {x: 0, y: 0};
    const frame = rect.frame !== undefined
        ? rect.frame
        : {x: 0, y: 0, width: BLOCK_WIDTH_MIN, height: BLOCK_HEIGHT_MIN};
    const name = rect.name !== undefined
        ? rect.name
        : {x: BLOCK_WIDTH_MIN / 2, y: BLOCK_HEIGHT_MIN / 2};
    return {
        coordinate: {
            x: coordinate.x !== undefined ? coordinate.x : 0,
            y: coordinate.y !== undefined ? coordinate.y : 0
        },
        frame: {
            x: frame.x !== undefined ? frame.x : 0,
            y: frame.y !== undefined ? frame.y : 0,
            width: frame.width !== undefined ? frame.width : BLOCK_WIDTH_MIN,
            height: frame.height !== undefined ? frame.height : BLOCK_HEIGHT_MIN
        },
        name: {
            x: name.x !== undefined ? name.x : BLOCK_WIDTH_MIN / 2,
            y: name.y !== undefined ? name.y : BLOCK_HEIGHT_MIN / 2
        }
    };
};
/**
 * Assemble the initial in-memory graphics for a connected space: pair every
 * topic/subject/report with either its persisted rect (deep-copied) or a
 * fresh default placement.
 *
 * Fix: the reports branch previously wrapped the rect in an extra
 * `JSON.parse(JSON.stringify(...))` before `cloneRectData`, which already
 * returns a fresh object — the round-trip was redundant and inconsistent
 * with the topics/subjects branches.
 */
export const createInitGraphics = (options: {
    topics: Array<Topic>;
    subjects: Array<Subject>;
    graphics?: ConnectedSpaceGraphics
}): AssembledConnectedSpaceGraphics => {
    const {
        topics, subjects,
        graphics: {
            topics: topicGraphics = [],
            subjects: subjectGraphics = [],
            reports: reportGraphics = []
        } = {topics: [], subjects: [], reports: []}
    } = options;

    // Default placement for a block with no persisted graphics; built fresh
    // per call site so blocks never share a rect object.
    const defaultRect = () => ({
        coordinate: {x: 0, y: 0},
        frame: {x: 0, y: 0, width: BLOCK_WIDTH_MIN, height: BLOCK_HEIGHT_MIN},
        name: {x: BLOCK_WIDTH_MIN / 2, y: BLOCK_HEIGHT_MIN / 2}
    });

    // Index persisted graphics by id for O(1) lookup.
    const topicGraphicsMap: Map<string, TopicGraphics> = topicGraphics.reduce((map, topic) => {
        map.set(topic.topicId, topic);
        return map;
    }, new Map<string, TopicGraphics>());
    const subjectGraphicsMap: Map<string, SubjectGraphics> = subjectGraphics.reduce((map, subject) => {
        map.set(subject.subjectId, subject);
        return map;
    }, new Map<string, SubjectGraphics>());
    const reportGraphicsMap: Map<string, ReportGraphics> = reportGraphics.reduce((map, report) => {
        map.set(report.reportId, report);
        return map;
    }, new Map<string, ReportGraphics>());

    return {
        topics: topics.map(topic => {
            const graphics = topicGraphicsMap.get(topic.topicId);
            return graphics && graphics.rect
                ? {topic, rect: cloneRectData(graphics.rect)}
                : {topic, rect: defaultRect()};
        }),
        subjects: subjects.map(subject => {
            const graphics = subjectGraphicsMap.get(subject.subjectId);
            return graphics && graphics.rect
                ? {subject, rect: cloneRectData(graphics.rect)}
                : {subject, rect: defaultRect()};
        }),
        // Reports hang off subjects; flatten them before pairing with graphics.
        reports: (subjects.map(subject => subject.reports).filter(x => !!x).flat() as Array<Report>).map(report => {
            const graphics = reportGraphicsMap.get(report.reportId);
            return graphics && graphics.rect
                ? {report, rect: cloneRectData(graphics.rect)}
                : {report, rect: defaultRect()};
        })
    };
};
/** Index assembled topic graphics by topic id for O(1) lookup. */
export const asTopicGraphicsMap = (graphics: AssembledConnectedSpaceGraphics) => {
    const map = new Map<string, AssembledTopicGraphics>();
    graphics.topics.forEach(topicGraphics => map.set(topicGraphics.topic.topicId, topicGraphics));
    return map;
};
/** Index assembled subject graphics by subject id for O(1) lookup. */
export const asSubjectGraphicsMap = (graphics: AssembledConnectedSpaceGraphics) => {
    const map = new Map<string, AssembledSubjectGraphics>();
    graphics.subjects.forEach(subjectGraphics => map.set(subjectGraphics.subject.subjectId, subjectGraphics));
    return map;
};
/** Index assembled report graphics by report id for O(1) lookup. */
export const asReportGraphicsMap = (graphics: AssembledConnectedSpaceGraphics) => {
    const map = new Map<string, AssembledReportGraphics>();
    graphics.reports.forEach(reportGraphics => map.set(reportGraphics.report.reportId, reportGraphics));
    return map;
};
/**
 * Frame size for a block, derived from the rendered bounds of its name text:
 * padded width clamped to the minimum block width, padded height as-is.
 */
export const computeBlockFrameSize = (nameRect: DOMRect) => {
    const width = Math.max(nameRect.width + BLOCK_FULL_PADDING_HORIZONTAL, BLOCK_WIDTH_MIN);
    const height = nameRect.height + BLOCK_FULL_PADDING_VERTICAL;
    return {width, height};
};
/** Name anchor position relative to its block frame: centered, nudged vertically. */
export const computeBlockNamePosition = (frame: BlockFrame) => {
    const x = frame.width / 2;
    const y = frame.height / 2 + BLOCK_NAME_OFFSET_Y;
    return {x, y};
};
/**
 * Stack every block that was never positioned (coordinate.x === 0) into a
 * single column at `initX`, ordered by case-insensitive name, with a vertical
 * gap between blocks. Mutates the rects in place.
 */
const redressGraphics = <T extends ConnectedSpaceBlockGraphics>(
    graphics: Array<T>,
    getName: (graphics: T) => string,
    initX: number
) => {
    const unplaced = graphics.filter(item => item.rect.coordinate.x === 0);
    unplaced.sort((a, b) => getName(a).toLowerCase().localeCompare(getName(b).toLowerCase()));
    let top = BLOCK_MARGIN_VERTICAL;
    for (const item of unplaced) {
        item.rect.coordinate = {x: initX, y: top};
        top += item.rect.frame.height + BLOCK_GAP_VERTICAL;
    }
};
/**
 * Measure each rendered topic name in the SVG, size/position its frame from
 * the text bounds, then column-stack any topics that were never positioned.
 */
const computeTopicsGraphics = (graphics: AssembledConnectedSpaceGraphics, svg: SVGSVGElement) => {
    const topicMap: Map<string, AssembledTopicGraphics> = asTopicGraphicsMap(graphics);
    const groups = svg.querySelectorAll(`g[data-role=${GraphicsRole.TOPIC}]`);
    for (const group of Array.from(groups)) {
        const topicId = group.getAttribute('data-topic-id')!;
        const nameText = group.querySelector(`text[data-role='${GraphicsRole.TOPIC_NAME}']`)! as SVGTextElement;
        const bounds = nameText.getBBox();
        const rect = topicMap.get(topicId)!.rect;
        rect.frame = {...rect.frame, ...computeBlockFrameSize(bounds)};
        rect.name = computeBlockNamePosition(rect.frame);
    }
    // Topics occupy the leftmost column.
    redressGraphics<AssembledTopicGraphics>(
        Array.from(topicMap.values()),
        (item: AssembledTopicGraphics) => item.topic.name,
        BLOCK_MARGIN_HORIZONTAL
    );
    return topicMap;
};
/**
 * Measure each rendered subject name, size/position its frame from the text
 * bounds, then column-stack unpositioned subjects just right of the topics.
 */
const computeSubjectGraphics = (graphics: AssembledConnectedSpaceGraphics, svg: SVGSVGElement) => {
    // Subjects start one gap to the right of the widest-reaching topic.
    const leftX = graphics.topics.reduce((right, topicGraphics) => {
        return Math.max(right, topicGraphics.rect.frame.x + topicGraphics.rect.frame.width);
    }, BLOCK_MARGIN_HORIZONTAL) + BLOCK_GAP_HORIZONTAL;
    const subjectMap: Map<string, AssembledSubjectGraphics> = asSubjectGraphicsMap(graphics);
    const groups = svg.querySelectorAll(`g[data-role=${GraphicsRole.SUBJECT}]`);
    for (const group of Array.from(groups)) {
        const subjectId = group.getAttribute('data-subject-id')!;
        const nameText = group.querySelector(`text[data-role='${GraphicsRole.SUBJECT_NAME}']`)! as SVGTextElement;
        const bounds = nameText.getBBox();
        const rect = subjectMap.get(subjectId)!.rect;
        rect.frame = {...rect.frame, ...computeBlockFrameSize(bounds)};
        rect.name = computeBlockNamePosition(rect.frame);
    }
    redressGraphics<AssembledSubjectGraphics>(
        Array.from(subjectMap.values()),
        (item: AssembledSubjectGraphics) => item.subject.name,
        leftX
    );
    return subjectMap;
};
/**
 * Measure each rendered report name, size/position its frame from the text
 * bounds, then column-stack unpositioned reports to the right of both the
 * topic and subject columns.
 */
const computeReportGraphics = (graphics: AssembledConnectedSpaceGraphics, svg: SVGSVGElement) => {
    // Reports start 2.5 gaps right of the widest topic/subject frame.
    const leftX = [...graphics.topics, ...graphics.subjects].reduce((right, elementGraphics) => {
        return Math.max(right, elementGraphics.rect.frame.x + elementGraphics.rect.frame.width);
    }, BLOCK_MARGIN_HORIZONTAL) + BLOCK_GAP_HORIZONTAL * 2.5;
    const reportMap: Map<string, AssembledReportGraphics> = asReportGraphicsMap(graphics);
    const groups = svg.querySelectorAll(`g[data-role=${GraphicsRole.REPORT}]`);
    for (const group of Array.from(groups)) {
        const reportId = group.getAttribute('data-report-id')!;
        const nameText = group.querySelector(`text[data-role='${GraphicsRole.REPORT_NAME}']`)! as SVGTextElement;
        const bounds = nameText.getBBox();
        const rect = reportMap.get(reportId)!.rect;
        rect.frame = {...rect.frame, ...computeBlockFrameSize(bounds)};
        rect.name = computeBlockNamePosition(rect.frame);
    }
    redressGraphics<AssembledReportGraphics>(
        Array.from(reportMap.values()),
        (item: AssembledReportGraphics) => item.report.name,
        leftX
    );
    return reportMap;
};
/**
 * Compute graphics for every block, then return the canvas size: large enough
 * to contain every block plus the margin, and never smaller than the SVG's
 * current on-screen size.
 */
export const computeGraphics = (options: {
    graphics: AssembledConnectedSpaceGraphics;
    svg: SVGSVGElement;
}) => {
    const {graphics, svg} = options;
    // Lay out every element kind first.
    computeTopicsGraphics(graphics, svg);
    computeSubjectGraphics(graphics, svg);
    computeReportGraphics(graphics, svg);
    // Grow from the current SVG size (margin pre-subtracted, re-added below).
    const {width: svgWidth, height: svgHeight} = svg.getBoundingClientRect();
    let width = svgWidth - BLOCK_MARGIN_HORIZONTAL;
    let height = svgHeight - BLOCK_MARGIN_VERTICAL;
    for (const blockGraphics of [...graphics.topics, ...graphics.subjects, ...graphics.reports]) {
        const {coordinate, frame} = blockGraphics.rect;
        width = Math.max(width, coordinate.x + frame.width);
        height = Math.max(height, coordinate.y + frame.height);
    }
    return {width: width + BLOCK_MARGIN_HORIZONTAL, height: height + BLOCK_MARGIN_VERTICAL};
};
/** Selection outline: the block frame expanded by the selection gap on all sides. */
const computeBlockSelection = (blockGraphics: ConnectedSpaceBlockGraphics) => {
    const {coordinate, frame} = blockGraphics.rect;
    return {
        x: coordinate.x - SELECTION_GAP,
        y: coordinate.y - SELECTION_GAP,
        width: frame.width + SELECTION_FULL_GAP,
        height: frame.height + SELECTION_FULL_GAP
    };
};
/** Selection outline for the topic with the given id. */
export const computeTopicSelection = (options: { topicId: TopicId; graphics: AssembledConnectedSpaceGraphics }) => {
    const {graphics, topicId} = options;
    // Loose equality on purpose: ids may arrive as string or number.
    // eslint-disable-next-line
    const found = graphics.topics.find(({topic}) => topic.topicId == topicId)!;
    return computeBlockSelection(found);
};
/** Selection outline for the subject with the given id. */
export const computeSubjectSelection = (options: { subjectId: SubjectId; graphics: AssembledConnectedSpaceGraphics }) => {
    const {graphics, subjectId} = options;
    // Loose equality on purpose: ids may arrive as string or number.
    // eslint-disable-next-line
    const found = graphics.subjects.find(({subject}) => subject.subjectId == subjectId)!;
    return computeBlockSelection(found);
};
/** Selection outline for the report with the given id. */
export const computeReportSelection = (options: { reportId: ReportId; graphics: AssembledConnectedSpaceGraphics }) => {
    const {graphics, reportId} = options;
    // Loose equality on purpose: ids may arrive as string or number.
    // eslint-disable-next-line
    const found = graphics.reports.find(({report}) => report.reportId == reportId)!;
    return computeBlockSelection(found);
};
export const transformGraphicsToSave = (connectedSpace: ConnectedSpace, graphics: AssembledConnectedSpaceGraphics): ConnectedSpaceGraphics => {
return {
connectId: connectedSpace.connectId,
topics: graphics.topics.map(graphics => {
return {
topicId: graphics.topic.topicId,
rect: JSON.parse(JSON.stringify(graphics.rect))
};
}),
subjects: graphics.subjects.map(graphics => {
return {
subjectId: graphics.subject.subjectId,
rect: JSON.parse(JSON.stringify(graphics.rect))
};
}),
reports: graphics.reports.map(graphics => {
return {
reportId: graphics.report.reportId,
rect: JSON.parse(JSON.stringify(graphics.rect))
};
})
};
}; | the_stack |
// Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found at https://github.com/ChromeDevTools/devtools-frontend/blob/master/LICENSE
// Minor refactor and modifications made by @angrykoala for wendigo
// Installs `window.WendigoPathFinder` in the page context: an object that
// builds a unique CSS selector or XPath expression for a given DOM node.
// Ported from Chromium DevTools' DOMPath (see copyright header above).
export default function SelectorFinderLoader(): void {
    // Type guard narrowing a Node to Element.
    function nodeIsElement(node: Node): node is Element {
        return node.nodeType === Node.ELEMENT_NODE;
    }
    // Install only once per page.
    if (!(window as any).WendigoPathFinder) {
        // One segment of a CSS/XPath path. `optimized` marks a segment that
        // anchors the path uniquely (e.g. an #id), so traversal can stop there.
        class Step {
            public value: string;
            public optimized: boolean;
            constructor(value: string, optimized?: boolean) {
                this.value = value;
                this.optimized = optimized || false;
            }
            public toString(): string {
                return this.value;
            }
        }
        const _cssPathFinderHelpers = {
            // Cheap cases for a CSS step: non-elements (null), #id anchors,
            // html/head/body tags, and parentless/document-child nodes.
            // Returns `undefined` when the full sibling analysis in
            // _cssPathStep is still required.
            _stepPreprocess(node: Node): Step | null | undefined {
                if (!nodeIsElement(node))
                    return null;
                const id = node.getAttribute('id');
                if (id)
                    return new Step(this.idSelector(id), true);
                const nodeNameLower = node.nodeName.toLowerCase();
                if (nodeNameLower === 'body' || nodeNameLower === 'head' || nodeNameLower === 'html')
                    return new Step(node.localName, true);
                const nodeName = node.localName;
                const parent = node.parentNode;
                if (!parent || parent.nodeType === Node.DOCUMENT_NODE)
                    return new Step(nodeName, true);
                return undefined;
            },
            // Builds one CSS selector segment for `node`. Class names or an
            // :nth-child() index are appended only when same-named siblings
            // would otherwise make the segment ambiguous.
            _cssPathStep(node: Element, isTargetNode: boolean): Step | null {
                const value = this._stepPreprocess(node);
                if (value !== undefined) return value;
                const parent = node.parentNode;
                const nodeName = node.localName;
                const prefixedOwnClassNamesArray = this.prefixedElementClassNames(node);
                let needsClassNames = false;
                let needsNthChild = false;
                let ownIndex = -1;
                let elementIndex = -1;
                const siblings = (parent as Node & ParentNode).children;
                // Scan siblings until we know our element index AND whether an
                // :nth-child() index is unavoidable.
                for (let i = 0;
                    (ownIndex === -1 || !needsNthChild) && i < siblings.length; ++i) {
                    const sibling = siblings[i];
                    if (sibling.nodeType !== Node.ELEMENT_NODE)
                        continue;
                    elementIndex += 1;
                    if (sibling === node) {
                        ownIndex = elementIndex;
                        continue;
                    }
                    if (needsNthChild)
                        continue;
                    if (sibling.localName !== nodeName)
                        continue;
                    // A same-named sibling exists: the bare tag name is not
                    // unique. Try class names before falling back to index.
                    needsClassNames = true;
                    const ownClassNames = new Set(prefixedOwnClassNamesArray);
                    if (!ownClassNames.size) {
                        needsNthChild = true;
                        continue;
                    }
                    // Remove every class this sibling shares with us; if none
                    // of our classes survive, classes cannot disambiguate.
                    const siblingClassNamesArray = this.prefixedElementClassNames(sibling);
                    for (let j = 0; j < siblingClassNamesArray.length; ++j) {
                        const siblingClass = siblingClassNamesArray[j];
                        if (!ownClassNames.has(siblingClass))
                            continue;
                        ownClassNames.delete(siblingClass);
                        if (!ownClassNames.size) {
                            needsNthChild = true;
                            break;
                        }
                    }
                }
                let result = nodeName;
                // For a bare <input type=...> target with no id/class, the
                // type attribute is the most readable discriminator.
                if (isTargetNode && nodeName.toLowerCase() === 'input' && node.getAttribute('type') && !node.getAttribute('id') &&
                    !node.getAttribute('class'))
                    result += `[type="${node.getAttribute('type')}"]`;
                if (needsNthChild) {
                    result += `:nth-child(${ownIndex + 1})`;
                } else if (needsClassNames) {
                    // substr(1) strips the '$' prefix added by prefixedElementClassNames.
                    for (const prefixedName of prefixedOwnClassNamesArray)
                        result += `.${this.escapeIdentifierIfNeeded(prefixedName.substr(1))}`;
                }
                return new Step(result, false);
            },
            // Class list of `node`, each name prefixed with '$'.
            prefixedElementClassNames(node: Element): Array<string> {
                const classAttribute = node.getAttribute('class');
                if (!classAttribute)
                    return [];
                return classAttribute.split(/\s+/g).filter(Boolean).map((name) => {
                    // The prefix is required to store "__proto__" in an object-based map.
                    return `$${name}`;
                });
            },
            idSelector(id: string): string {
                return `#${this.escapeIdentifierIfNeeded(id)}`;
            },
            // CSS-escapes an identifier unless it is already valid as-is.
            escapeIdentifierIfNeeded(ident: string): string {
                if (this.isCSSIdentifier(ident))
                    return ident;
                // Identifiers may not start with a digit or certain hyphen
                // sequences; the first character then needs escaping too.
                const shouldEscapeFirst = /^(?:[0-9]|-[0-9-]?)/.test(ident);
                const lastIndex = ident.length - 1;
                return ident.replace(/./g, (c, i) => {
                    return ((shouldEscapeFirst && i === 0) || !this.isCSSIdentChar(c)) ? this.escapeAsciiChar(c, i === lastIndex) : c;
                });
            },
            // Hex-escape form; a trailing space terminates the escape unless
            // this is the last character of the identifier.
            escapeAsciiChar(c: string, isLast: boolean): string {
                return `\\${this.toHexByte(c)}${isLast ? '' : ' '}`;
            },
            // Two-digit lowercase hex of the character's code unit.
            toHexByte(c: string): string {
                let hexByte = c.charCodeAt(0).toString(16);
                if (hexByte.length === 1)
                    hexByte = `0${hexByte}`;
                return hexByte;
            },
            // True for characters valid inside a CSS identifier: ASCII
            // alphanumerics, '_', '-', or any code point >= U+00A0.
            isCSSIdentChar(c: string): boolean {
                if (/[a-zA-Z0-9_-]/.test(c))
                    return true;
                return c.charCodeAt(0) >= 0xA0;
            },
            isCSSIdentifier(value: string): boolean {
                // Double hyphen prefixes are not allowed by specification, but many sites use it.
                return /^-{0,2}[a-zA-Z_][a-zA-Z0-9_-]*$/.test(value);
            }
        };
        const _xPathFinderHelpers = {
            // Builds one XPath segment for `node`; a positional index ([n]) is
            // appended only when similar siblings exist.
            _xPathStep(node: Node): Step | null {
                let ownValue;
                const ownIndex = this._xPathIndex(node);
                if (ownIndex === -1)
                    return null; // Error.
                switch (node.nodeType) {
                    case Node.ELEMENT_NODE:
                        // An id anchors the whole path; mark optimized.
                        if ((node as Element).getAttribute('id'))
                            return new Step(`//*[@id="${(node as Element).getAttribute('id')}"]`, true);
                        ownValue = (node as Element).localName;
                        break;
                    case Node.ATTRIBUTE_NODE:
                        ownValue = `@${node.nodeName}`;
                        break;
                    case Node.TEXT_NODE:
                    case Node.CDATA_SECTION_NODE:
                        ownValue = 'text()';
                        break;
                    case Node.PROCESSING_INSTRUCTION_NODE:
                        ownValue = 'processing-instruction()';
                        break;
                    case Node.COMMENT_NODE:
                        ownValue = 'comment()';
                        break;
                    case Node.DOCUMENT_NODE:
                        ownValue = '';
                        break;
                    default:
                        ownValue = '';
                        break;
                }
                if (ownIndex > 0)
                    ownValue += `[${ownIndex}]`;
                return new Step(ownValue, node.nodeType === Node.DOCUMENT_NODE);
            },
            // Returns -1 on error, 0 when no similar sibling exists (index
            // unnecessary), otherwise the 1-based XPath index of `node` among
            // its expression-matching siblings.
            _xPathIndex(node: Node): number {
                const siblings = node.parentNode ? node.parentNode.children : null;
                if (!siblings)
                    return 0; // Root node - no siblings.
                let hasSameNamedElements = false;
                for (let i = 0; i < siblings.length; ++i) {
                    if (this.areNodesSimilar(node, siblings[i]) && siblings[i] !== node) {
                        hasSameNamedElements = true;
                        break;
                    }
                }
                if (!hasSameNamedElements)
                    return 0;
                let ownIndex = 1; // XPath indices start with 1.
                for (let i = 0; i < siblings.length; ++i) {
                    if (this.areNodesSimilar(node, siblings[i])) {
                        if (siblings[i] === node)
                            return ownIndex;
                        ownIndex++;
                    }
                }
                return -1; // An error occurred: |node| not found in parent's children.
            },
            // True when two nodes would match the same XPath step: same tag
            // for elements, same node type otherwise.
            areNodesSimilar(left: Node, right: Node): boolean {
                if (left === right)
                    return true;
                if (nodeIsElement(left) && nodeIsElement(right))
                    return left.localName === right.localName;
                if (left.nodeType === right.nodeType)
                    return true;
                // XPath treats CDATA as text nodes.
                const leftType = left.nodeType === Node.CDATA_SECTION_NODE ? Node.TEXT_NODE : left.nodeType;
                const rightType = right.nodeType === Node.CDATA_SECTION_NODE ? Node.TEXT_NODE : right.nodeType;
                return leftType === rightType;
            }
        };
        (window as any).WendigoPathFinder = {
            // Unique CSS selector for `node`; empty string for non-elements.
            cssPath(node: Node): string {
                if (!nodeIsElement(node))
                    return '';
                const stepsFunction = _cssPathFinderHelpers._cssPathStep.bind(_cssPathFinderHelpers);
                const steps = this._generatePathSteps(node, stepsFunction);
                return steps.join(' > ');
            },
            // XPath expression for `node`; absolute ('/...') unless the first
            // collected step is an optimized #id anchor.
            xPath(node: Node): string {
                if (node.nodeType === Node.DOCUMENT_NODE)
                    return '/';
                const stepsFunction = _xPathFinderHelpers._xPathStep.bind(_xPathFinderHelpers);
                const steps = this._generatePathSteps(node, stepsFunction);
                return (steps.length && steps[0].optimized ? '' : '/') + steps.join('/');
            },
            // Walks from `node` up to the root (or the first optimized step),
            // collecting one Step per ancestor, returned in document order.
            _generatePathSteps(node: Node, stepFunction: (node: Node, isTarget: boolean) => Step): Array<Step> {
                const steps = [];
                let contextNode = node;
                while (contextNode) {
                    const step = stepFunction(contextNode, contextNode === node);
                    if (!step)
                        break; // Error - bail out early.
                    steps.push(step);
                    if (step.optimized)
                        break;
                    contextNode = contextNode.parentNode as Node;
                }
                steps.reverse();
                return steps;
            }
        };
    }
}
import { Component, Input, OnDestroy, OnInit } from '@angular/core';
import { FormArray, FormControl, FormGroup, Validators } from '@angular/forms';
import {
CodeSpec,
EnvironmentSpec,
ExperimentMeta,
ExperimentSpec,
Specs
} from '@submarine/interfaces/experiment-spec';
import { ExperimentFormService } from '@submarine/services/experiment.form.service';
import { ExperimentService } from '@submarine/services/experiment.service';
import { ExperimentValidatorService } from '@submarine/services/experiment.validator.service';
import { customAlphabet } from 'nanoid';
import { alphanumeric } from 'nanoid-dictionary';
import { NzMessageService } from 'ng-zorro-antd';
import { Subscription } from 'rxjs';
@Component({
    selector: 'submarine-experiment-customized-form',
    templateUrl: './experiment-customized-form.component.html',
    styleUrls: ['./experiment-customized-form.component.scss']
})
// Multi-step wizard form for creating, updating, or cloning a Submarine
// experiment. Step flow and the Next/Submit button are driven by the parent
// through ExperimentFormService.
export class ExperimentCustomizedFormComponent implements OnInit, OnDestroy {
    // Wizard behaviour: create a new experiment, update an existing one, or
    // clone from an existing spec.
    @Input() mode: 'create' | 'update' | 'clone';

    // About new experiment
    // Reactive form backing the whole wizard.
    experiment: FormGroup;
    // Spec assembled from the form at the preview step; submitted as-is.
    finalExperimentSpec: ExperimentSpec;
    // Current wizard step (0-based; see SECOND_STEP / PREVIEW_STEP below).
    step: number = 0;
    // All subscriptions, torn down in ngOnDestroy.
    subscriptions: Subscription[] = [];
    // Tag select options. Currently empty — see TODO in ngOnInit.
    listOfOption: Array<{ label: string; value: string }> = [];
    // TODO: Fetch all images from submarine server
    imageIndex = 0;
    defaultImage = 'apache/submarine:tf-mnist-with-summaries-1.0';
    imageList = [this.defaultImage];
    // Constants
    TF_SPECNAMES = ['Master', 'Worker', 'Ps'];
    PYTORCH_SPECNAMES = ['Master', 'Worker'];
    defaultSpecName = 'worker';
    MEMORY_UNITS = ['M', 'G'];
    SECOND_STEP = 1;
    PREVIEW_STEP = 2;
    ADVANCED = false;
    // About env page
    currentEnvPage = 1;
    PAGESIZE = 5;
    // About spec
    jobTypes = 'Distributed Tensorflow';
    framework = 'Tensorflow';
    currentSpecPage = 1;
    // About update: id and spec of the experiment being updated/cloned.
    @Input() targetId: string = null;
    @Input() targetSpec: ExperimentSpec = null;

    constructor(
        private experimentService: ExperimentService,
        private experimentValidatorService: ExperimentValidatorService,
        private experimentFormService: ExperimentFormService,
        private nzMessageService: NzMessageService
    ) {}

    // Builds the form, pre-fills it for update/clone mode, and wires the
    // step-navigation events coming from the parent modal.
    ngOnInit() {
        this.experiment = new FormGroup({
            // Pattern: alphanumeric, optionally with inner hyphens; no
            // leading/trailing hyphen.
            experimentName: new FormControl(null, [Validators.pattern('([a-zA-Z0-9][a-zA-Z0-9\-]*[a-zA-Z0-9]|[a-zA-Z0-9]+)'), Validators.required]),
            description: new FormControl(null, [Validators.required]),
            tags: new FormControl([], []),
            cmd: new FormControl('', [Validators.required]),
            image: new FormControl(this.defaultImage, [Validators.required]),
            envs: new FormArray([], [this.experimentValidatorService.nameValidatorFactory('key')]),
            specs: new FormArray([], [this.experimentValidatorService.nameValidatorFactory('name')]),
            gitRepo: new FormControl(null, [])
        });
        // Bind the component method for callback
        this.checkStatus = this.checkStatus.bind(this);

        if (this.mode === 'update') {
            this.updateExperimentInit();
        } else if (this.mode === 'clone') {
            this.cloneExperimentInit(this.targetSpec);
        }

        // Fire status to parent when form value has changed
        const sub1 = this.experiment.valueChanges.subscribe(this.checkStatus);
        // Parent emits +1 (next) / -1 (back); on the preview step "next"
        // means submit, and entering preview builds the final spec first.
        const sub2 = this.experimentFormService.stepService.subscribe((n) => {
            if (n > 0) {
                if (this.step === this.PREVIEW_STEP) {
                    this.handleSubmit();
                } else if (this.step === this.SECOND_STEP) {
                    this.onPreview();
                    this.step += 1;
                } else {
                    this.step += 1;
                }
            } else {
                this.step -= 1;
            }
            // Send the current step and okText back to parent
            this.experimentFormService.modalPropsChange({
                okText: this.step !== this.PREVIEW_STEP ? 'Next step' : 'Submit',
                currentStep: this.step
            });
            // Run check after step is changed
            this.checkStatus();
        });

        this.subscriptions.push(sub1, sub2);
        //TODO: get tags from server
        this.listOfOption = [];
    }

    ngOnDestroy() {
        // Clean up the subscriptions
        this.subscriptions.forEach((sub) => {
            sub.unsubscribe();
        });
    }

    // Adds a user-typed image name to the select options (deduplicated).
    addItem(input: HTMLInputElement): void {
        const value = input.value;
        if (this.imageList.indexOf(value) === -1) {
            this.imageList = [...this.imageList, input.value || `New item ${this.imageIndex++}`];
        }
    }

    // Getters of experiment request form
    get experimentName() {
        return this.experiment.get('experimentName');
    }
    get tags() {
        return this.experiment.get('tags');
    }
    get description() {
        return this.experiment.get('description');
    }
    get cmd() {
        return this.experiment.get('cmd');
    }
    get envs() {
        return this.experiment.get('envs') as FormArray;
    }
    get image() {
        return this.experiment.get('image');
    }
    get specs() {
        return this.experiment.get('specs') as FormArray;
    }
    get gitRepo() {
        return this.experiment.get('gitRepo');
    }

    /**
     * Reset properties in parent component when the form is about to closed
     */
    closeModal() {
        this.experimentFormService.modalPropsClear();
    }

    /**
     * Check the validity of the experiment page and enable/disable the
     * parent's Next button accordingly (true = disabled).
     */
    checkStatus() {
        if (this.step === 0) {
            this.experimentFormService.btnStatusChange(
                this.experimentName.invalid ||
                this.tags.invalid ||
                this.cmd.invalid ||
                this.image.invalid ||
                this.envs.invalid
            );
        } else if (this.step === 1) {
            this.experimentFormService.btnStatusChange(this.specs.invalid);
        }
    }

    // Build the spec shown on the preview step.
    onPreview() {
        this.finalExperimentSpec = this.constructSpec();
    }

    /**
     * Event handler for Next step/Submit button
     *
     * NOTE(review): the 'update'/'clone' branches use the deprecated
     * positional subscribe(next, error, complete) signature while 'create'
     * uses an observer object — presumably equivalent, but worth unifying;
     * confirm against the RxJS version in use.
     */
    handleSubmit() {
        if (this.mode === 'create') {
            this.experimentService.createExperiment(this.finalExperimentSpec).subscribe({
                next: () => {},
                error: (msg) => {
                    this.nzMessageService.error(`${msg}, please try again`, {
                        nzPauseOnHover: true
                    });
                },
                complete: () => {
                    this.nzMessageService.success('Experiment creation succeeds');
                    this.experimentFormService.fetchList();
                    this.closeModal();
                }
            });
        } else if (this.mode === 'update') {
            this.experimentService.updateExperiment(this.targetId, this.finalExperimentSpec).subscribe(
                null,
                (msg) => {
                    this.nzMessageService.error(`${msg}, please try again`, {
                        nzPauseOnHover: true
                    });
                },
                () => {
                    this.nzMessageService.success('Modification succeeds!');
                    this.experimentFormService.fetchList();
                    this.closeModal();
                }
            );
        } else if (this.mode === 'clone') {
            this.experimentService.createExperiment(this.finalExperimentSpec).subscribe(
                null,
                (msg) => {
                    this.nzMessageService.error(`${msg}, please try again`, {
                        nzPauseOnHover: true
                    });
                },
                () => {
                    this.nzMessageService.success('Create a new experiment !');
                    this.experimentFormService.fetchList();
                    this.closeModal();
                }
            );
        }
    }

    /**
     * Create a new env variable input
     */
    createEnv(defaultKey: string = '', defaultValue: string = '') {
        // Create a new FormGroup
        return new FormGroup(
            {
                key: new FormControl(defaultKey, [Validators.required]),
                value: new FormControl(defaultValue, [Validators.required])
            },
            [this.experimentValidatorService.envValidator]
        );
    }

    /**
     * Create a new spec (one distributed-training role: name, replicas and
     * per-replica resources).
     */
    createSpec(
        defaultName: string = 'Worker',
        defaultReplica: number = 1,
        defaultCpu: number = 1,
        defaultGpu: number = 0,
        defaultMemory: number = 1024,
        defaultUnit: string = 'M'
    ): FormGroup {
        return new FormGroup(
            {
                name: new FormControl(defaultName, [Validators.required]),
                replicas: new FormControl(defaultReplica, [Validators.min(1), Validators.required]),
                cpus: new FormControl(defaultCpu, [Validators.min(1), Validators.required]),
                gpus: new FormControl(defaultGpu, [Validators.min(0), Validators.required]),
                memory: new FormGroup(
                    {
                        num: new FormControl(defaultMemory, [Validators.required]),
                        unit: new FormControl(defaultUnit, [Validators.required])
                    },
                    [this.experimentValidatorService.memoryValidator]
                )
            },
            [this.experimentValidatorService.specValidator]
        );
    }

    /**
     * Handler for the create env button
     */
    onCreateEnv() {
        const env = this.createEnv();
        this.envs.push(env);
        // If the new page is created, jump to that page
        if (this.envs.controls.length > 1 && this.envs.controls.length % this.PAGESIZE === 1) {
            this.currentEnvPage += 1;
        }
    }

    /**
     * Handler for the create spec button
     */
    onCreateSpec() {
        const spec = this.createSpec();
        this.specs.push(spec);
        // If the new page is created, jump to that page
        if (this.specs.controls.length > 1 && this.specs.controls.length % this.PAGESIZE === 1) {
            this.currentSpecPage += 1;
        }
    }

    /**
     * Construct spec for new experiment creation from the current form state.
     */
    constructSpec(): ExperimentSpec {
        // Construct the spec
        const meta: ExperimentMeta = {
            name: this.experimentName.value.toLowerCase(),
            tags: this.tags.value,
            framework: this.framework === 'Standalone' ? 'Tensorflow' : this.framework,
            cmd: this.cmd.value,
            envVars: {},
        };
        // Rows with an empty key are silently dropped.
        for (const env of this.envs.controls) {
            if (env.get('key').value) {
                meta.envVars[env.get('key').value] = env.get('value').value;
            }
        }

        const specs: Specs = {};
        for (const spec of this.specs.controls) {
            if (spec.get('name').value) {
                // Resources serialized in the server's "k=v,k=v" format.
                specs[spec.get('name').value] = {
                    replicas: spec.get('replicas').value,
                    resources: `cpu=${spec.get('cpus').value},nvidia.com/gpu=${spec.get('gpus').value},memory=${
                        spec.get('memory').get('num').value
                    }${spec.get('memory').get('unit').value}`
                };
            }
        }

        const environment: EnvironmentSpec = {
            image: this.image.value
        };

        const code: CodeSpec = {
            syncMode: 'git',
            url: this.gitRepo.value
        };

        const newExperimentSpec: ExperimentSpec = {
            meta: meta,
            environment: environment,
            spec: specs
        };

        // Code sync is optional; only attach it when a repo was provided.
        if (code.url !== null) {
            newExperimentSpec.code = code;
        }

        return newExperimentSpec;
    }

    /**
     * Delete list items(envs or specs)
     *
     * @param arr - The FormArray containing the item
     * @param index - The index of the item
     */
    deleteItem(arr: FormArray, index: number) {
        arr.removeAt(index);
    }

    deleteAllItem(arr: FormArray) {
        arr.clear();
    }

    // Pre-fill the form for update mode; the name is locked.
    updateExperimentInit() {
        // Prevent user from modifying the name
        this.experimentName.disable();
        // Put value back
        this.experimentName.setValue(this.targetSpec.meta.name);
        this.cloneExperiment(this.targetSpec);
        // Check status to enable next btn
        this.checkStatus();
    }

    // Pre-fill the form for clone mode; a random suffix keeps the name unique.
    cloneExperimentInit(spec: ExperimentSpec) {
        // Enable user from modifying the name
        this.experimentName.enable();
        // Put value back
        const id: string = customAlphabet(alphanumeric, 8)();
        const cloneExperimentName = spec.meta.name + '-' + id;
        this.experimentName.setValue(cloneExperimentName.toLocaleLowerCase());
        this.cloneExperiment(spec);
        this.checkStatus();
    }

    // Copies an existing spec's fields back into the form controls.
    cloneExperiment(spec: ExperimentSpec) {
        this.tags.setValue(spec.meta.tags);
        this.description.setValue(spec.meta.description);
        this.cmd.setValue(spec.meta.cmd);
        this.image.setValue(spec.environment.image);
        if (this.imageList.indexOf(spec.environment.image) === -1) {
            this.imageList = [...this.imageList, spec.environment.image || `New item ${this.imageIndex++}`];
        }
        for (const [key, value] of Object.entries(spec.meta.envVars)) {
            const env = this.createEnv(key, value);
            this.envs.push(env);
        }
        for (const [specName, info] of Object.entries(spec.spec)) {
            const cpuCount = info.resourceMap.cpu;
            // NOTE(review): any defined gpu value collapses to '1' here — if
            // resourceMap.gpu carries an actual count, multi-GPU specs lose it
            // on clone/update; confirm the server-side resourceMap shape.
            const gpuCount = info.resourceMap.gpu === undefined ? '0' : '1';
            // e.g. "1024M" -> ["1024", "M"]
            const [memory, unit] = info.resourceMap.memory.match(/\d+|[MG]/g);
            const newSpec = this.createSpec(
                specName,
                parseInt(info.replicas, 10),
                parseInt(cpuCount, 10),
                parseInt(gpuCount, 10),
                parseInt(memory, 10),
                unit
            );
            this.specs.push(newSpec);
        }
    }
}
import { createRule, getAttrSpecs } from '@markuplint/ml-core';
import {
ariaSpec,
checkAria,
getComputedRole,
getImplicitRole,
getPermittedRoles,
getRoleSpec,
htmlSpec,
isValidAttr,
} from '../helpers';
// Per-rule options; defaults are provided by `defaultOptions` below.
type Options = {
    // Validate the values of aria-* states/properties against the spec.
    // NOTE(review): usage is outside this excerpt — confirm.
    checkingValue?: boolean;
    // Report aria-* states/properties that are deprecated for the computed role.
    checkingDeprecatedProps?: boolean;
    // Restrict `role` to the roles that ARIA in HTML permits for the element.
    permittedAriaRoles?: boolean;
    // Disallow explicitly setting a role the element already has implicitly.
    disallowSetImplicitRole?: boolean;
    // Disallow setting aria-* properties that mirror implicit semantics.
    // NOTE(review): usage is outside this excerpt — confirm.
    disallowSetImplicitProps?: boolean;
    // Disallow aria-* properties explicitly set to their default value.
    // NOTE(review): usage is outside this excerpt — confirm.
    disallowDefaultValue?: boolean;
};
// WAI-ARIA rule: validates role names, permitted roles, states/properties,
// required properties, value syntax, and HTML-attribute equivalence.
export default createRule<true, Options>({
	defaultValue: true,
	// Every check is on by default except flagging explicit default values.
	defaultOptions: {
		checkingValue: true,
		checkingDeprecatedProps: true,
		permittedAriaRoles: true,
		disallowSetImplicitRole: true,
		disallowSetImplicitProps: true,
		disallowDefaultValue: false,
	},
	async verify({ document, report, t }) {
		await document.walkOn('Element', async node => {
			const attrSpecs = getAttrSpecs(node.nameWithNS, document.specs);
			const html = htmlSpec(node.nodeName);
			const { roles, ariaAttrs } = ariaSpec();
			// Elements unknown to the HTML spec cannot be validated; skip them.
			if (!html || !attrSpecs) {
				return;
			}
			const roleAttrTokens = node.getAttributeToken('role');
			const roleAttr = roleAttrTokens[0];
			// Roles in the spec
			if (roleAttr) {
				const value = roleAttr.getValue().potential.trim().toLowerCase();
				const existedRole = roles.find(role => role.name === value);
				if (!existedRole) {
					// The role name does not exist in WAI-ARIA.
					// NOTE(review): the appended template literal duplicates the
					// localized message in plain English, with no separator —
					// confirm this concatenation is intentional.
					report({
						scope: node,
						message:
							t(
								'{0} according to {1}',
								t('{0} does not exist', t('the "{0}" {1}', value, 'role')),
								'the WAI-ARIA specification',
							) + `This "${value}" role does not exist in WAI-ARIA.`,
						line: roleAttr.startLine,
						col: roleAttr.startCol,
						raw: roleAttr.raw,
					});
				} else if (existedRole.isAbstract) {
					// Abstract roles must never appear in content.
					report({
						scope: node,
						message: t('{0} is {1}', t('the "{0}" {1}', value, 'role'), 'the abstract role'),
						line: roleAttr.startLine,
						col: roleAttr.startCol,
						raw: roleAttr.raw,
					});
				}
				// Set the implicit role explicitly
				if (node.rule.option.disallowSetImplicitRole) {
					const implictRole = getImplicitRole(node);
					if (implictRole && implictRole === value) {
						// role="..." merely restates the element's implicit role.
						report({
							scope: node,
							message: t(
								'{0} is {1}',
								t('the "{0}" {1}', value, 'role'),
								t('{0} of {1}', 'the implicit role', t('the "{0}" {1}', node.nodeName, 'element')),
							),
							line: roleAttr.startLine,
							col: roleAttr.startCol,
							raw: roleAttr.raw,
						});
					}
				}
				// Permitted ARIA Roles
				if (node.rule.option.permittedAriaRoles) {
					const permittedRoles = getPermittedRoles(node);
					if (permittedRoles === false) {
						// This element's role may not be overwritten at all.
						report({
							scope: node,
							message: t(
								'{0} according to {1}',
								t(
									'Cannot overwrite {0}',
									t('{0} of {1}', t('the {0}', 'role'), t('the "{0}" {1}', node.nodeName, 'element')),
								),
								'ARIA in HTML specification',
							),
							line: roleAttr.startLine,
							col: roleAttr.startCol,
							raw: roleAttr.raw,
						});
					} else if (Array.isArray(permittedRoles) && !permittedRoles.includes(value)) {
						// The role is not in this element's permitted list.
						report({
							scope: node,
							message: t(
								'{0} according to {1}',
								t(
									'Cannot overwrite {0} to {1}',
									t('the "{0}" {1}', value, 'role'),
									t('the "{0}" {1}', node.nodeName, 'element'),
								),
								'ARIA in HTML specification',
							),
							line: roleAttr.startLine,
							col: roleAttr.startCol,
							raw: roleAttr.raw,
						});
					}
				}
			}
			const computedRole = getComputedRole(node);
			if (computedRole) {
				const role = getRoleSpec(computedRole.name);
				if (role) {
					// Checking aria-* on the role
					for (const attr of node.attributes) {
						const attrName = attr.getName().potential.trim().toLowerCase();
						if (/^aria-/i.test(attrName)) {
							const statesAndProp = role.statesAndProps.find(s => s.name === attrName);
							if (statesAndProp) {
								if (node.rule.option.checkingDeprecatedProps && statesAndProp.deprecated) {
									report({
										scope: node,
										message: t(
											'{0:c} on {1}',
											t(
												'{0} is {1:c}',
												t('the "{0}" {1}', attrName, 'ARIA state/property'),
												'deprecated',
											),
											t('the "{0}" {1}', role.name, 'role'),
										),
										line: attr.startLine,
										col: attr.startCol,
										raw: attr.raw,
									});
								}
							} else {
								// The state/property is not allowed on this role.
								report({
									scope: node,
									message: t(
										'{0:c} on {1}',
										t(
											'{0} is {1:c}',
											t('the "{0}" {1}', attrName, 'ARIA state/property'),
											'disallowed',
										),
										t('the "{0}" {1}', role.name, 'role'),
									),
									line: attr.startLine,
									col: attr.startCol,
									raw: attr.raw,
								});
							}
						}
					}
					// Checking required props (only when the role was set explicitly)
					if (!computedRole.isImplicit) {
						const requiredProps = role.statesAndProps.filter(s => s.required).map(s => s.name);
						for (const requiredProp of requiredProps) {
							const has = node.attributes.some(attr => {
								const attrName = attr.getName().potential.trim().toLowerCase();
								return attrName === requiredProp;
							});
							if (!has) {
								report({
									scope: node,
									message: t(
										'{0:c} on {1}',
										t('Require {0}', t('the "{0}" {1}', requiredProp, 'ARIA state/property')),
										t('the "{0}" {1}', role.name, 'role'),
									),
								});
							}
						}
					}
				}
			} else {
				// No computed role: only global states/properties are allowed.
				const { ariaAttrs } = ariaSpec();
				for (const attr of node.attributes) {
					const attrName = attr.getName().potential.trim().toLowerCase();
					if (/^aria-/i.test(attrName)) {
						const ariaAttr = ariaAttrs.find(attr => attr.name === attrName);
						if (ariaAttr && !ariaAttr.isGlobal) {
							report({
								scope: node,
								message: t(
									'{0} is not {1}',
									t('the "{0}" {1}', attrName, 'ARIA state/property'),
									'global state/property',
								),
								line: attr.startLine,
								col: attr.startCol,
								raw: attr.raw,
							});
						}
					}
				}
			}
			for (const attr of node.attributes) {
				// Dynamic values (template bindings) cannot be checked statically.
				if (attr.attrType === 'html-attr' && attr.isDynamicValue) {
					continue;
				}
				const attrName = attr.getName().potential.trim().toLowerCase();
				if (/^aria-/i.test(attrName)) {
					const value = attr.getValue().potential.trim().toLowerCase();
					const propSpec = ariaAttrs.find(p => p.name === attrName);
					// Checking ARIA Value
					if (node.rule.option.checkingValue) {
						const result = checkAria(attrName, value, computedRole?.name);
						if (!result.isValid) {
							report({
								scope: node,
								message:
									t(
										'{0:c} on {1}',
										t('{0} is {1:c}', t('the "{0}"', value), 'disallowed'),
										t('the "{0}" {1}', attrName, 'ARIA state/property'),
									) +
									('enum' in result && result.enum.length
										? t('. ') + t('Allowed values are: {0}', t(result.enum))
										: ''),
								line: attr.startLine,
								col: attr.startCol,
								raw: attr.raw,
							});
						}
					}
					// Checking implicit props
					if (node.rule.option.disallowSetImplicitProps) {
						if (propSpec && propSpec.equivalentHtmlAttrs) {
							for (const equivalentHtmlAttr of propSpec.equivalentHtmlAttrs) {
								const htmlAttrSpec = attrSpecs.find(a => a.name === equivalentHtmlAttr.htmlAttrName);
								const isValid = isValidAttr(
									t,
									equivalentHtmlAttr.htmlAttrName,
									equivalentHtmlAttr.value || '',
									false,
									node,
									attrSpecs,
								);
								// Skip equivalents that do not exist on this element.
								if (isValid && isValid.invalidType === 'non-existent') {
									continue;
								}
								if (node.hasAttribute(equivalentHtmlAttr.htmlAttrName)) {
									const targetAttrValue = node.getAttribute(equivalentHtmlAttr.htmlAttrName);
									if (
										(equivalentHtmlAttr.value == null && targetAttrValue === value) ||
										equivalentHtmlAttr.value === value
									) {
										// The aria-* duplicates the HTML attribute's semantics.
										report({
											scope: node,
											message: t(
												'{0} has {1}',
												t('the "{0}" {1}', attrName, 'ARIA state/property'),
												t(
													'the same {0} as {1}',
													'semantics',
													t(
														'{0} or {1}',
														t(
															'the current "{0}" {1}',
															equivalentHtmlAttr.htmlAttrName,
															'attribute',
														),
														t(
															'the implicit "{0}" {1}',
															equivalentHtmlAttr.htmlAttrName,
															'attribute',
														),
													),
												),
											),
											line: attr.startLine,
											col: attr.startCol,
											raw: attr.raw,
										});
										continue;
									}
									if (htmlAttrSpec?.type === 'Boolean' && value !== 'false') {
										continue;
									}
									// The aria-* contradicts the present HTML attribute.
									report({
										scope: node,
										message: t(
											'{0} contradicts {1}',
											t('the "{0}" {1}', attrName, 'ARIA state/property'),
											t('the current "{0}" {1}', equivalentHtmlAttr.htmlAttrName, 'attribute'),
										),
										line: attr.startLine,
										col: attr.startCol,
										raw: attr.raw,
									});
								} else if (value === 'true') {
									if (!equivalentHtmlAttr.isNotStrictEquivalent && htmlAttrSpec?.type === 'Boolean') {
										// aria-*="true" contradicts the absent boolean attribute.
										report({
											scope: node,
											message: t(
												'{0} contradicts {1}',
												t('the "{0}" {1}', attrName, 'ARIA state/property'),
												t(
													'the implicit "{0}" {1}',
													equivalentHtmlAttr.htmlAttrName,
													'attribute',
												),
											),
											line: attr.startLine,
											col: attr.startCol,
											raw: attr.raw,
										});
									}
								}
							}
						}
					}
					// Default value
					if (node.rule.option.disallowDefaultValue && propSpec && propSpec.defaultValue === value) {
						report({
							scope: node,
							message: t('It is {0}', 'default value'),
							line: attr.startLine,
							col: attr.startCol,
							raw: attr.raw,
						});
					}
				}
			}
		});
	},
});
'use strict';
import * as fastEquals from 'fast-deep-equal/es6';
import match, {Matcher} from "./match";
/**
 * Deep structural equality, optionally ignoring some top-level keys.
 * A mismatch in undefined-ness or null-ness is unequal up front.
 */
export function deepEquals<T>(a: T, b: T, ignoreKeys?: (keyof T)[]): boolean {
    if ((a === undefined) !== (b === undefined)) {
        return false;
    }
    if ((a === null) !== (b === null)) {
        return false;
    }
    if (ignoreKeys && a && b) {
        // Strip ignored keys from both sides before comparing.
        for (const key of ignoreKeys) {
            a = removeKeys(a, key);
            b = removeKeys(b, key);
        }
    }
    return fastEquals(a, b);
}
// Shallow equality that also treats two empty objects (or two empty arrays) as equal.
export function shallowEquals<T>(a: T, b: T): boolean {
    if (a === b) {
        return true;
    }
    const bothEmptyObjects = isObject(a) && isObject(b) && isEmpty(a) && isEmpty(b);
    if (bothEmptyObjects) {
        return true;
    }
    const bothArrays = Array.isArray(a) && Array.isArray(b);
    return bothArrays && isEmpty(a as any) && isEmpty(b as any);
}
//*********************
//* Object helpers
//*********************
// Checks if variable is a non-null object (and not an array, even though
// arrays are technically objects).
// Fix: previously `obj && ...` leaked falsy inputs (0, '', null) as the
// return value even though this is declared as a boolean type predicate;
// it now always returns a real boolean.
export function isObject(obj: any): obj is object {
    return obj != null && typeof obj === "object" && !Array.isArray(obj)
}
// True when obj has no own enumerable keys.
// Fix: previously `obj && ...` returned the falsy input itself (e.g. null)
// instead of a boolean; it now always returns a real boolean.
export function isEmpty(obj: object): boolean {
    return obj != null && Object.keys(obj).length === 0
}
// Returns [elements of a missing from b, elements of b missing from a],
// using the given equality (deepEquals by default).
export function diffArray<T>(a: T[], b: T[], equals: (a: T, b: T) => boolean = (a: T, b: T) => deepEquals(a, b)): [T[], T[]] {
    const notIn = (xs: T[]) => (x: T) => !xs.some(el => equals(x, el));
    const aNotB = a.filter(notIn(b));
    const bNotA = b.filter(notIn(a));
    return [aNotB, bNotA];
}
// Lists the keys of oldT whose values differ (deeply) from newT.
// Note: only keys present on the OLD object are considered.
export function changedKeys<T extends Record<string, any>>(oldT: T, newT: T): (keyof T)[] {
    if (deepEquals(oldT, newT)) {
        return [];
    }
    return Object.keys(oldT).filter(key => !deepEquals(oldT[key], newT[key]));
}
// Recursively freezes an object graph (children first), returning the input.
// Non-objects and null pass through untouched.
export function deepFreeze<T>(obj: T) {
    function go(value: T) {
        if (!value || typeof value !== "object") {
            return value;
        }
        Object.values(value).forEach(child => go(child));
        return Object.isFrozen(value) ? value : Object.freeze(value);
    }
    return go(obj);
}
/**
 * Recursively copies a value, preserving prototypes on plain objects.
 *
 * @param obj The value to copy; null/undefined/primitives are returned as-is.
 * @param keepFrozen When true, frozen objects are returned by reference
 *                   rather than copied. Fix: this flag was previously
 *                   dropped on every recursive call, so it only applied
 *                   to the top level; it now propagates to all depths.
 */
export function deepCopy<T>(obj: T, keepFrozen: boolean = false): T {
    if (obj instanceof Array) {
        return [...obj].map(item => deepCopy(item, keepFrozen)) as any as T;
    } else if (obj === null || typeof obj === 'undefined') {
        return obj;
    } else if (typeof obj === 'object' && (!keepFrozen || !Object.isFrozen(obj))) {
        const result: any = {};
        const objAny = obj as any;
        for (let key of Object.getOwnPropertyNames(objAny)) {
            result[key] = deepCopy(objAny[key], keepFrozen);
        }
        Object.setPrototypeOf(result, Object.getPrototypeOf(obj));
        return result as T;
    }
    // Frozen object with keepFrozen, or a primitive: return by reference.
    return obj;
}
/**
 * A generic copy-and-update, like a case class's `copy` method.
 *
 * @param srcObj The object to be copied; non-objects are returned unchanged.
 * @param changes Optionally, a partial object of new values applied to the copy.
 * @return A shallow copy with the changes applied, sharing the source's
 *         prototype. If the source was frozen, the copy is frozen too.
 */
export function copyObject<T>(srcObj: T, changes?: Partial<T>): T {
    if (srcObj === null || srcObj === undefined || typeof srcObj !== 'object') {
        return srcObj;
    }
    const result: T = { ...srcObj };
    if (changes !== undefined) {
        for (const prop in changes) {
            if (!changes.hasOwnProperty(prop)) {
                continue;
            }
            result[prop as keyof T] = changes[prop as keyof T]!;
        }
    }
    Object.setPrototypeOf(result, Object.getPrototypeOf(srcObj));
    return Object.isFrozen(srcObj) ? Object.freeze(result) : result;
}
// True when every listed key exists on both values and compares deeply equal.
export function equalsByKey<A, B>(a: A, b: B, keys: NonEmptyArray<(keyof A & keyof B & PropertyKey)>): boolean {
    for (const k of keys) {
        if (!(k in a) || !(k in b)) {
            return false;
        }
        if (!deepEquals(a[k], b[k] as any)) { // TODO: is there a way to fiddle with the types so this works without any?
            return false;
        }
    }
    return true;
}
// Returns a copy of obj without the given key(s); key order is preserved.
export function removeKeys<T>(obj: T, k: (keyof T)[] | keyof T): T {
    const keyList = Array.isArray(k) ? k : [k];
    const keyStrings = keyList.map(key => key.toString());
    const result: any = {};
    for (const key of Object.keys(obj)) {
        if (!keyStrings.includes(key)) {
            result[key] = obj[key as keyof T];
        }
    }
    return result as T;
}
// Applies f to every (own enumerable string-keyed) value of obj.
export function mapValues<V, U>(obj:{ [K in PropertyKey]: V }, f: (x: V) => U): { [K in PropertyKey]: U } {
    const result: { [K in PropertyKey]: U } = {};
    for (const [k, v] of Object.entries(obj)) {
        result[k] = f(v);
    }
    return result;
}
// Maps each own field through fn, keeping only entries where fn is defined.
export function collectFields<K extends PropertyKey, V, V1>(obj: Record<K, V>, fn: (key: K, value: V) => V1 | undefined): Record<K, V1> {
    const results = {} as Record<K, V1>;
    for (const prop in obj) {
        if (!obj.hasOwnProperty(prop)) {
            continue;
        }
        const mapped = fn(prop as K, obj[prop as K]);
        if (mapped !== undefined) {
            results[prop as K] = mapped;
        }
    }
    return results;
}
//*********************
//* Array helpers
//*********************
// Returns a new array with t inserted at idx; negative idx prepends.
export function arrInsert<T>(arr: T[], idx: number, t: T) {
    if (idx <= -1) {
        return [t, ...arr];
    }
    const copy = [...arr];
    copy.splice(idx, 0, t);
    return copy;
}
// Returns a new array with the element at idx replaced by t.
// Note: a negative idx prepends t without removing anything (prior behavior).
export function arrReplace<T>(arr: T[], idx: number, t: T) {
    if (idx <= -1) {
        return [t, ...arr];
    }
    const copy = [...arr];
    copy.splice(idx, 1, t);
    return copy;
}
// Returns a new array without the element at idx; negative idx returns arr as-is.
export function arrDelete<T>(arr: T[], idx: number) {
    if (idx <= -1) {
        return arr;
    }
    return arr.filter((_item, i) => i !== idx);
}
// Removes the first occurrence of item (structural match for objects/arrays,
// identity otherwise); returns arr unchanged when absent.
export function arrDeleteFirstItem<T>(arr: T[], item: T) {
    const useDeep = isObject(item) || Array.isArray(item);
    const idx = useDeep ? arr.findIndex(i => deepEquals(i, item)) : arr.indexOf(item);
    if (idx < 0) {
        return arr;
    }
    return arr.filter((_el, i) => i !== idx);
}
// Turns a list of pairs into a pair of lists.
export function unzip<A, B>(arr: [A, B][]): [A[], B[]] {
    const as: A[] = [];
    const bs: B[] = [];
    for (const [a, b] of arr) {
        as.push(a);
        bs.push(b);
    }
    return [as, bs];
}
// Turns a list of triples into a triple of lists.
export function unzip3<A, B, C>(arr: [A, B, C][]): [A[], B[], C[]] {
    const as: A[] = [];
    const bs: B[] = [];
    const cs: C[] = [];
    for (const [a, b, c] of arr) {
        as.push(a);
        bs.push(b);
        cs.push(c);
    }
    return [as, bs, cs];
}
// Map + filter in one pass: keeps fun's results that are neither undefined nor null.
export function collect<T, U>(arr: T[], fun: (t: T, index: number) => U | undefined | null): U[] {
    const result: U[] = [];
    arr.forEach((t, index) => {
        const mapped = fun(t, index);
        if (mapped !== undefined && mapped !== null) {
            result.push(mapped);
        }
    });
    return result;
}
// Keeps only arguments that are not undefined (null is kept).
export function collectDefined<T>(...arr: (T | undefined)[]): T[] {
    const result: T[] = [];
    for (const value of arr) {
        if (value !== undefined) {
            result.push(value);
        }
    }
    return result;
}
// Runs each value through the matcher and collects truthy match results.
// Note: falsy results (0, '', false) are dropped, matching prior behavior.
export function collectMatch<T, R>(arr: T[], fn: (matcher: Matcher<T>) => Matcher<T, R>): R[] {
    const result: R[] = [];
    for (const value of arr) {
        const matched = fn(new Matcher(value)).orUndefined;
        if (matched) {
            result.push(matched);
        }
    }
    return result;
}
// Keeps only elements that are instances of the given constructor.
export function collectInstances<T, R extends T>(arr: T[], constructor: new (...args: any[]) => R): R[] {
    return arr.filter((t): t is R => t instanceof constructor);
}
// Returns the first truthy matcher result across the array, else undefined.
export function collectFirstMatch<T, R>(arr: T[], fn: (matcher: Matcher<T>) => Matcher<T, R>): R | undefined {
    for (const value of arr) {
        const result = fn(match(value)).orUndefined;
        if (result) {
            return result;
        }
    }
    return undefined;
}
// Returns the first element that is an instance of u, or undefined.
export function findInstance<T, U>(arr: T[], u: new (...args: any[]) => U): U | undefined {
    for (const t of arr) {
        if (t instanceof u) {
            return t as any as U;
        }
    }
    return undefined;
}
// True when any element satisfies the predicate.
export function arrExists<T>(arr: T[], fun: (t: T) => boolean): boolean {
    return arr.some(item => fun(item));
}
/**
 * Foreach that tolerates the callback removing the current element:
 * it walks the array from the last index down to the first.
 */
export function safeForEach<T>(arr: T[], fun: (t: T, index: number, arr: T[]) => void): void {
    let idx = arr.length;
    while (idx-- > 0) {
        fun(arr[idx], idx, arr);
    }
}
/**
 * Partitions an array into two arrays according to the predicate.
 *
 * @param arr The input array.
 * @param pred A predicate of T => boolean.
 * @return A pair: elements satisfying the predicate, then those that don't.
 *         Relative order within each result matches the original array.
 */
export function partition<T>(arr: T[], pred: (t: T) => boolean): [T[], T[]] {
    const matching: T[] = [];
    const rest: T[] = [];
    for (const item of arr) {
        (pred(item) ? matching : rest).push(item);
    }
    return [matching, rest];
}
// Maps only the elements satisfying cond through fn; others pass through.
export function mapSome<T>(arr: T[], cond: (t: T) => boolean, fn: (t: T) => T): T[] {
    const result: T[] = [];
    for (const item of arr) {
        result.push(cond(item) ? fn(item) : item);
    }
    return result;
}
// True when base begins with the elements of start (deep comparison).
export function arrayStartsWith<T>(base: T[], start: T[]) {
    if (start.length > base.length) {
        return false;
    }
    return start.every((item, i) => deepEquals(base[i], item));
}
//****************
//* String Helpers
//****************
/**
 * Split a string by line breaks, keeping each trailing line break in the
 * results. Empty lines produce no entries (prior behavior).
 */
export function splitWithBreaks(outputStr: string): string[] {
    const matches = outputStr.match(/[^\n]+\n?/g);
    return matches ? [...matches] : [];
}
// Converts a 1-based line plus a column into a character offset within str.
export function positionIn(str: string, line: number, column: number): number {
    const lines = splitWithBreaks(str);
    const targetLine = Math.min(line - 1, lines.length);
    let pos = 0;
    let currentLine = 0;
    // Sum the lengths of all lines before the target line.
    while (currentLine < targetLine) {
        pos += lines[currentLine].length;
        currentLine++;
    }
    // Only add the column when the target line actually exists.
    if (currentLine < lines.length) {
        pos += column;
    }
    return pos;
}
//****************
//* String Helpers
//****************
/**
 * Splits a command-line style string into arguments, honoring double-quoted
 * segments with backslash-escaped quotes.
 *
 * Fixes: escaped quotes are now unescaped globally (a string-pattern
 * String.replace only replaces the first occurrence), and an empty quoted
 * argument ("") no longer yields a stray '"' token.
 */
export function parseQuotedArgs(args: string): string[] {
    // Parses a quoted argument; the opening quote has already been consumed.
    function parseQuoted(rest: string): [string, string] {
        const match = /^((?:[^"\\]|\\.)*)"/.exec(rest);
        if (!match)
            return [rest, ""];
        const arg = match[1];
        // Unescape every \" and skip past the closing quote.
        return [arg.replace(/\\"/g, '"'), rest.substring(arg.length + 1)]
    }
    // Parses an unquoted argument up to the next space.
    function parseUnquoted(rest: string): [string, string] {
        const nextSpace = rest.indexOf(' ');
        if (nextSpace === -1)
            return [rest, ""];
        return [rest.substring(0, nextSpace), rest.substring(nextSpace + 1)];
    }
    // Returns [next argument, remaining input].
    function parseNext(rest: string): [string, string] {
        rest = rest.trimLeft();
        if (!rest)
            return ["", ""];
        if (rest.charAt(0) === '"')
            return parseQuoted(rest.substring(1));
        return parseUnquoted(rest);
    }
    const result = [];
    let remaining = args;
    while (remaining.length) {
        const next = parseNext(remaining);
        // Empty tokens (trailing whitespace, empty quotes) are skipped.
        if (next[0])
            result.push(next[0]);
        remaining = next[1];
    }
    return result;
}
// Wraps str in double quotes when it contains a quote or a space, escaping
// backslashes and quotes. Fix: a string-pattern String.replace only escapes
// the FIRST occurrence; global regexes now escape all of them.
function quoted(str: string | undefined): string {
    if (str && (str.indexOf('"') !== -1 || str.indexOf(' ') !== -1)) {
        return ['"', str.replace(/\\/g, '\\\\').replace(/"/g, '\\"'), '"'].join('');
    } else {
        return str || "";
    }
}
// Joins arguments into a single string, quoting each as needed.
export function joinQuotedArgs(strs: string[] | undefined): string | undefined {
    if (strs == null) {
        return undefined;
    }
    return strs.map(quoted).join(' ');
}
//****************
//* Other Helpers
//****************
// Walks up from el through parentElement links (stopping at bound, if given)
// looking for maybeAncestor.
export function isDescendant(el: HTMLElement, maybeAncestor: HTMLElement, bound?: HTMLElement): boolean {
    for (let node: HTMLElement | null = el; node && node !== bound; node = node.parentElement) {
        if (node === maybeAncestor) {
            return true;
        }
    }
    return false;
}
// Applies fn when value is not undefined; otherwise returns undefined.
export function mapOpt<T, U>(value: T | undefined, fn: (arg: T) => U): U | undefined {
    return value === undefined ? undefined : fn(value);
}
// Recursively maps a type to its plain-data "interface": strings pass
// through, arrays map element-wise, object types map property-wise, and
// every other type is left unchanged.
export type InterfaceOf<T> =
    T extends string ? T :
    T extends Array<infer U> ? Array<InterfaceOf<U>> :
    T extends Object ? {
        [P in keyof T]: InterfaceOf<T[P]>
    } :
    T
// A Promise whose resolve/reject functions are exposed on the instance,
// plus an isSettled flag that flips once the promise settles either way.
export class Deferred<T> implements Promise<T> {
    private _promise: Promise<T>;
    // Assigned synchronously inside the Promise executor in the constructor.
    // NOTE(review): under strict mode these fields would need definite-
    // assignment assertions — confirm the project's tsconfig settings.
    resolve: (value?: (PromiseLike<T> | T)) => void;
    reject: (reason?: any) => void;
    isSettled: boolean = false;
    // To implement Promise
    readonly [Symbol.toStringTag]: string;
    constructor() {
        this._promise = new Promise<T>((resolve, reject) => {
            // assign the resolve and reject functions to `this`
            // making them usable on the class instance
            this.resolve = resolve;
            this.reject = reject;
        });
        // bind `then` and `catch` to implement the same interface as Promise
        // (these instance assignments shadow the prototype methods below)
        this.then = this._promise.then.bind(this._promise);
        this.catch = this._promise.catch.bind(this._promise);
        this[Symbol.toStringTag] = 'Promise';
        // Track settlement regardless of outcome.
        this.finally(() => {
            this.isSettled = true
        })
    }
    then<TResult1 = T, TResult2 = never>(onfulfilled?: ((value: T) => (PromiseLike<TResult1> | TResult1)) | undefined | null, onrejected?: ((reason: any) => (PromiseLike<TResult2> | TResult2)) | undefined | null): Promise<TResult1 | TResult2> {
        return this._promise.then(onfulfilled, onrejected);
    }
    catch<TResult = never>(onrejected?: ((reason: any) => (PromiseLike<TResult> | TResult)) | undefined | null): Promise<T | TResult> {
        return this._promise.catch(onrejected);
    }
    finally(onfinally?: (() => void) | undefined | null): Promise<T> {
        return this._promise.finally(onfinally);
    }
}
// Returns the final segment (basename) of a slash-separated path.
export function nameFromPath(path: string): string {
    const segments = path.split('/');
    return segments[segments.length - 1];
}
// Maps a character offset in str to a zero-based [line, positionInLine] pair.
export function linePosAt(str: string, offset: number): [number, number] {
    // Clamp to the last valid index of the string.
    offset = Math.min(str.length - 1, offset);
    let line = 0;
    let index = 0;
    while (index < offset) {
        const newlineAt = str.indexOf("\n", index);
        // No further newline before the offset: it lies on this line.
        if (newlineAt === -1 || newlineAt >= offset) {
            return [line, offset - index];
        }
        index = newlineAt + 1;
        line++;
    }
    return [line, 0];
}
export function TODO(): never {
console.error("An implementation is missing!")
throw new Error("An implementation is missing!")
} | the_stack |
import React from 'react'
import styled from 'styled-components'
import {
makeUseTable,
// usePagination,
// useSortBy,
// useFilters,
// useGroupBy,
// useExpanded,
// useRowSelect,
} from '@tanstack/react-table'
import matchSorter from 'match-sorter'
import makeData from './makeData'
// Styled wrapper for the demo table: collapsed cell borders, zeroed-out
// inline inputs, and basic pagination spacing. (The template body is
// runtime CSS and is left untouched.)
const Styles = styled.div`
  padding: 1rem;
  table {
    border-spacing: 0;
    border: 1px solid black;
    tr {
      :last-child {
        td {
          border-bottom: 0;
        }
      }
    }
    th,
    td {
      margin: 0;
      padding: 0.5rem;
      border-bottom: 1px solid black;
      border-right: 1px solid black;
      :last-child {
        border-right: 0;
      }
    }
    td {
      input {
        font-size: 1rem;
        padding: 0;
        margin: 0;
        border: 0;
      }
    }
  }
  .pagination {
    padding: 0.5rem;
  }
`
// Create an editable cell renderer
const EditableCell = ({
value: initialValue,
row: { index },
column: { id },
updateMyData, // This is a custom function that we supplied to our table instance
editable,
}) => {
// We need to keep and update the state of the cell normally
const [value, setValue] = React.useState(initialValue)
const onChange = e => {
setValue(e.target.value)
}
// We'll only update the external data when the input is blurred
const onBlur = () => {
updateMyData(index, id, value)
}
// If the initialValue is changed externall, sync it up with our state
React.useEffect(() => {
setValue(initialValue)
}, [initialValue])
if (!editable) {
return `${initialValue}`
}
return <input value={value} onChange={onChange} onBlur={onBlur} />
}
// Define a default UI for filtering
function DefaultColumnFilter({
column: { filterValue, preFilteredRows, setFilter },
}) {
const count = preFilteredRows.length
return (
<input
value={filterValue || ''}
onChange={e => {
setFilter(e.target.value || undefined) // Set undefined to remove the filter entirely
}}
placeholder={`Search ${count} records...`}
/>
)
}
// This is a custom filter UI for selecting
// a unique option from a list
function SelectColumnFilter({
column: { filterValue, setFilter, preFilteredRows, id },
}) {
// Calculate the options for filtering
// using the preFilteredRows
const options = React.useMemo(() => {
const options = new Set()
preFilteredRows.forEach(row => {
options.add(row.values[id])
})
return [...options.values()]
}, [id, preFilteredRows])
// Render a multi-select box
return (
<select
value={filterValue}
onChange={e => {
setFilter(e.target.value || undefined)
}}
>
<option value="">All</option>
{options.map((option, i) => (
<option key={i} value={option}>
{option}
</option>
))}
</select>
)
}
// This is a custom filter UI that uses a
// slider to set the filter value between a column's
// min and max values
function SliderColumnFilter({
column: { filterValue, setFilter, preFilteredRows, id },
}) {
// Calculate the min and max
// using the preFilteredRows
const [min, max] = React.useMemo(() => {
let min = preFilteredRows.length ? preFilteredRows[0].values[id] : 0
let max = preFilteredRows.length ? preFilteredRows[0].values[id] : 0
preFilteredRows.forEach(row => {
min = Math.min(row.values[id], min)
max = Math.max(row.values[id], max)
})
return [min, max]
}, [id, preFilteredRows])
return (
<>
<input
type="range"
min={min}
max={max}
value={filterValue || min}
onChange={e => {
setFilter(parseInt(e.target.value, 10))
}}
/>
<button onClick={() => setFilter(undefined)}>Off</button>
</>
)
}
// This is a custom UI for our 'between' or number range
// filter. It uses two number boxes and filters rows to
// ones that have values between the two
function NumberRangeColumnFilter({
column: { filterValue = [], preFilteredRows, setFilter, id },
}) {
const [min, max] = React.useMemo(() => {
let min = preFilteredRows.length ? preFilteredRows[0].values[id] : 0
let max = preFilteredRows.length ? preFilteredRows[0].values[id] : 0
preFilteredRows.forEach(row => {
min = Math.min(row.values[id], min)
max = Math.max(row.values[id], max)
})
return [min, max]
}, [id, preFilteredRows])
return (
<div
style={{
display: 'flex',
}}
>
<input
value={filterValue[0] || ''}
type="number"
onChange={e => {
const val = e.target.value
setFilter((old = []) => [val ? parseInt(val, 10) : undefined, old[1]])
}}
placeholder={`Min (${min})`}
style={{
width: '70px',
marginRight: '0.5rem',
}}
/>
to
<input
value={filterValue[1] || ''}
type="number"
onChange={e => {
const val = e.target.value
setFilter((old = []) => [old[0], val ? parseInt(val, 10) : undefined])
}}
placeholder={`Max (${max})`}
style={{
width: '70px',
marginLeft: '0.5rem',
}}
/>
</div>
)
}
// Fuzzy-matches rows against filterValue using match-sorter on the column value.
function fuzzyTextFilterFn(rows, id, filterValue) {
  const keys = [row => row.values[id]]
  return matchSorter(rows, filterValue, { keys })
}

// Let the table remove the filter if the string is empty.
fuzzyTextFilterFn.autoRemove = val => !val
// Be sure to pass our updateMyData and the skipPageReset option
function Table({ columns, data, updateMyData, skipPageReset }) {
const filterFns = React.useMemo(
() => ({
// Add a new fuzzyTextFilterFn filter type.
fuzzyText: fuzzyTextFilterFn,
// Or, override the default text filter to use
// "startWith"
text: (rows, id, filterValue) => {
return rows.filter(row => {
const rowValue = row.values[id]
return rowValue !== undefined
? String(rowValue)
.toLowerCase()
.startsWith(String(filterValue).toLowerCase())
: true
})
},
}),
[]
)
const defaultColumn = React.useMemo(
() => ({
// Let's set up our default Filter UI
Filter: DefaultColumnFilter,
// And also our default editable cell
Cell: EditableCell,
}),
[]
)
const useTable = makeUseTable({
plugins: [
// withFilters,
// withGroupBy,
// withSortBy,
// withExpanded,
// withPagination,
// withRowSelect,
// hooks => {
// hooks.leafColumns.push(columns => [
// {
// id: 'selection',
// // Make this column a groupByBoundary. This ensures that groupBy columns
// // are placed after it
// groupByBoundary: true,
// // The header can use the table's getToggleAllRowsSelectedProps method
// // to render a checkbox
// Header: ({ getToggleAllRowsSelectedProps }) => (
// <div>
// <input type="checkbox" {...getToggleAllRowsSelectedProps()} />
// </div>
// ),
// // The cell can use the individual row's getToggleRowSelectedProps method
// // to the render a checkbox
// Cell: ({ row }) => (
// <div>
// <input type="checkbox" {...row.getToggleRowSelectedProps()} />
// </div>
// ),
// },
// ...columns,
// ])
// },
],
})
// Use the state and functions returned from useTable to build your UI
const {
getTableProps,
getTableBodyProps,
headerGroups,
prepareRow,
page, // Instead of using 'rows', we'll use page,
// which has only the rows for the active page
// The rest of these things are super handy, too ;)
pageOptions,
pageCount,
gotoPage,
nextPage,
previousPage,
setPageSize,
state: { pageIndex, pageSize, groupBy, expanded, filters, selection },
} = useTable({
columns,
data,
defaultColumn,
filterFns,
// nestExpandedRows: true,
initialState: { pageIndex: 2 },
// updateMyData isn't part of the API, but
// anything we put into these options will
// automatically be available on the instance.
// That way we can call this function from our
// cell renderer!
updateMyData,
// We also need to pass this so the page doesn't change
// when we edit the data, undefined means using the default
autoResetPage: !skipPageReset,
})
// Render the UI for your table
return (
<>
<table {...getTableProps()}>
<thead>
{headerGroups.map(headerGroup => (
<tr {...headerGroup.getHeaderGroupProps()}>
{headerGroup.headers.map(column => (
<th {...column.getHeaderProps()}>
<div>
{column.canGroupBy ? (
// If the column can be grouped, let's add a toggle
<span {...column.getGroupByToggleProps()}>
{column.isGrouped ? '🛑 ' : '👊 '}
</span>
) : null}
<span {...column.getSortByToggleProps()}>
{column.render('Header')}
{/* Add a sort direction indicator */}
{column.isSorted
? column.isSortedDesc
? ' 🔽'
: ' 🔼'
: ''}
</span>
</div>
{/* Render the columns filter UI */}
<div>{column.canFilter ? column.render('Filter') : null}</div>
</th>
))}
</tr>
))}
</thead>
<tbody {...getTableBodyProps()}>
{page.map(row => {
prepareRow(row)
return (
<tr {...row.getRowProps()}>
{row.cells.map(cell => {
return (
<td {...cell.getCellProps()}>
{cell.isGrouped ? (
// If it's a grouped cell, add an expander and row count
<>
<span {...row.getToggleRowExpandedProps()}>
{row.isExpanded ? '👇' : '👉'}
</span>{' '}
{cell.render('Cell', { editable: false })} (
{row.subRows.length})
</>
) : cell.isAggregated ? (
// If the cell is aggregated, use the Aggregated
// renderer for cell
cell.render('Aggregated')
) : cell.isPlaceholder ? null : ( // For cells with repeated values, render null
// Otherwise, just render the regular cell
cell.render('Cell', { editable: true })
)}
</td>
)
})}
</tr>
)
})}
</tbody>
</table>
{/*
Pagination can be built however you'd like.
This is just a very basic UI implementation:
*/}
<div className="pagination">
<button onClick={() => gotoPage(0)} disabled={!canPreviousPage}>
{'<<'}
</button>{' '}
<button onClick={() => previousPage()} disabled={!canPreviousPage}>
{'<'}
</button>{' '}
<button onClick={() => nextPage()} disabled={!canNextPage}>
{'>'}
</button>{' '}
<button onClick={() => gotoPage(pageCount - 1)} disabled={!canNextPage}>
{'>>'}
</button>{' '}
<span>
Page{' '}
<strong>
{pageIndex + 1} of {pageOptions.length}
</strong>{' '}
</span>
<span>
| Go to page:{' '}
<input
type="number"
defaultValue={pageIndex + 1}
onChange={e => {
const page = e.target.value ? Number(e.target.value) - 1 : 0
gotoPage(page)
}}
style={{ width: '100px' }}
/>
</span>{' '}
<select
value={pageSize}
onChange={e => {
setPageSize(Number(e.target.value))
}}
>
{[10, 20, 30, 40, 50].map(pageSize => (
<option key={pageSize} value={pageSize}>
Show {pageSize}
</option>
))}
</select>
</div>
<pre>
<code>
{JSON.stringify(
{
pageIndex,
pageSize,
pageCount,
groupBy,
expanded: expanded,
filters,
selection: selection,
},
null,
2
)}
</code>
</pre>
</>
)
}
// Custom filter: keep rows whose column value is >= filterValue.
function filterGreaterThan(rows, id, filterValue) {
  return rows.filter(row => row.values[id] >= filterValue)
}

// This is an autoRemove method on the filter function that
// when given the new filter value and returns true, the filter
// will be automatically removed. Normally this is just an undefined
// check, but here, we want to remove the filter if it's not a number
filterGreaterThan.autoRemove = val => typeof val !== 'number'
// Aggregator: midpoint of the min and max leaf values, rounded.
// Empty input yields 0.
function roundedMedian(leafValues) {
  let lo = leafValues[0] || 0
  let hi = lo
  for (const value of leafValues) {
    lo = Math.min(lo, value)
    hi = Math.max(hi, value)
  }
  return Math.round((lo + hi) / 2)
}
function App() {
const columns = React.useMemo(
() => [
{
Header: 'Name',
columns: [
{
Header: 'First Name',
accessor: 'firstName',
// Use a two-stage aggregator here to first
// count the total rows being aggregated,
// then sum any of those counts if they are
// aggregated further
aggregate: 'count',
Aggregated: ({ value }) => `${value} Names`,
},
{
Header: 'Last Name',
accessor: 'lastName',
// Use our custom `fuzzyText` filter on this column
filter: 'fuzzyText',
// Use another two-stage aggregator here to
// first count the UNIQUE values from the rows
// being aggregated, then sum those counts if
// they are aggregated further
aggregate: 'uniqueCount',
Aggregated: ({ value }) => `${value} Unique Names`,
},
],
},
{
Header: 'Info',
columns: [
{
Header: 'Age',
accessor: 'age',
Filter: SliderColumnFilter,
filter: 'equals',
// Aggregate the average age of visitors
aggregate: 'average',
Aggregated: ({ value }) => `${value} (avg)`,
},
{
Header: 'Visits',
accessor: 'visits',
Filter: NumberRangeColumnFilter,
filter: 'between',
// Aggregate the sum of all visits
aggregate: 'sum',
Aggregated: ({ value }) => `${value} (total)`,
},
{
Header: 'Status',
accessor: 'status',
Filter: SelectColumnFilter,
filter: 'includes',
},
{
Header: 'Profile Progress',
accessor: 'progress',
Filter: SliderColumnFilter,
filter: filterGreaterThan,
// Use our custom roundedMedian aggregator
aggregate: roundedMedian,
Aggregated: ({ value }) => `${value} (med)`,
},
],
},
],
[]
)
const [data, setData] = React.useState(() => makeData(10000))
const [originalData] = React.useState(data)
// We need to keep the table from resetting the pageIndex when we
// Update data. So we can keep track of that flag with a ref.
const skipPageResetRef = React.useRef(false)
// When our cell renderer calls updateMyData, we'll use
// the rowIndex, columnId and new value to update the
// original data
const updateMyData = (rowIndex, columnId, value) => {
// We also turn on the flag to not reset the page
skipPageResetRef.current = true
setData(old =>
old.map((row, index) => {
if (index === rowIndex) {
return {
...row,
[columnId]: value,
}
}
return row
})
)
}
// After data chagnes, we turn the flag back off
// so that if data actually changes when we're not
// editing it, the page is reset
React.useEffect(() => {
skipPageResetRef.current = false
}, [data])
// Let's add a data resetter/randomizer to help
// illustrate that flow...
// Restore the data captured at mount time (originalData).
const resetData = () => {
  // Don't reset the page when we do this
  skipPageResetRef.current = true
  setData(originalData)
}
return (
<Styles>
<button onClick={resetData}>Reset Data</button>
<Table
columns={columns}
data={data}
updateMyData={updateMyData}
skipPageReset={skipPageResetRef.current}
/>
</Styles>
)
}
export default App | the_stack |
import _ from 'lodash';
import {addHash, unreachable} from '@statechannels/wallet-core';
import {SignedBy, StateWithBals, TestChannel} from '../../engine/__test__/fixtures/test-channel';
import {TestLedgerChannel} from '../../engine/__test__/fixtures/test-ledger-channel';
import {LedgerRequestStatus} from '../../models/ledger-request';
import {LedgerProtocol} from '../ledger-protocol';
import {Destination} from '../../type-aliases';
import {State} from '../../models/channel/state';
import {addState, clearOldStates, dropNonVariables} from '../../state-utils';
import {channel} from '../../models/__test__/fixtures/channel';
jest.setTimeout(10_000);
describe('as leader', () => {
describe('in the accept state', () => {
it(
'will create a proposal from requests in the queue',
testLedgerCrank({
as: 'leader',
before: {
agreed: {turn: 5, bals: {a: 5, b: 5, c: 10}},
requests: [
['fund', 'd', 1, 1, 'queued'],
['defund', 'c', 5, 5, 'queued'],
],
},
after: {
agreed: {turn: 5, bals: {a: 5, b: 5, c: 10}},
proposed: {turn: 6, bals: {a: 9, b: 9, d: 2}},
requests: [
['fund', 'd', 1, 1, 'pending', {missedOps: 0, lastSeen: 5}],
['defund', 'c', 5, 5, 'pending', {missedOps: 0, lastSeen: 5}],
],
},
})
);
it(
`will marks requests that appear in the state as successful`,
testLedgerCrank({
as: 'leader',
before: {
agreed: {turn: 6, bals: {a: 9, b: 9, d: 2}},
requests: [
['fund', 'd', 1, 1, 'pending', {missedOps: 0, lastSeen: 5}],
['defund', 'c', 5, 5, 'queued', {missedOps: 0, lastSeen: 5}],
],
},
after: {
agreed: {turn: 6, bals: {a: 9, b: 9, d: 2}},
requests: [
['fund', 'd', 1, 1, 'succeeded', {missedOps: 0, lastSeen: 6}],
['defund', 'c', 5, 5, 'succeeded', {missedOps: 0, lastSeen: 6}],
],
},
})
);
it(
`it increases missedOps if the requests aren't accepted`,
testLedgerCrank({
as: 'leader',
before: {
agreed: {turn: 6, bals: {a: 5, b: 5, c: 10}},
requests: [
['fund', 'd', 1, 1, 'pending', {missedOps: 0, lastSeen: 5}], // was pending, but not accepted
['defund', 'c', 5, 5, 'queued'], // didn't see state 5, so won't be a missedOp
],
},
after: {
agreed: {turn: 6, bals: {a: 5, b: 5, c: 10}},
proposed: {turn: 7, bals: {a: 9, b: 9, d: 2}},
requests: [
['fund', 'd', 1, 1, 'pending', {missedOps: 1, lastSeen: 6}],
['defund', 'c', 5, 5, 'pending', {missedOps: 0, lastSeen: 6}],
],
},
})
);
it(
`will mark bad requests as insufficent-funds / inconsistent`,
testLedgerCrank({
as: 'leader',
before: {
agreed: {turn: 5, bals: {a: 5, b: 5, c: 10}},
requests: [
['fund', 'd', 6, 6, 'queued', {missedOps: 0, lastSeen: 5}], // not enough funds
['defund', 'c', 1, 1, 'queued', {missedOps: 0, lastSeen: 5}], // c has 10, not 2
],
},
after: {
agreed: {turn: 5, bals: {a: 5, b: 5, c: 10}},
requests: [
['fund', 'd', 6, 6, 'insufficient-funds', {missedOps: 0, lastSeen: 5}],
['defund', 'c', 1, 1, 'inconsistent', {missedOps: 0, lastSeen: 5}],
],
},
})
);
it(
`will cancel two requests where the funding is still queued`,
testLedgerCrank({
as: 'leader',
before: {
agreed: {turn: 5, bals: {a: 5, b: 5, c: 10}},
requests: [
['fund', 'd', 1, 1, 'queued', {missedOps: 0, lastSeen: 5}],
['defund', 'd', 1, 1, 'queued'],
],
},
after: {
agreed: {turn: 5, bals: {a: 5, b: 5, c: 10}},
requests: [
['fund', 'd', 1, 1, 'cancelled', {missedOps: 0, lastSeen: 5}],
['defund', 'd', 1, 1, 'cancelled', {missedOps: 0, lastSeen: 5}],
],
},
})
);
it(
`will cancel two requests where the funding was pending but not accepted`,
testLedgerCrank({
as: 'leader',
before: {
agreed: {turn: 5, bals: {a: 5, b: 5, c: 10}},
requests: [
['fund', 'd', 1, 1, 'pending', {missedOps: 0, lastSeen: 5}],
['defund', 'd', 1, 1, 'queued'],
],
},
after: {
agreed: {turn: 5, bals: {a: 5, b: 5, c: 10}},
requests: [
['fund', 'd', 1, 1, 'cancelled', {missedOps: 0, lastSeen: 5}],
['defund', 'd', 1, 1, 'cancelled', {missedOps: 0, lastSeen: 5}],
],
},
})
);
it(
`won't cancel requests if the funding has already been accepted`,
testLedgerCrank({
as: 'leader',
before: {
agreed: {turn: 5, bals: {a: 4, b: 4, c: 10, d: 2}},
requests: [
['fund', 'd', 1, 1, 'pending', {missedOps: 0, lastSeen: 5}],
['defund', 'd', 1, 1, 'queued'],
],
},
after: {
agreed: {turn: 5, bals: {a: 4, b: 4, c: 10, d: 2}},
proposed: {turn: 6, bals: {a: 5, b: 5, c: 10}},
requests: [
['fund', 'd', 1, 1, 'succeeded', {missedOps: 0, lastSeen: 5}],
['defund', 'd', 1, 1, 'pending', {missedOps: 0, lastSeen: 5}],
],
},
})
);
it(
`won't mark requests as insufficient-funding if there's a defund that frees enough funds`,
testLedgerCrank({
as: 'leader',
before: {
agreed: {turn: 5, bals: {a: 5, b: 5, c: 10}},
requests: [
['fund', 'd', 6, 6, 'pending'], // not enough funds for this
['defund', 'c', 1, 9, 'queued'], // .. unless this defund is applied first
],
},
after: {
agreed: {turn: 5, bals: {a: 5, b: 5, c: 10}},
proposed: {turn: 6, bals: {a: 0, b: 8, d: 12}},
requests: [
['fund', 'd', 6, 6, 'pending', {missedOps: 0, lastSeen: 5}],
['defund', 'c', 1, 9, 'pending', {missedOps: 0, lastSeen: 5}],
],
},
})
);
});
describe('in the proposal state', () => {
it(
'takes no action',
testLedgerCrank({
as: 'leader',
before: {
agreed: {turn: 5, bals: {a: 5, b: 5, c: 10}},
proposed: {turn: 6, bals: {a: 9, b: 9, d: 2}},
requests: [
['fund', 'd', 1, 1, 'pending', {missedOps: 0, lastSeen: 5}],
['defund', 'c', 5, 5, 'pending', {missedOps: 0, lastSeen: 5}],
['fund', 'e', 1, 1, 'queued', {missedOps: 0, lastSeen: 5}],
],
},
after: {
agreed: {turn: 5, bals: {a: 5, b: 5, c: 10}},
proposed: {turn: 6, bals: {a: 9, b: 9, d: 2}},
requests: [
['fund', 'd', 1, 1, 'pending', {missedOps: 0, lastSeen: 5}],
['defund', 'c', 5, 5, 'pending', {missedOps: 0, lastSeen: 5}],
['fund', 'e', 1, 1, 'queued', {missedOps: 0, lastSeen: 5}],
],
},
})
);
});
describe('in the counter-proposal state', () => {
it(
`will accept a counter-proposal and re-propose any missing requests`,
testLedgerCrank({
as: 'leader',
before: {
agreed: {turn: 5, bals: {a: 5, b: 5, c: 10}},
proposed: {turn: 6, bals: {a: 9, b: 9, d: 2}}, // leader proposes fund d and defund c
counterProposed: {turn: 7, bals: {a: 4, b: 4, c: 10, d: 2}}, // follower just accepted fund d
requests: [
['fund', 'd', 1, 1, 'pending', {missedOps: 0, lastSeen: 5}],
['defund', 'c', 5, 5, 'pending', {missedOps: 0, lastSeen: 5}],
],
},
after: {
agreed: {turn: 7, bals: {a: 4, b: 4, c: 10, d: 2}}, // counterProposal -> agreed
proposed: {turn: 8, bals: {a: 9, b: 9, d: 2}}, // re-propose to defund c
requests: [
['fund', 'd', 1, 1, 'succeeded', {missedOps: 0, lastSeen: 7}],
['defund', 'c', 5, 5, 'pending', {missedOps: 1, lastSeen: 7}],
],
},
})
);
it(
`won't apply cancellations if the state was accepted`,
testLedgerCrank({
as: 'leader',
before: {
agreed: {turn: 5, bals: {a: 10, b: 10}},
proposed: {turn: 6, bals: {a: 7, b: 8, d: 2, c: 3}}, // leader proposes fund c and d
counterProposed: {turn: 7, bals: {a: 9, b: 9, d: 2}}, // follower just accepted fund d
requests: [
['fund', 'd', 1, 1, 'pending', {missedOps: 0, lastSeen: 5}],
['fund', 'c', 2, 1, 'pending', {missedOps: 0, lastSeen: 5}],
['defund', 'd', 1, 1, 'queued'], // but now we have a defund request for d
],
},
after: {
agreed: {turn: 7, bals: {a: 9, b: 9, d: 2}}, // leader accepts counterproposal
proposed: {turn: 8, bals: {a: 8, b: 9, c: 3}}, // re-proposes fund c and defund d
requests: [
['defund', 'd', 1, 1, 'pending', {missedOps: 0, lastSeen: 7}],
['fund', 'd', 1, 1, 'succeeded', {missedOps: 0, lastSeen: 7}],
['fund', 'c', 2, 1, 'pending', {missedOps: 1, lastSeen: 7}],
],
},
})
);
it(
`will apply cancellations if the state wasn't accepted`,
testLedgerCrank({
as: 'leader',
before: {
agreed: {turn: 5, bals: {a: 10, b: 10}},
proposed: {turn: 6, bals: {a: 7, b: 8, d: 2, c: 3}}, // leader proposes fund c and d
counterProposed: {turn: 7, bals: {a: 9, b: 9, d: 2}}, // follower just accepted fund d
requests: [
['fund', 'd', 1, 1, 'pending', {missedOps: 0, lastSeen: 5}],
['fund', 'c', 2, 1, 'pending', {missedOps: 0, lastSeen: 5}],
['defund', 'c', 2, 1, 'queued'], // but now we have a defund request for c
],
},
after: {
agreed: {turn: 7, bals: {a: 9, b: 9, d: 2}}, // leader accepts counterproposal
requests: [
['fund', 'd', 1, 1, 'succeeded', {missedOps: 0, lastSeen: 7}],
['fund', 'c', 2, 1, 'cancelled', {missedOps: 0, lastSeen: 7}],
['defund', 'c', 2, 1, 'cancelled', {missedOps: 0, lastSeen: 7}],
],
},
})
);
});
});
describe('as follower', () => {
describe('in the agreement state', () => {
it(
`marks included requests as succeeded`,
testLedgerCrank({
as: 'follower',
before: {
agreed: {turn: 5, bals: {a: 10, b: 10, c: 2}},
requests: [
['defund', 'd', 1, 1, 'pending'], // d is not there, so this succeeded
['fund', 'c', 2, 0, 'pending'], // this also succeeded
],
},
after: {
agreed: {turn: 5, bals: {a: 10, b: 10, c: 2}},
requests: [
['defund', 'd', 1, 1, 'succeeded', {missedOps: 0, lastSeen: 5}],
['fund', 'c', 2, 0, 'succeeded', {missedOps: 0, lastSeen: 5}],
],
},
})
);
it(
`marks inconsistent states`,
testLedgerCrank({
as: 'follower',
before: {
agreed: {turn: 5, bals: {a: 10, b: 10, c: 2, d: 1}},
requests: [
['defund', 'd', 1, 1, 'queued'], // amount doesn't match with d's total
['fund', 'c', 2, 1, 'queued'], // c is funded, but with a different amt
],
},
after: {
agreed: {turn: 5, bals: {a: 10, b: 10, c: 2, d: 1}},
requests: [
['defund', 'd', 1, 1, 'inconsistent', {missedOps: 0, lastSeen: 5}],
['fund', 'c', 2, 1, 'inconsistent', {missedOps: 0, lastSeen: 5}],
],
},
})
);
it(
`will cancel requests that haven't been included`,
testLedgerCrank({
as: 'follower',
before: {
agreed: {turn: 5, bals: {a: 10, b: 10}},
requests: [
['fund', 'c', 2, 1, 'queued'],
['defund', 'c', 2, 1, 'queued'],
],
},
after: {
agreed: {turn: 5, bals: {a: 10, b: 10}},
requests: [
['fund', 'c', 2, 1, 'cancelled', {missedOps: 0, lastSeen: 5}],
['defund', 'c', 2, 1, 'cancelled', {missedOps: 0, lastSeen: 5}],
],
},
})
);
it(
`won't cancel requests that have been included`,
testLedgerCrank({
as: 'follower',
before: {
agreed: {turn: 5, bals: {a: 10, b: 10, c: 3}},
requests: [
['fund', 'c', 2, 1, 'pending'], // has been included
['defund', 'c', 2, 1, 'queued'],
],
},
after: {
agreed: {turn: 5, bals: {a: 10, b: 10, c: 3}},
requests: [
['fund', 'c', 2, 1, 'succeeded', {missedOps: 0, lastSeen: 5}],
['defund', 'c', 2, 1, 'queued', {missedOps: 0, lastSeen: 5}],
],
},
})
);
});
describe('in the proposal state', () => {
it(
`accepts the state if complete overlap`,
testLedgerCrank({
as: 'follower',
before: {
agreed: {turn: 5, bals: {a: 10, b: 10, c: 3}},
proposed: {turn: 6, bals: {a: 8, b: 10, d: 5}},
requests: [
['fund', 'd', 4, 1, 'queued'], // included in proposal
['defund', 'c', 2, 1, 'queued'], // included in proposal
['fund', 'e', 1, 1, 'queued'],
],
},
after: {
agreed: {turn: 6, bals: {a: 8, b: 10, d: 5}},
requests: [
['fund', 'd', 4, 1, 'succeeded', {missedOps: 0, lastSeen: 6}],
['defund', 'c', 2, 1, 'succeeded', {missedOps: 0, lastSeen: 6}],
['fund', 'e', 1, 1, 'queued', {missedOps: 1, lastSeen: 6}],
],
},
})
);
it(
`makes a counterproposal if there's some overlap`,
testLedgerCrank({
as: 'follower',
before: {
agreed: {turn: 5, bals: {a: 10, b: 10, c: 3, g: 1}},
proposed: {turn: 6, bals: {a: 8, b: 10, d: 5, f: 2}},
requests: [
['fund', 'd', 4, 1, 'queued'], // included in proposal
['defund', 'c', 2, 1, 'queued'], // included in proposal
['fund', 'e', 1, 1, 'queued'],
// don't have defund g
// don't have fund f
],
},
after: {
agreed: {turn: 5, bals: {a: 10, b: 10, c: 3, g: 1}},
proposed: {turn: 6, bals: {a: 8, b: 10, d: 5, f: 2}},
counterProposed: {turn: 7, bals: {a: 8, b: 10, d: 5, g: 1}},
requests: [
['fund', 'd', 4, 1, 'pending', {missedOps: 0, lastSeen: 5}],
['defund', 'c', 2, 1, 'pending', {missedOps: 0, lastSeen: 5}],
['fund', 'e', 1, 1, 'queued', {missedOps: 0, lastSeen: 5}],
],
},
})
);
it(
`makes a counterproposal to return to agreed if there's no overlap`,
testLedgerCrank({
as: 'follower',
before: {
agreed: {turn: 5, bals: {a: 10, b: 10, c: 3, g: 1}},
proposed: {turn: 6, bals: {a: 8, b: 10, d: 5, f: 2}},
requests: [
['fund', 'e', 1, 1, 'queued'],
// don't have fund f, defund g, fund d or defund c
],
},
after: {
agreed: {turn: 5, bals: {a: 10, b: 10, c: 3, g: 1}},
proposed: {turn: 6, bals: {a: 8, b: 10, d: 5, f: 2}},
counterProposed: {turn: 7, bals: {a: 10, b: 10, c: 3, g: 1}},
requests: [['fund', 'e', 1, 1, 'queued', {missedOps: 0, lastSeen: 5}]],
},
})
);
});
describe('in the counter-proposal state', () => {
it(
`takes no action`,
testLedgerCrank({
as: 'follower',
before: {
agreed: {turn: 5, bals: {a: 10, b: 10, c: 3, g: 1}},
proposed: {turn: 6, bals: {a: 8, b: 10, d: 5, f: 2}},
counterProposed: {turn: 7, bals: {a: 8, b: 10, d: 5, g: 1}},
requests: [
['fund', 'd', 4, 1, 'pending', {missedOps: 0, lastSeen: 5}],
['defund', 'c', 2, 1, 'pending', {missedOps: 0, lastSeen: 5}],
['fund', 'e', 1, 1, 'queued', {missedOps: 0, lastSeen: 5}],
],
},
after: {
agreed: {turn: 5, bals: {a: 10, b: 10, c: 3, g: 1}},
proposed: {turn: 6, bals: {a: 8, b: 10, d: 5, f: 2}},
counterProposed: {turn: 7, bals: {a: 8, b: 10, d: 5, g: 1}},
requests: [
['fund', 'd', 4, 1, 'pending', {missedOps: 0, lastSeen: 5}],
['defund', 'c', 2, 1, 'pending', {missedOps: 0, lastSeen: 5}],
['fund', 'e', 1, 1, 'queued', {missedOps: 0, lastSeen: 5}],
],
},
})
);
});
});
// Builds a jest test body that:
//   1. materialises the `before` ledger states + requests as fixtures,
//   2. runs one LedgerProtocol crank as the given participant,
//   3. asserts the resulting ledger states + requests match `after`.
function testLedgerCrank(args: LedgerCrankTestCaseArgs): () => void {
  return () => {
    // setup
    // -----
    const testCase = new LedgerCrankTestCase(args);
    // - create skeleton ledgerChannelObject
    const ledgerChannel = TestLedgerChannel.create({});
    const channelLookup = new ChannelLookup();
    // 'a' and 'b' are special keys that refer to the two ledger participants
    channelLookup.set('a', ledgerChannel.participantA.destination);
    channelLookup.set('b', ledgerChannel.participantB.destination);
    // first we need to turn strings like 'c' and 'd' into actual channels in the store
    for (const key of testCase.referencedChannelDests) {
      channelLookup.set(key, TestChannel.create({aBal: 5, bBal: 5}).channelId);
    }
    // - construct and add the before states
    // Maps a {turn, bals} description into StateWithBals, resolving the short
    // keys to destinations and deriving the signer(s) from the state type.
    const stateToParams = (
      state: StateDesc,
      type: 'agreed' | 'proposed' | 'counter-proposed'
    ): StateWithBals => ({
      turn: state.turn,
      bals: Object.keys(state.bals).map(
        k => [channelLookup.get(k), state.bals[k]] as [string, number]
      ),
      // agreed states are signed by both; proposals by the leader (index 0);
      // counter-proposals by the follower (index 1)
      signedBy: {agreed: 'both', proposed: 0, 'counter-proposed': 1}[type] as SignedBy,
    });
    const initialStates: StateWithBals[] = _.compact([
      testCase.agreedBefore && stateToParams(testCase.agreedBefore, 'agreed'),
      testCase.proposedBefore && stateToParams(testCase.proposedBefore, 'proposed'),
      testCase.counterProposedBefore &&
        stateToParams(testCase.counterProposedBefore, 'counter-proposed'),
    ]);
    const myIndex = args.as === 'leader' ? 0 : 1;
    const requests = testCase.requestsBefore.map(req => {
      const channelToBeFunded = channelLookup.get(req.channelKey);
      switch (req.type) {
        case 'fund':
          return ledgerChannel.fundingRequest({...req, channelToBeFunded});
        case 'defund':
          return ledgerChannel.defundingRequest({...req, channelToBeFunded});
        default:
          unreachable(req.type);
      }
    });
    const vars = initialStates
      .map(s => ledgerChannel.signedStateWithHash(s.turn, s.bals, s.signedBy))
      .map(dropNonVariables);
    const {address: signingAddress} = ledgerChannel.signingWallets[myIndex];
    const ledger = channel({...ledgerChannel.channelConstants, signingAddress, vars});
    // crank (and update ledger vars)
    // -----
    new LedgerProtocol().crank(ledger, requests).map(s => {
      // sign and store each state the protocol asks us to emit
      const actualState = addHash(s.signedState);
      const signatures = [ledgerChannel.signingWallets[myIndex].signState(actualState)];
      ledger.vars = addState(ledger.vars, {...actualState, signatures});
    });
    ledger.vars = clearOldStates(ledger.vars, ledger.support);
    // assertions
    // ----------
    // get the latest agreed state and the turn number
    const agreedState = ledger.latestFullySignedState;
    if (!agreedState) throw new Error(`No latest agreed state`);
    const ledgerStateDesc: LedgerStateDescription = {
      agreed: toStateDesc(agreedState, channelLookup),
      requests: [],
    };
    // a unique state at turn agreed+1 signed only by the leader is a proposal
    const proposedState = ledger.uniqueStateAt(agreedState.turnNum + 1);
    if (proposedState) {
      expect(proposedState.signerIndices).toEqual([0]);
      ledgerStateDesc['proposed'] = toStateDesc(proposedState, channelLookup);
    }
    // a unique state at turn agreed+2 signed only by the follower is a counter-proposal
    const counterProposedState = ledger.uniqueStateAt(agreedState.turnNum + 2);
    if (counterProposedState) {
      expect(counterProposedState.signerIndices).toEqual([1]);
      ledgerStateDesc['counterProposed'] = toStateDesc(counterProposedState, channelLookup);
    }
    // translate the (now mutated) requests back into tuple form for comparison
    ledgerStateDesc.requests = requests.map(req => [
      req.type,
      channelLookup.getKey(req.channelToBeFunded),
      Number(req.amountA),
      Number(req.amountB),
      req.status,
      {missedOps: req.missedOpportunityCount, lastSeen: req.lastSeenAgreedState || undefined},
    ]);
    // sort both sides so request ordering doesn't affect the comparison
    // (default sort stringifies the tuples — deterministic for this data)
    ledgerStateDesc.requests.sort();
    args.after.requests.sort();
    expect(ledgerStateDesc).toEqual(args.after);
  };
}
// Translate a channel State into the compact {turn, bals} test description,
// mapping each destination back to its short key ('c', 'd', ...) via `lookup`.
function toStateDesc(state: State, lookup: ChannelLookup): StateDesc {
  const bals: StateDesc['bals'] = {};
  for (const item of state.simpleAllocationOutcome?.items ?? []) {
    bals[lookup.getKey(item.destination)] = Number(item.amount);
  }
  return {turn: state.turnNum, bals};
}
class LedgerCrankTestCase {
constructor(private args: LedgerCrankTestCaseArgs) {}
get statesBefore(): StateDesc[] {
return _.compact([
this.args.before.agreed,
this.args.before.proposed,
this.args.before.counterProposed,
]);
}
get statesAfter(): StateDesc[] {
return _.compact([
this.args.after.agreed,
this.args.after.proposed,
this.args.after.counterProposed,
]);
}
get agreedBefore(): StateDesc {
return this.args.before.agreed;
}
get proposedBefore(): StateDesc | undefined {
return this.args.before.proposed;
}
get counterProposedBefore(): StateDesc | undefined {
return this.args.before.counterProposed;
}
get agreedAfter(): StateDesc {
return this.args.after.agreed;
}
get proposedAfter(): StateDesc | undefined {
return this.args.after.proposed;
}
get counterProposedAfter(): StateDesc | undefined {
return this.args.after.counterProposed;
}
get requestsBefore(): Request[] {
return this.args.before.requests.map(r => ({
type: r[0],
channelKey: r[1],
amtA: r[2],
amtB: r[3],
status: r[4],
missedOps: r[5]?.missedOps,
lastSeen: r[5]?.lastSeen,
}));
}
get requestsAfter(): Request[] {
return this.args.after.requests.map(r => ({
type: r[0],
channelKey: r[1],
amtA: r[2],
amtB: r[3],
status: r[4],
missedOps: r[5]?.missedOps,
lastSeen: r[5]?.lastSeen,
}));
}
get referencedChannelDests(): string[] {
const referencedChannels = new Set<string>();
// add any channels referenced in the states
[...this.statesBefore, ...this.statesAfter]
.flatMap(s => Object.keys(s.bals))
.forEach(x => referencedChannels.add(x));
// as well as any channels referenced in the requests
[...this.args.before.requests, ...this.args.after.requests].forEach(r =>
referencedChannels.add(r[1])
);
// a and b are special and refer to the participants
referencedChannels.delete('a');
referencedChannels.delete('b');
return Array.from(referencedChannels);
}
}
// Bidirectional short-key <-> Destination mapping used by the fixtures
// ('a'/'b' for participants, 'c', 'd', ... for channels).
class ChannelLookup {
  private lookup: Record<string, Destination> = {};

  // Look up the Destination registered under `key`; throws when absent.
  // Uses an explicit undefined check rather than truthiness, so a
  // falsy-but-present value (e.g. an empty-string destination) is not
  // mistaken for a missing entry.
  get(key: string): Destination {
    const val = this.lookup[key];
    if (val === undefined) throw Error(`ChannelLookup missing key ${key}`);
    return val;
  }

  // Reverse lookup: find the short key registered for `val`; throws when
  // no entry maps to it.
  getKey(val: string): string {
    const entry = Object.entries(this.lookup).find(([, v]) => v === val);
    if (!entry) throw Error(`ChannelLookup missing key for value ${val}`);
    return entry[0];
  }

  // Register (or overwrite) the mapping for key `k`.
  set(k: string, v: Destination): void {
    this.lookup[k] = v;
  }
}
// A ledger request in structured (object) form, as consumed by the fixtures.
interface Request {
  type: 'fund' | 'defund';
  // short channel key ('c', 'd', ...) resolved via ChannelLookup
  channelKey: string;
  amtA: number;
  amtB: number;
  status: LedgerRequestStatus;
  // NOTE(review): presumably how many agreed states passed without this
  // request being included — confirm against LedgerRequest model
  missedOps?: number;
  // turn number of the last agreed state this request observed
  lastSeen?: number;
}
// Compact tuple form of a request used in the test tables:
// [type, channelKey, amtA, amtB, status, {missedOps?, lastSeen?}?]
type RequestDesc =
  | ['defund' | 'fund', string, number, number, LedgerRequestStatus]
  | [
      'defund' | 'fund',
      string,
      number,
      number,
      LedgerRequestStatus,
      {missedOps?: number; lastSeen?: number}
    ];
// A ledger state described by its turn number and channel balances,
// keyed by the short channel keys.
type StateDesc = {
  turn: number;
  bals: Record<string, number>;
};
// Snapshot of the ledger: the agreed state, optional proposal and
// counter-proposal, and the outstanding requests.
interface LedgerStateDescription {
  agreed: StateDesc;
  proposed?: StateDesc;
  counterProposed?: StateDesc;
  requests: RequestDesc[];
}
// Arguments for testLedgerCrank: which participant we act as, and the
// ledger description before and after one crank.
interface LedgerCrankTestCaseArgs {
  as: 'leader' | 'follower';
  before: LedgerStateDescription;
  after: LedgerStateDescription;
}
* OasisExplorer API
* No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
*
* The version of the OpenAPI document: 1
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
import * as runtime from '../runtime';
import {
AccountListItem,
AccountListItemFromJSON,
AccountListItemToJSON,
AccountReward,
AccountRewardFromJSON,
AccountRewardToJSON,
AccountRewardsStat,
AccountRewardsStatFromJSON,
AccountRewardsStatToJSON,
AccountsRow,
AccountsRowFromJSON,
AccountsRowToJSON,
BalanceChart,
BalanceChartFromJSON,
BalanceChartToJSON,
ValidatorDelegator,
ValidatorDelegatorFromJSON,
ValidatorDelegatorToJSON,
ValidatorEntity,
ValidatorEntityFromJSON,
ValidatorEntityToJSON,
ValidatorReward,
ValidatorRewardFromJSON,
ValidatorRewardToJSON,
ValidatorRewardsStat,
ValidatorRewardsStatFromJSON,
ValidatorRewardsStatToJSON,
ValidatorRow,
ValidatorRowFromJSON,
ValidatorRowToJSON,
ValidatorStat,
ValidatorStatFromJSON,
ValidatorStatToJSON,
} from '../models';
/** Parameters for {@link AccountsApi.getAccount}. */
export interface GetAccountRequest {
  accountId: string;
}
/** Parameters for {@link AccountsApi.getAccountBalanceChart}; `from`/`to` bound the requested range. */
export interface GetAccountBalanceChartRequest {
  accountId: string;
  frame: GetAccountBalanceChartFrameEnum;
  from: number;
  to: number;
}
/** Parameters for {@link AccountsApi.getAccountRewards}; `limit`/`offset` are required here. */
export interface GetAccountRewardsRequest {
  accountId: string;
  limit: number;
  offset: number;
}
/** Parameters for {@link AccountsApi.getAccountsList}; all paging/sorting options are optional. */
export interface GetAccountsListRequest {
  limit?: number;
  offset?: number;
  sortColumn?: GetAccountsListSortColumnEnum;
  sortSide?: GetAccountsListSortSideEnum;
}
/** Parameters for {@link AccountsApi.getAccountsRewardsStat}. */
export interface GetAccountsRewardsStatRequest {
  accountId: string;
}
/** Parameters for {@link AccountsApi.getValidatorDelegatorsList}. */
export interface GetValidatorDelegatorsListRequest {
  accountId: string;
  limit?: number;
  offset?: number;
}
/** Parameters for {@link AccountsApi.getValidatorInfo}. */
export interface GetValidatorInfoRequest {
  accountId: string;
  limit?: number;
  offset?: number;
}
/** Parameters for {@link AccountsApi.getValidatorRewards}; `limit`/`offset` are required here. */
export interface GetValidatorRewardsRequest {
  accountId: string;
  limit: number;
  offset: number;
}
/** Parameters for {@link AccountsApi.getValidatorRewardsStat}. */
export interface GetValidatorRewardsStatRequest {
  accountId: string;
}
/** Parameters for {@link AccountsApi.getValidatorStatChart}; `from`/`to` bound the requested range. */
export interface GetValidatorStatChartRequest {
  accountId: string;
  frame: GetValidatorStatChartFrameEnum;
  from: number;
  to: number;
}
/** Parameters for {@link AccountsApi.getValidatorsList}. */
export interface GetValidatorsListRequest {
  limit?: number;
  offset?: number;
}
/**
*
*/
export class AccountsApi extends runtime.BaseAPI {
/**
*/
async getAccountRaw(requestParameters: GetAccountRequest): Promise<runtime.ApiResponse<AccountsRow>> {
if (requestParameters.accountId === null || requestParameters.accountId === undefined) {
throw new runtime.RequiredError('accountId','Required parameter requestParameters.accountId was null or undefined when calling getAccount.');
}
const queryParameters: any = {};
const headerParameters: runtime.HTTPHeaders = {};
const response = await this.request({
path: `/data/accounts/{account_id}`.replace(`{${"account_id"}}`, encodeURIComponent(String(requestParameters.accountId))),
method: 'GET',
headers: headerParameters,
query: queryParameters,
});
return new runtime.JSONApiResponse(response, (jsonValue) => AccountsRowFromJSON(jsonValue));
}
/**
*/
async getAccount(requestParameters: GetAccountRequest): Promise<AccountsRow> {
const response = await this.getAccountRaw(requestParameters);
return await response.value();
}
/**
*/
async getAccountBalanceChartRaw(requestParameters: GetAccountBalanceChartRequest): Promise<runtime.ApiResponse<Array<BalanceChart>>> {
if (requestParameters.accountId === null || requestParameters.accountId === undefined) {
throw new runtime.RequiredError('accountId','Required parameter requestParameters.accountId was null or undefined when calling getAccountBalanceChart.');
}
if (requestParameters.frame === null || requestParameters.frame === undefined) {
throw new runtime.RequiredError('frame','Required parameter requestParameters.frame was null or undefined when calling getAccountBalanceChart.');
}
if (requestParameters.from === null || requestParameters.from === undefined) {
throw new runtime.RequiredError('from','Required parameter requestParameters.from was null or undefined when calling getAccountBalanceChart.');
}
if (requestParameters.to === null || requestParameters.to === undefined) {
throw new runtime.RequiredError('to','Required parameter requestParameters.to was null or undefined when calling getAccountBalanceChart.');
}
const queryParameters: any = {};
if (requestParameters.frame !== undefined) {
queryParameters['frame'] = requestParameters.frame;
}
if (requestParameters.from !== undefined) {
queryParameters['from'] = requestParameters.from;
}
if (requestParameters.to !== undefined) {
queryParameters['to'] = requestParameters.to;
}
const headerParameters: runtime.HTTPHeaders = {};
const response = await this.request({
path: `/chart/balance/{account_id}`.replace(`{${"account_id"}}`, encodeURIComponent(String(requestParameters.accountId))),
method: 'GET',
headers: headerParameters,
query: queryParameters,
});
return new runtime.JSONApiResponse(response, (jsonValue) => jsonValue.map(BalanceChartFromJSON));
}
/**
*/
async getAccountBalanceChart(requestParameters: GetAccountBalanceChartRequest): Promise<Array<BalanceChart>> {
const response = await this.getAccountBalanceChartRaw(requestParameters);
return await response.value();
}
/**
*/
async getAccountRewardsRaw(requestParameters: GetAccountRewardsRequest): Promise<runtime.ApiResponse<Array<AccountReward>>> {
if (requestParameters.accountId === null || requestParameters.accountId === undefined) {
throw new runtime.RequiredError('accountId','Required parameter requestParameters.accountId was null or undefined when calling getAccountRewards.');
}
if (requestParameters.limit === null || requestParameters.limit === undefined) {
throw new runtime.RequiredError('limit','Required parameter requestParameters.limit was null or undefined when calling getAccountRewards.');
}
if (requestParameters.offset === null || requestParameters.offset === undefined) {
throw new runtime.RequiredError('offset','Required parameter requestParameters.offset was null or undefined when calling getAccountRewards.');
}
const queryParameters: any = {};
if (requestParameters.limit !== undefined) {
queryParameters['limit'] = requestParameters.limit;
}
if (requestParameters.offset !== undefined) {
queryParameters['offset'] = requestParameters.offset;
}
const headerParameters: runtime.HTTPHeaders = {};
const response = await this.request({
path: `/data/accounts/{account_id}/rewards`.replace(`{${"account_id"}}`, encodeURIComponent(String(requestParameters.accountId))),
method: 'GET',
headers: headerParameters,
query: queryParameters,
});
return new runtime.JSONApiResponse(response, (jsonValue) => jsonValue.map(AccountRewardFromJSON));
}
/**
*/
async getAccountRewards(requestParameters: GetAccountRewardsRequest): Promise<Array<AccountReward>> {
const response = await this.getAccountRewardsRaw(requestParameters);
return await response.value();
}
/**
*/
async getAccountsListRaw(requestParameters: GetAccountsListRequest): Promise<runtime.ApiResponse<Array<AccountListItem>>> {
const queryParameters: any = {};
if (requestParameters.limit !== undefined) {
queryParameters['limit'] = requestParameters.limit;
}
if (requestParameters.offset !== undefined) {
queryParameters['offset'] = requestParameters.offset;
}
if (requestParameters.sortColumn !== undefined) {
queryParameters['sort_column'] = requestParameters.sortColumn;
}
if (requestParameters.sortSide !== undefined) {
queryParameters['sort_side'] = requestParameters.sortSide;
}
const headerParameters: runtime.HTTPHeaders = {};
const response = await this.request({
path: `/data/accounts`,
method: 'GET',
headers: headerParameters,
query: queryParameters,
});
return new runtime.JSONApiResponse(response, (jsonValue) => jsonValue.map(AccountListItemFromJSON));
}
/**
*/
async getAccountsList(requestParameters: GetAccountsListRequest): Promise<Array<AccountListItem>> {
const response = await this.getAccountsListRaw(requestParameters);
return await response.value();
}
/**
*/
async getAccountsRewardsStatRaw(requestParameters: GetAccountsRewardsStatRequest): Promise<runtime.ApiResponse<AccountRewardsStat>> {
if (requestParameters.accountId === null || requestParameters.accountId === undefined) {
throw new runtime.RequiredError('accountId','Required parameter requestParameters.accountId was null or undefined when calling getAccountsRewardsStat.');
}
const queryParameters: any = {};
const headerParameters: runtime.HTTPHeaders = {};
const response = await this.request({
path: `/data/accounts/{account_id}/rewards/stat`.replace(`{${"account_id"}}`, encodeURIComponent(String(requestParameters.accountId))),
method: 'GET',
headers: headerParameters,
query: queryParameters,
});
return new runtime.JSONApiResponse(response, (jsonValue) => AccountRewardsStatFromJSON(jsonValue));
}
/**
*/
async getAccountsRewardsStat(requestParameters: GetAccountsRewardsStatRequest): Promise<AccountRewardsStat> {
const response = await this.getAccountsRewardsStatRaw(requestParameters);
return await response.value();
}
/**
*/
async getPublicValidatorsSearchListRaw(): Promise<runtime.ApiResponse<Array<ValidatorEntity>>> {
const queryParameters: any = {};
const headerParameters: runtime.HTTPHeaders = {};
const response = await this.request({
path: `/data/validators/search`,
method: 'GET',
headers: headerParameters,
query: queryParameters,
});
return new runtime.JSONApiResponse(response, (jsonValue) => jsonValue.map(ValidatorEntityFromJSON));
}
/**
*/
async getPublicValidatorsSearchList(): Promise<Array<ValidatorEntity>> {
const response = await this.getPublicValidatorsSearchListRaw();
return await response.value();
}
/**
*/
async getValidatorDelegatorsListRaw(requestParameters: GetValidatorDelegatorsListRequest): Promise<runtime.ApiResponse<Array<ValidatorDelegator>>> {
if (requestParameters.accountId === null || requestParameters.accountId === undefined) {
throw new runtime.RequiredError('accountId','Required parameter requestParameters.accountId was null or undefined when calling getValidatorDelegatorsList.');
}
const queryParameters: any = {};
if (requestParameters.limit !== undefined) {
queryParameters['limit'] = requestParameters.limit;
}
if (requestParameters.offset !== undefined) {
queryParameters['offset'] = requestParameters.offset;
}
const headerParameters: runtime.HTTPHeaders = {};
const response = await this.request({
path: `/data/validator/{account_id}/delegators`.replace(`{${"account_id"}}`, encodeURIComponent(String(requestParameters.accountId))),
method: 'GET',
headers: headerParameters,
query: queryParameters,
});
return new runtime.JSONApiResponse(response, (jsonValue) => jsonValue.map(ValidatorDelegatorFromJSON));
}
/**
*/
async getValidatorDelegatorsList(requestParameters: GetValidatorDelegatorsListRequest): Promise<Array<ValidatorDelegator>> {
const response = await this.getValidatorDelegatorsListRaw(requestParameters);
return await response.value();
}
/**
 * Retrieve the information rows for one validator account.
 * Raw variant: resolves with the full {@link runtime.ApiResponse} wrapper.
 */
async getValidatorInfoRaw(requestParameters: GetValidatorInfoRequest): Promise<runtime.ApiResponse<Array<ValidatorRow>>> {
    // accountId is interpolated into the URL path and therefore mandatory.
    if (requestParameters.accountId == null) {
        throw new runtime.RequiredError('accountId','Required parameter requestParameters.accountId was null or undefined when calling getValidatorInfo.');
    }
    // Forward pagination options only when the caller supplied them.
    const queryParameters: any = {};
    for (const key of ['limit', 'offset'] as const) {
        if (requestParameters[key] !== undefined) {
            queryParameters[key] = requestParameters[key];
        }
    }
    const headerParameters: runtime.HTTPHeaders = {};
    const response = await this.request({
        path: `/data/validator/{account_id}`.replace(`{${"account_id"}}`, encodeURIComponent(String(requestParameters.accountId))),
        method: 'GET',
        headers: headerParameters,
        query: queryParameters,
    });
    return new runtime.JSONApiResponse(response, (jsonValue) => jsonValue.map(ValidatorRowFromJSON));
}
/**
*/
async getValidatorInfo(requestParameters: GetValidatorInfoRequest): Promise<Array<ValidatorRow>> {
const response = await this.getValidatorInfoRaw(requestParameters);
return await response.value();
}
/**
 * Retrieve reward events for the given validator account.
 * Raw variant: resolves with the full {@link runtime.ApiResponse} wrapper.
 *
 * accountId, limit and offset are all required by this endpoint.
 */
async getValidatorRewardsRaw(requestParameters: GetValidatorRewardsRequest): Promise<runtime.ApiResponse<Array<ValidatorReward>>> {
    if (requestParameters.accountId === null || requestParameters.accountId === undefined) {
        throw new runtime.RequiredError('accountId','Required parameter requestParameters.accountId was null or undefined when calling getValidatorRewards.');
    }
    if (requestParameters.limit === null || requestParameters.limit === undefined) {
        throw new runtime.RequiredError('limit','Required parameter requestParameters.limit was null or undefined when calling getValidatorRewards.');
    }
    if (requestParameters.offset === null || requestParameters.offset === undefined) {
        throw new runtime.RequiredError('offset','Required parameter requestParameters.offset was null or undefined when calling getValidatorRewards.');
    }
    // limit/offset are guaranteed defined after the guards above, so the
    // generated `!== undefined` re-checks were dead and have been removed.
    const queryParameters: any = {
        limit: requestParameters.limit,
        offset: requestParameters.offset
    };
    const headerParameters: runtime.HTTPHeaders = {};
    const response = await this.request({
        path: `/data/validator/{account_id}/rewards`.replace(`{${"account_id"}}`, encodeURIComponent(String(requestParameters.accountId))),
        method: 'GET',
        headers: headerParameters,
        query: queryParameters,
    });
    return new runtime.JSONApiResponse(response, (jsonValue) => jsonValue.map(ValidatorRewardFromJSON));
}
/**
*/
async getValidatorRewards(requestParameters: GetValidatorRewardsRequest): Promise<Array<ValidatorReward>> {
const response = await this.getValidatorRewardsRaw(requestParameters);
return await response.value();
}
/**
 * Retrieve aggregated reward statistics for a validator account.
 * Raw variant: resolves with the full {@link runtime.ApiResponse} wrapper.
 */
async getValidatorRewardsStatRaw(requestParameters: GetValidatorRewardsStatRequest): Promise<runtime.ApiResponse<ValidatorRewardsStat>> {
    // accountId is interpolated into the URL path and therefore mandatory.
    if (requestParameters.accountId == null) {
        throw new runtime.RequiredError('accountId','Required parameter requestParameters.accountId was null or undefined when calling getValidatorRewardsStat.');
    }
    // This endpoint takes no query parameters or custom headers.
    const query: any = {};
    const headers: runtime.HTTPHeaders = {};
    const response = await this.request({
        path: `/data/validator/{account_id}/rewards/stat`.replace(`{${"account_id"}}`, encodeURIComponent(String(requestParameters.accountId))),
        method: 'GET',
        headers,
        query,
    });
    return new runtime.JSONApiResponse(response, (jsonValue) => ValidatorRewardsStatFromJSON(jsonValue));
}
/**
*/
async getValidatorRewardsStat(requestParameters: GetValidatorRewardsStatRequest): Promise<ValidatorRewardsStat> {
const response = await this.getValidatorRewardsStatRaw(requestParameters);
return await response.value();
}
/**
 * Retrieve the validator statistics chart series.
 * Raw variant: resolves with the full {@link runtime.ApiResponse} wrapper.
 *
 * accountId, frame, from and to are all required by this endpoint.
 */
async getValidatorStatChartRaw(requestParameters: GetValidatorStatChartRequest): Promise<runtime.ApiResponse<Array<ValidatorStat>>> {
    if (requestParameters.accountId === null || requestParameters.accountId === undefined) {
        throw new runtime.RequiredError('accountId','Required parameter requestParameters.accountId was null or undefined when calling getValidatorStatChart.');
    }
    if (requestParameters.frame === null || requestParameters.frame === undefined) {
        throw new runtime.RequiredError('frame','Required parameter requestParameters.frame was null or undefined when calling getValidatorStatChart.');
    }
    if (requestParameters.from === null || requestParameters.from === undefined) {
        throw new runtime.RequiredError('from','Required parameter requestParameters.from was null or undefined when calling getValidatorStatChart.');
    }
    if (requestParameters.to === null || requestParameters.to === undefined) {
        throw new runtime.RequiredError('to','Required parameter requestParameters.to was null or undefined when calling getValidatorStatChart.');
    }
    // frame/from/to are guaranteed defined after the guards above, so the
    // generated `!== undefined` re-checks were dead and have been removed.
    const queryParameters: any = {
        frame: requestParameters.frame,
        from: requestParameters.from,
        to: requestParameters.to
    };
    const headerParameters: runtime.HTTPHeaders = {};
    const response = await this.request({
        path: `/chart/validator_stat/{account_id}`.replace(`{${"account_id"}}`, encodeURIComponent(String(requestParameters.accountId))),
        method: 'GET',
        headers: headerParameters,
        query: queryParameters,
    });
    return new runtime.JSONApiResponse(response, (jsonValue) => jsonValue.map(ValidatorStatFromJSON));
}
/**
*/
async getValidatorStatChart(requestParameters: GetValidatorStatChartRequest): Promise<Array<ValidatorStat>> {
const response = await this.getValidatorStatChartRaw(requestParameters);
return await response.value();
}
/**
*/
async getValidatorsListRaw(requestParameters: GetValidatorsListRequest): Promise<runtime.ApiResponse<Array<ValidatorRow>>> {
const queryParameters: any = {};
if (requestParameters.limit !== undefined) {
queryParameters['limit'] = requestParameters.limit;
}
if (requestParameters.offset !== undefined) {
queryParameters['offset'] = requestParameters.offset;
}
const headerParameters: runtime.HTTPHeaders = {};
const response = await this.request({
path: `/data/validators`,
method: 'GET',
headers: headerParameters,
query: queryParameters,
});
return new runtime.JSONApiResponse(response, (jsonValue) => jsonValue.map(ValidatorRowFromJSON));
}
/**
*/
async getValidatorsList(requestParameters: GetValidatorsListRequest): Promise<Array<ValidatorRow>> {
const response = await this.getValidatorsListRaw(requestParameters);
return await response.value();
}
}
/**
 * Time frames accepted by the account balance chart endpoint.
 * Only daily ('D') aggregation is currently exposed by the API.
 * @export
 * @enum {string}
 */
export enum GetAccountBalanceChartFrameEnum {
    D = 'D'
}
/**
 * Columns the accounts-list endpoint accepts for server-side sorting.
 * Values are the wire names expected in the `sort_column` query parameter.
 * @export
 * @enum {string}
 */
export enum GetAccountsListSortColumnEnum {
    CreatedAt = 'created_at',
    GeneralBalance = 'general_balance',
    EscrowBalance = 'escrow_balance',
    EscrowShare = 'escrow_share',
    OperationsAmount = 'operations_amount'
}
/**
 * Sort direction for the accounts-list endpoint (ascending/descending).
 * @export
 * @enum {string}
 */
export enum GetAccountsListSortSideEnum {
    Asc = 'asc',
    Desc = 'desc'
}
/**
* @export
* @enum {string}
*/
export enum GetValidatorStatChartFrameEnum {
D = 'D'
} | the_stack |
import { useSelector } from "react-redux";
import { AppState } from "../AppState";
import { Constants as C } from "../Constants";
import { NodeActionType } from "../enums/NodeActionType";
import { TypeHandlerIntf } from "../intf/TypeHandlerIntf";
import * as J from "../JavaIntf";
import { PubSub } from "../PubSub";
import { Singletons } from "../Singletons";
import { Comp } from "../widget/base/Comp";
import { Button } from "../widget/Button";
import { ButtonBar } from "../widget/ButtonBar";
import { Checkbox } from "../widget/Checkbox";
import { Div } from "../widget/Div";
import { Icon } from "../widget/Icon";
import { IconButton } from "../widget/IconButton";
import { Span } from "../widget/Span";
// Deferred singleton access: the Singletons container is published via PubSub
// once the app finishes bootstrapping, so capture it here instead of
// resolving it eagerly at module load time.
let S: Singletons;
PubSub.sub(C.PUBSUB_SingletonsReady, (ctx: Singletons) => {
    S = ctx;
});
/* General Widget that doesn't fit any more reusable or specific category other than a plain Div, but inherits capability of Comp class */
export class NodeCompButtonBar extends Div {
constructor(public node: J.NodeInfo, public allowNodeMove: boolean, private level: number, private extraButtons: IconButton[], private extraClass: string) {
super(null, {
id: "NodeCompButtonBar_" + node.id,
className: "marginLeft " + extraClass
});
}
preRender(): void {
let state: AppState = useSelector((state: AppState) => state);
let node = this.node;
if (!node) {
this.setChildren(null);
return;
}
let encIcon: Icon;
let sharedIcon: Icon;
let openButton: Button;
let selButton: Checkbox;
let createSubNodeButton: Button;
let editNodeButton: Button;
let insertNodeButton: Button;
let cutNodeButton: Icon;
let moveNodeUpButton: Icon;
let moveNodeDownButton: Icon;
let deleteNodeButton: Icon;
let pasteButtons: Span;
let isPageRootNode = state.node && this.node.id === state.node.id;
let typeHandler: TypeHandlerIntf = S.plugin.getTypeHandler(node.type);
let editingAllowed = S.edit.isEditAllowed(node, state);
let deleteAllowed = false;
let editableNode = true;
if (state.isAdminUser) {
editingAllowed = true;
deleteAllowed = true;
editableNode = true;
}
else if (typeHandler) {
if (editingAllowed) {
editingAllowed = typeHandler.allowAction(NodeActionType.editNode, node, state);
deleteAllowed = typeHandler.allowAction(NodeActionType.delete, node, state);
editableNode = typeHandler.allowAction(NodeActionType.editNode, node, state);
}
}
else {
// bug fix. this case was not covered.
if (editingAllowed) {
deleteAllowed = true;
}
}
/* putting this logic separate from setters above, but this is because we don't allow the actual page root
to be deleted WHILE you're looking at it */
if (isPageRootNode) {
deleteAllowed = false;
}
if (S.props.isEncrypted(node)) {
encIcon = new Icon({
className: "fa fa-lock fa-lg rowIcon",
title: "Node is Encrypted."
});
}
/* DO NOT DELETE
todo-2: need to make this if condition:
if ((state.isAdminUser || S.props.isMine(node, state)) && S.props.isShared(node)) {
show cause a clickable link to show up on the "shared to: " text
to run the editNodeSharing()
(I may bring this back eventually, but for now the fact that the sharing is being presented
in the header of each node we don't need this icon and popup text )
if (S.props.isShared(node)) {
let sharingNames = S.util.getSharingNames(node, true);
sharedIcon = new Icon({
className: "fa fa-share-alt fa-lg rowIcon",
onClick: () => S.share.editNodeSharing(state, node),
title: "Shared to:\n" + sharingNames
});
}
*/
let isInlineChildren = !!S.props.getNodePropVal(J.NodeProp.INLINE_CHILDREN, node);
/*
We always enable for fs:folder, to that by clicking to open a folder that will cause the server to re-check and see if there are
truly any files in there or not because we really cannot possibly know until we look. The only way to make this Open button
ONLY show when there ARE truly children fore sure would be to force a check of the file system for every folder type that is ever rendered
on a page and we don't want to burn that much CPU just to prevent empty-folders from being explored. Empty folders are rare.
*/
if (node.hasChildren && !isPageRootNode &&
// If children are shown inline, no need to allow 'open' button in this case unless we're in edit mode
(!isInlineChildren || state.userPreferences.editMode)) {
openButton = new Button(null, S.nav.openNodeById, {
iconclass: "fa fa-folder-open",
nid: node.id,
title: "Open Node to access its children"
}, "btn-primary");
}
/*
* If in edit mode we always at least create the potential (buttons) for a user to insert content, and if
* they don't have privileges the server side security will let them know. In the future we can add more
* intelligence to when to show these buttons or not.
*/
if (state.userPreferences.editMode) {
let checkboxForEdit = editingAllowed && (state.isAdminUser || S.render.allowAction(typeHandler, NodeActionType.editNode, node, state));
let checkboxForDelete = state.isAdminUser || deleteAllowed;
if ((checkboxForEdit || checkboxForDelete) &&
// no need to ever select home node
node.id !== state.homeNodeId) {
selButton = new Checkbox(null, {
title: "Select Node for multi-node functions."
}, {
setValue: (checked: boolean): void => {
if (checked) {
state.selectedNodes.add(node.id);
} else {
state.selectedNodes.delete(node.id);
}
},
getValue: (): boolean => {
return state.selectedNodes.has(node.id);
}
}, true);
}
let insertAllowed = true;
// if this is our own account node, we can always leave insertAllowed=true
if (state.homeNodeId !== node.id) {
if (typeHandler) {
insertAllowed = state.isAdminUser || typeHandler.allowAction(NodeActionType.insert, node, state);
}
}
let editInsertAllowed = S.edit.isInsertAllowed(node, state);
if (C.NEW_ON_TOOLBAR && insertAllowed && editInsertAllowed && !state.editNode) {
createSubNodeButton = new Button(null, S.edit.newSubNode, {
iconclass: "fa fa-plus",
nid: node.id,
title: "Create new Node (as child of this node)"
});
}
if (C.INS_ON_TOOLBAR) {
// todo-1: this button should have same enablement as "new" button, on the page root ???
insertNodeButton = new Button("Ins", S.edit.toolbarInsertNode, {
title: "Insert new Node at this location.",
nid: node.id
});
}
let userCanPaste = S.props.isMine(node, state) || state.isAdminUser || node.id === state.homeNodeId;
if (editingAllowed) {
if (editableNode && !state.editNode) {
editNodeButton = new Button(null, S.edit.runEditNodeByClick, {
iconclass: "fa fa-edit",
title: "Edit Node",
nid: node.id
});
}
if (!isPageRootNode && node.type !== J.NodeType.REPO_ROOT && !state.nodesToMove) {
cutNodeButton = new Icon({
className: "fa fa-cut fa-lg buttonBarIcon",
title: "Cut selected Node(s) to paste elsewhere.",
nid: node.id,
onClick: S.edit.cutSelNodes
});
}
if (C.MOVE_UPDOWN_ON_TOOLBAR && this.allowNodeMove) {
if (node.logicalOrdinal > 0) {
moveNodeUpButton = new Icon({
className: "fa fa-arrow-up buttonBarIcon",
title: "Move Node up one position (higher)",
nid: node.id,
onClick: S.edit.moveNodeUp
});
}
if (!node.lastChild && state.node.children && state.node.children.length > 1) {
moveNodeDownButton = new Icon({
className: "fa fa-arrow-down buttonBarIcon",
title: "Move Node down one position (lower)",
nid: node.id,
onClick: S.edit.moveNodeDown
});
}
}
}
if (deleteAllowed) {
// not user's account node!
if (node.id !== state.homeNodeId) {
deleteNodeButton = new Icon({
className: "fa fa-trash fa-lg buttonBarIcon",
title: "Delete selected nodes",
nid: node.id,
onClick: S.edit.deleteSelNodes
});
}
}
if (!!state.nodesToMove && userCanPaste) {
pasteButtons = new Span(null, { className: "float-right marginLeft" }, [
new Button("Paste Inside",
S.edit.pasteSelNodesInside, { nid: node.id }, "btn-secondary pasteButton"),
node.id !== state.homeNodeId
? new Button("Paste Here", S.edit.pasteSelNodes_InlineAbove, { nid: node.id }, "btn-secondary pasteButton") : null
]);
}
}
let searchButton: Icon = null;
let timelineButton: Icon = null;
let nodeFeedButton: Icon = null;
let upLevelButton: IconButton;
let prevButton: IconButton;
let nextButton: IconButton;
if (isPageRootNode) {
if (state.node && this.node.id === state.node.id) {
if (S.nav.parentVisibleToUser(state)) {
upLevelButton = new IconButton("fa-folder", "Up", {
nid: node.id,
/* For onclick functions I need a new approach for some (not all) where I can get by
with using a function that accepts no arguments but does the trick of retrieving the single ID parameter
directly off the DOM */
onClick: S.nav.navUpLevelClick,
title: "Go to Parent Node"
}, "btn-primary");
}
if (!S.nav.displayingRepositoryRoot(state)) {
prevButton = new IconButton("fa-chevron-circle-left", null, {
onClick: S.nav.navToPrev,
title: "Go to Previous Node"
});
nextButton = new IconButton("fa-chevron-circle-right", null, {
onClick: S.nav.navToNext,
title: "Go to Next Node"
});
}
}
}
if (isPageRootNode || (node.hasChildren && state.userPreferences.editMode)) {
searchButton = new Icon({
className: "fa fa-search fa-lg buttonBarIcon",
title: "Search underneath Node",
nid: node.id,
onClick: S.nav.runSearch
});
timelineButton = new Icon({
className: "fa fa-clock-o fa-lg buttonBarIcon",
title: "View Timeline (by Mod Time)",
nid: node.id,
onClick: S.nav.runTimeline
});
// nodeFeedButton = new Icon({
// className: "fa fa-comments fa-lg buttonBarIcon",
// title: "Open a Feed View of this Node",
// nid: node.id,
// onClick: S.nav.openNodeFeed
// });
}
let btnArray: Comp[] = [openButton, upLevelButton, insertNodeButton, createSubNodeButton, editNodeButton, prevButton, nextButton,
new Span(null, { className: "float-right" }, [moveNodeUpButton, //
moveNodeDownButton, cutNodeButton, deleteNodeButton, nodeFeedButton, searchButton, timelineButton, pasteButtons])];
if (this.extraButtons) {
btnArray = btnArray.concat(this.extraButtons);
}
let buttonBar = new ButtonBar(btnArray, null, "marginLeftIfNotFirst");
if (buttonBar && !buttonBar.hasChildren()) {
buttonBar = null;
}
this.setChildren([selButton, encIcon, sharedIcon, buttonBar]);
}
} | the_stack |
import { ChangeDetectorRef, Component, Input, OnDestroy, OnInit } from '@angular/core';
import { ActivatedRoute, Router } from '@angular/router';
import { BehaviorSubject, from as observableFrom, Observable, Subscription } from 'rxjs';
import {
concatMap,
distinctUntilChanged,
filter,
map,
reduce,
scan,
startWith,
take
} from 'rxjs/operators';
import { TranslateService } from '@ngx-translate/core';
import { ResourcePolicyService } from '../../core/resource-policy/resource-policy.service';
import {
getFirstSucceededRemoteDataPayload,
getFirstSucceededRemoteDataWithNotEmptyPayload,
getAllSucceededRemoteData
} from '../../core/shared/operators';
import { ResourcePolicy } from '../../core/resource-policy/models/resource-policy.model';
import { DSONameService } from '../../core/breadcrumbs/dso-name.service';
import { Group } from '../../core/eperson/models/group.model';
import { GroupDataService } from '../../core/eperson/group-data.service';
import { hasValue, isEmpty, isNotEmpty } from '../empty.util';
import { EPerson } from '../../core/eperson/models/eperson.model';
import { EPersonDataService } from '../../core/eperson/eperson-data.service';
import { RequestService } from '../../core/data/request.service';
import { NotificationsService } from '../notifications/notifications.service';
import { dateToString, stringToNgbDateStruct } from '../date.util';
import { followLink } from '../utils/follow-link-config.model';
import { ACCESS_CONTROL_MODULE_PATH } from '../../app-routing-paths';
import { GROUP_EDIT_PATH } from '../../access-control/access-control-routing-paths';
/**
 * Pairing of a resource policy with its selection state in the list UI.
 */
interface ResourcePolicyCheckboxEntry {
    // the policy id (mirrors policy.id; used to track entries)
    id: string;
    // the policy itself
    policy: ResourcePolicy;
    // whether the row's checkbox is currently ticked
    checked: boolean;
}
@Component({
selector: 'ds-resource-policies',
styleUrls: ['./resource-policies.component.scss'],
templateUrl: './resource-policies.component.html'
})
/**
* Component that shows the policies for given resource
*/
export class ResourcePoliciesComponent implements OnInit, OnDestroy {
/**
* The resource UUID
* @type {string}
*/
@Input() public resourceUUID: string;
/**
* The resource type (e.g. 'item', 'bundle' etc) used as key to build automatically translation label
* @type {string}
*/
@Input() public resourceType: string;
/**
* A boolean representing if component is active
* @type {boolean}
*/
private isActive: boolean;
/**
* A boolean representing if a submission delete operation is pending
* @type {BehaviorSubject<boolean>}
*/
private processingDelete$ = new BehaviorSubject<boolean>(false);
/**
* The list of policies for given resource
* @type {BehaviorSubject<ResourcePolicyCheckboxEntry[]>}
*/
private resourcePoliciesEntries$: BehaviorSubject<ResourcePolicyCheckboxEntry[]> =
new BehaviorSubject<ResourcePolicyCheckboxEntry[]>([]);
/**
* Array to track all subscriptions and unsubscribe them onDestroy
* @type {Array}
*/
private subs: Subscription[] = [];
/**
* Initialize instance variables
*
* @param {ChangeDetectorRef} cdr
* @param {DSONameService} dsoNameService
* @param {EPersonDataService} ePersonService
* @param {GroupDataService} groupService
* @param {NotificationsService} notificationsService
* @param {RequestService} requestService
* @param {ResourcePolicyService} resourcePolicyService
* @param {ActivatedRoute} route
* @param {Router} router
* @param {TranslateService} translate
*/
constructor(
private cdr: ChangeDetectorRef,
private dsoNameService: DSONameService,
private ePersonService: EPersonDataService,
private groupService: GroupDataService,
private notificationsService: NotificationsService,
private requestService: RequestService,
private resourcePolicyService: ResourcePolicyService,
private route: ActivatedRoute,
private router: Router,
private translate: TranslateService
) {
}
/**
* Initialize the component, setting up the resource's policies
*/
ngOnInit(): void {
this.isActive = true;
this.initResourcePolicyLIst();
}
/**
* Check if there are any selected resource's policies to be deleted
*
* @return {Observable<boolean>}
*/
canDelete(): Observable<boolean> {
return observableFrom(this.resourcePoliciesEntries$.value).pipe(
filter((entry: ResourcePolicyCheckboxEntry) => entry.checked),
reduce((acc: any, value: any) => [...acc, value], []),
map((entries: ResourcePolicyCheckboxEntry[]) => isNotEmpty(entries)),
distinctUntilChanged()
);
}
/**
* Delete the selected resource's policies
*/
deleteSelectedResourcePolicies(): void {
this.processingDelete$.next(true);
const policiesToDelete: ResourcePolicyCheckboxEntry[] = this.resourcePoliciesEntries$.value
.filter((entry: ResourcePolicyCheckboxEntry) => entry.checked);
this.subs.push(
observableFrom(policiesToDelete).pipe(
concatMap((entry: ResourcePolicyCheckboxEntry) => this.resourcePolicyService.delete(entry.policy.id)),
scan((acc: any, value: any) => [...acc, value], []),
filter((results: boolean[]) => results.length === policiesToDelete.length),
take(1),
).subscribe((results: boolean[]) => {
const failureResults = results.filter((result: boolean) => !result);
if (isEmpty(failureResults)) {
this.notificationsService.success(null, this.translate.get('resource-policies.delete.success.content'));
} else {
this.notificationsService.error(null, this.translate.get('resource-policies.delete.failure.content'));
}
this.requestService.setStaleByHrefSubstring(this.resourceUUID);
this.processingDelete$.next(false);
})
);
}
/**
* Returns a date in simplified format (YYYY-MM-DD).
*
* @param date
* @return a string with formatted date
*/
formatDate(date: string): string {
return isNotEmpty(date) ? dateToString(stringToNgbDateStruct(date)) : '';
}
/**
* Return the ePerson's name which the given policy is linked to
*
* @param policy The resource policy
*/
getEPersonName(policy: ResourcePolicy): Observable<string> {
// TODO to be reviewed when https://github.com/DSpace/dspace-angular/issues/644 will be resolved
// return this.ePersonService.findByHref(policy._links.eperson.href).pipe(
return policy.eperson.pipe(
filter(() => this.isActive),
getFirstSucceededRemoteDataWithNotEmptyPayload(),
map((eperson: EPerson) => this.dsoNameService.getName(eperson)),
startWith('')
);
}
/**
* Return the group's name which the given policy is linked to
*
* @param policy The resource policy
*/
getGroupName(policy: ResourcePolicy): Observable<string> {
// TODO to be reviewed when https://github.com/DSpace/dspace-angular/issues/644 will be resolved
// return this.groupService.findByHref(policy._links.group.href).pipe(
return policy.group.pipe(
filter(() => this.isActive),
getFirstSucceededRemoteDataWithNotEmptyPayload(),
map((group: Group) => this.dsoNameService.getName(group)),
startWith('')
);
}
/**
* Return all resource's policies
*
* @return an observable that emits all resource's policies
*/
getResourcePolicies(): Observable<ResourcePolicyCheckboxEntry[]> {
return this.resourcePoliciesEntries$.asObservable();
}
/**
* Check whether the given policy is linked to a ePerson
*
* @param policy The resource policy
* @return an observable that emits true when the policy is linked to a ePerson, false otherwise
*/
hasEPerson(policy): Observable<boolean> {
// TODO to be reviewed when https://github.com/DSpace/dspace-angular/issues/644 will be resolved
// return this.ePersonService.findByHref(policy._links.eperson.href).pipe(
return policy.eperson.pipe(
filter(() => this.isActive),
getFirstSucceededRemoteDataPayload(),
map((eperson: EPerson) => isNotEmpty(eperson)),
startWith(false)
);
}
/**
* Check whether the given policy is linked to a group
*
* @param policy The resource policy
* @return an observable that emits true when the policy is linked to a group, false otherwise
*/
hasGroup(policy): Observable<boolean> {
// TODO to be reviewed when https://github.com/DSpace/dspace-angular/issues/644 will be resolved
// return this.groupService.findByHref(policy._links.group.href).pipe(
return policy.group.pipe(
filter(() => this.isActive),
getFirstSucceededRemoteDataPayload(),
map((group: Group) => isNotEmpty(group)),
startWith(false)
);
}
/**
* Initialize the resource's policies list
*/
initResourcePolicyLIst() {
this.subs.push(this.resourcePolicyService.searchByResource(this.resourceUUID, null, false, true,
followLink('eperson'), followLink('group')).pipe(
filter(() => this.isActive),
getAllSucceededRemoteData()
).subscribe((result) => {
const entries = result.payload.page.map((policy: ResourcePolicy) => ({
id: policy.id,
policy: policy,
checked: false
}));
this.resourcePoliciesEntries$.next(entries);
// TODO detectChanges still needed?
this.cdr.detectChanges();
}));
}
/**
* Return a boolean representing if a delete operation is pending
*
* @return {Observable<boolean>}
*/
isProcessingDelete(): Observable<boolean> {
return this.processingDelete$.asObservable();
}
/**
* Redirect to resource policy creation page
*/
redirectToResourcePolicyCreatePage(): void {
this.router.navigate([`./create`], {
relativeTo: this.route,
queryParams: {
policyTargetId: this.resourceUUID,
targetType: this.resourceType
}
});
}
/**
* Redirect to resource policy editing page
*
* @param policy The resource policy
*/
redirectToResourcePolicyEditPage(policy: ResourcePolicy): void {
this.router.navigate([`./edit`], {
relativeTo: this.route,
queryParams: {
policyId: policy.id
}
});
}
/**
* Redirect to group edit page
*
* @param policy The resource policy
*/
redirectToGroupEditPage(policy: ResourcePolicy): void {
this.subs.push(
this.groupService.findByHref(policy._links.group.href, false).pipe(
filter(() => this.isActive),
getFirstSucceededRemoteDataPayload(),
map((group: Group) => group.id)
).subscribe((groupUUID) => {
this.router.navigate([ACCESS_CONTROL_MODULE_PATH, GROUP_EDIT_PATH, groupUUID]);
})
);
}
/**
* Select/unselect all checkbox in the list
*/
selectAllCheckbox(event: any): void {
const checked = event.target.checked;
this.resourcePoliciesEntries$.value.forEach((entry: ResourcePolicyCheckboxEntry) => entry.checked = checked);
}
/**
* Select/unselect checkbox
*/
selectCheckbox(policyEntry: ResourcePolicyCheckboxEntry, checked: boolean) {
policyEntry.checked = checked;
}
/**
* Unsubscribe from all subscriptions
*/
ngOnDestroy(): void {
this.isActive = false;
this.resourcePoliciesEntries$ = null;
this.subs
.filter((subscription) => hasValue(subscription))
.forEach((subscription) => subscription.unsubscribe());
}
} | the_stack |
import ndarray, { NdArray } from "ndarray";
import prefixSum from "ndarray-prefix-sum";
import { Dimension, View, View1D, View2D, Views } from "../api";
import { Interval } from "../basic";
import { numBins, stepSize } from "../util";
import { BinConfig } from "./../api";
import { CUM_ARR_TYPE, HIST_TYPE } from "./../consts";
import { AsyncIndex, DataBase } from "./db";
/**
 * SQL fragments describing one binned dimension: the SELECT expression that
 * computes the integer bin index, and the WHERE predicate restricting rows
 * to the bin domain.
 */
interface Bin {
    select: string;
    where: string;
}
export abstract class SQLDB<V extends string, D extends string>
implements DataBase<V, D>
{
// SQL back ends resolve queries asynchronously, so this driver is non-blocking.
public readonly blocking: boolean = false;

/**
 * @param table   name of the database table to query
 * @param nameMap optional mapping from dimension names to physical column names
 */
constructor(
    private readonly table: string,
    private readonly nameMap?: Map<D, string>
) {}

// Set up the database connection; implemented by concrete drivers.
public abstract initialize();

// Execute a SQL statement and resolve with the result rows.
protected abstract query(q: string): Promise<Iterable<Record<string, any>>>;
/**
 * Resolve a dimension to its physical column name, falling back to the
 * dimension name itself when no mapping is configured.
 */
private getName(dimension: D) {
    const mapped = this.nameMap ? this.nameMap.get(dimension) : undefined;
    return mapped ?? dimension;
}
/**
 * Build the SQL fragments that bin a dimension: a SELECT expression computing
 * the integer bin index and a WHERE predicate restricting values to
 * [start, stop].
 */
private binSQL(dimension: D, binConfig: BinConfig) {
    const field = this.getName(dimension);
    return {
        // Bin index = int cast of (value - start) / step. The cast truncates
        // toward zero, which matches floor here because the WHERE clause
        // keeps value >= start (so the quotient is non-negative).
        select: `cast(
(${field} - cast(${binConfig.start} as float))
/ cast(${binConfig.step} as float)
as int)`,
        where: `${field} BETWEEN ${binConfig.start} AND ${binConfig.stop}`
    };
}
/**
 * Like binSQL, but when a pixel budget is given, re-derives the bin step so
 * the dimension is binned at (roughly) screen resolution.
 */
private binSQLPixel(dimension: D, binConfig: BinConfig, pixels?: number) {
    let step = binConfig.step;
    if (pixels !== undefined) {
        step = stepSize(binConfig, pixels);
    }
    return this.binSQL(dimension, { ...binConfig, step });
}
public async length(): Promise<number> {
const result = await this.query(
`SELECT count(*) AS cnt FROM ${this.table}`
);
return result[Symbol.iterator]().next().value["cnt"];
}
/**
 * Compute a one-dimensional histogram over `dimension`.
 *
 * Always computes the unfiltered counts (`noBrush`); when `brushes` contains
 * active selections, a second query computes counts additionally restricted
 * by those brushes (`hist`). With no brushes, `hist` aliases the same array
 * as `noBrush`.
 */
public async histogram(
    dimension: Dimension<D>,
    brushes?: Map<D, Interval<number>>
) {
    const bin = dimension.binConfig!;
    const binCount = numBins(bin);
    const bSql = this.binSQL(dimension.name, bin);
    const noBrush = ndarray(new HIST_TYPE(binCount));
    const hasBrushes = brushes && brushes.size > 0;
    // Reuse the unfiltered array when there is nothing to filter by.
    const hist = hasBrushes ? ndarray(new HIST_TYPE(binCount)) : noBrush;
    const result = await this.query(`
SELECT
${bSql.select} AS key,
count(*) AS cnt
FROM ${this.table}
WHERE ${bSql.where}
GROUP BY key
`);
    for (const { key, cnt } of result) {
        noBrush.set(key, cnt);
    }
    if (hasBrushes) {
        // Apply every brush as an additional conjunctive range predicate.
        const where = [...this.getWhereClauses(brushes!).values()].join(" AND ");
        const result = await this.query(`
SELECT
${bSql.select} AS key,
count(*) AS cnt
FROM ${this.table}
WHERE ${bSql.where} AND ${where}
GROUP BY key
`);
        for (const { key, cnt } of result) {
            hist.set(key, cnt);
        }
    }
    return {
        hist,
        noBrush
    };
}
/**
 * Compute a two-dimensional histogram (heatmap) over the two given
 * dimensions. Returns an ndarray indexed as [binX, binY].
 */
public async heatmap(dimensions: [Dimension<D>, Dimension<D>]) {
    const [binX, binY] = dimensions.map(d => d.binConfig!);
    const [numBinsX, numBinsY] = [binX, binY].map(numBins);
    const bSqlX = this.binSQL(dimensions[0].name, binX);
    const bSqlY = this.binSQL(dimensions[1].name, binY);
    // Flat typed-array buffer viewed with shape [numBinsX, numBinsY].
    const heat = ndarray(new HIST_TYPE(numBinsX * numBinsY), [
        numBinsX,
        numBinsY
    ]);
    const result = await this.query(`
SELECT
${bSqlX.select} AS keyX,
${bSqlY.select} AS keyY,
count(*) AS cnt
FROM ${this.table}
WHERE
${bSqlX.where} AND ${bSqlY.where}
GROUP BY keyX, keyY
`);
    for (const { keyX, keyY, cnt } of result) {
        heat.set(keyX, keyY, cnt);
    }
    return heat;
}
/**
 * Translate brush extents into one BETWEEN predicate per dimension.
 */
private getWhereClauses(brushes: Map<D, Interval<number>>) {
    const filters = new Map<D, string>();
    brushes.forEach((extent, dimension) => {
        const field = this.getName(dimension);
        filters.set(dimension, `${field} BETWEEN ${extent[0]} AND ${extent[1]}`);
    });
    return filters;
}
/**
 * Compute one data-cube slice for a 1D active dimension.
 *
 * For each pixel-level bin of the active dimension (`binActive`), counts are
 * accumulated per bin of the passive `view` (0D: scalar; 1D: histogram;
 * 2D: heatmap) and then turned into cumulative sums along the pixel axis so
 * that brush counts can later be derived by differencing. Rows outside the
 * active bin domain map to the sentinel key -1 and only contribute to the
 * unfiltered totals (`noBrush`).
 *
 * @param view      the passive view to aggregate for
 * @param filters   WHERE fragments for all current brushes (keyed by dimension)
 * @param binActive SQL fragments binning the active dimension at pixel resolution
 * @param numPixels number of pixel bins along the active dimension
 */
public async cubeSlice1D(
    view: View<D>,
    filters: Map<D, string>,
    binActive: Bin,
    numPixels: number
) {
    let hists: NdArray;
    let noBrush: NdArray;
    // A view must not be filtered by brushes on its own dimension(s).
    const relevantFilters = new Map(filters);
    if (view.type === "0D") {
        // use all filters
    } else if (view.type === "1D") {
        relevantFilters.delete(view.dimension.name);
    } else {
        relevantFilters.delete(view.dimensions[0].name);
        relevantFilters.delete(view.dimensions[1].name);
    }
    const where = [...relevantFilters.values()].join(" AND ");
    let query: string;
    // keyActive: pixel bin of the active dimension, or -1 when out of domain.
    const select = `CASE
WHEN ${binActive.where}
THEN ${binActive.select}
ELSE -1 END AS keyActive,
count(*) AS cnt`;
    if (view.type === "0D") {
        hists = ndarray(new CUM_ARR_TYPE(numPixels));
        noBrush = ndarray(new HIST_TYPE(1), [1]);
        query = `
SELECT
${select}
FROM ${this.table}
${where ? `WHERE ${where}` : ""}
GROUP BY keyActive`;
    } else if (view.type === "1D") {
        const dim = view.dimension;
        const binConfig = dim.binConfig!;
        const bin = this.binSQL(dim.name, binConfig);
        const binCount = numBins(binConfig);
        hists = ndarray(new CUM_ARR_TYPE(numPixels * binCount), [
            numPixels,
            binCount
        ]);
        noBrush = ndarray(new HIST_TYPE(binCount), [binCount]);
        query = `
SELECT
${select},
${bin.select} AS key
FROM ${this.table}
WHERE ${bin.where} ${where ? `AND ${where}` : ""}
GROUP BY keyActive, key`;
    } else {
        const dimensions = view.dimensions;
        const binConfigs = dimensions.map(d => d.binConfig!);
        const [numBinsX, numBinsY] = binConfigs.map(numBins);
        const [binX, binY] = [0, 1].map(i =>
            this.binSQL(dimensions[i].name, binConfigs[i])
        );
        hists = ndarray(new CUM_ARR_TYPE(numPixels * numBinsX * numBinsY), [
            numPixels,
            numBinsX,
            numBinsY
        ]);
        noBrush = ndarray(new HIST_TYPE(numBinsX * numBinsY), [
            numBinsX,
            numBinsY
        ]);
        query = `
SELECT
${select},
${binX.select} as keyX,
${binY.select} as keyY
FROM ${this.table}
WHERE ${binX.where} AND ${binY.where} ${where ? `AND ${where}` : ""}
GROUP BY keyActive, keyX, keyY`;
    }
    const res = await this.query(query);
    if (view.type === "0D") {
        for (const { keyActive, cnt } of res) {
            if (keyActive >= 0) {
                hists.set(keyActive, cnt);
            }
            // every row counts toward the unfiltered total
            noBrush.data[0] += cnt;
        }
        prefixSum(hists);
    } else if (view.type === "1D") {
        for (const { keyActive, key, cnt } of res) {
            if (keyActive >= 0) {
                hists.set(keyActive, key, cnt);
            }
            noBrush.data[noBrush.index(key)] += cnt;
        }
        // compute cumulative sums
        for (let x = 0; x < hists.shape[1]; x++) {
            prefixSum(hists.pick(null, x));
        }
    } else if (view.type === "2D") {
        for (const { keyActive, keyX, keyY, cnt } of res) {
            if (keyActive >= 0) {
                hists.set(keyActive, keyX, keyY, cnt);
            }
            noBrush.data[noBrush.index(keyX, keyY)] += cnt;
        }
        // compute cumulative sums
        for (let x = 0; x < hists.shape[1]; x++) {
            for (let y = 0; y < hists.shape[2]; y++) {
                prefixSum(hists.pick(null, x, y));
            }
        }
    }
    return { hists, noBrush };
}
public loadData1D(
activeView: View1D<D>,
pixels: number,
views: Views<V, D>,
brushes: Map<D, Interval<number>>
) {
const t0 = performance.now();
const filters = this.getWhereClauses(brushes);
const cubes: AsyncIndex<V> = new Map();
const activeDim = activeView.dimension;
const binActive = this.binSQLPixel(
activeDim.name,
activeDim.binConfig!,
pixels
);
const numPixels = pixels + 1; // extending by one pixel so we can compute the right diff later
const promises: Promise<any>[] = [];
for (const [name, view] of views) {
const slice = this.cubeSlice1D(view, filters, binActive, numPixels);
promises.push(slice);
cubes.set(name, slice);
}
Promise.all(promises).then(() => {
console.info(`Build index: ${performance.now() - t0}ms`);
});
return cubes;
}
  /**
   * Compute the data cube slice for one passive view while the active view
   * is two-dimensional (both active dimensions discretized into pixel bins).
   *
   * Mirrors [[cubeSlice1D]] but the row's active key is the pixel pair
   * (keyActiveX, keyActiveY), so every histogram gains two leading axes and
   * the prefix sums run over the flattened 2D pixel grid.
   *
   * @param view the passive view (0D, 1D or 2D) to compute the cube for
   * @param filters SQL filter clause per brushed dimension
   * @param binActiveX SQL snippets binning the first active dimension to pixels
   * @param binActiveY SQL snippets binning the second active dimension to pixels
   * @param numPixelsX number of pixel bins along the first active dimension
   * @param numPixelsY number of pixel bins along the second active dimension
   * @returns `hists`: cumulative counts indexed by [pixelX, pixelY, ...viewBins];
   *          `noBrush`: the view's histogram ignoring the active-dimension brush
   */
  public async cubeSlice2D(
    view: View<D>,
    filters: Map<D, string>,
    binActiveX: Bin,
    binActiveY: Bin,
    numPixelsX: number,
    numPixelsY: number
  ) {
    let hists: NdArray;
    let noBrush: NdArray;
    // a view must not be filtered by brushes on its own dimension(s)
    const relevantFilters = new Map(filters);
    if (view.type === "0D") {
      // use all filters
    } else if (view.type === "1D") {
      relevantFilters.delete(view.dimension.name);
    } else {
      relevantFilters.delete(view.dimensions[0].name);
      relevantFilters.delete(view.dimensions[1].name);
    }
    const where = [...relevantFilters.values()].join(" AND ");
    let query: string;
    // a row is assigned an active pixel pair only when it passes BOTH active
    // bin predicates; otherwise both keys are -1 (still counted in noBrush)
    const select = `CASE
    WHEN ${binActiveX.where} AND ${binActiveY.where}
    THEN ${binActiveX.select}
    ELSE -1 END AS keyActiveX,
    CASE
    WHEN ${binActiveX.where} AND ${binActiveY.where}
    THEN ${binActiveY.select}
    ELSE -1 END AS keyActiveY,
    count(*) AS cnt`;
    if (view.type === "0D") {
      hists = ndarray(new CUM_ARR_TYPE(numPixelsX * numPixelsY), [
        numPixelsX,
        numPixelsY
      ]);
      noBrush = ndarray(new HIST_TYPE(1), [1]);
      query = `
      SELECT
        ${select}
      FROM ${this.table}
      ${where ? `WHERE ${where}` : ""}
      GROUP BY keyActiveX, keyActiveY`;
    } else if (view.type === "1D") {
      const dim = view.dimension;
      const binConfig = dim.binConfig!;
      const bin = this.binSQL(dim.name, binConfig);
      const binCount = numBins(binConfig);
      hists = ndarray(new CUM_ARR_TYPE(numPixelsX * numPixelsY * binCount), [
        numPixelsX,
        numPixelsY,
        binCount
      ]);
      noBrush = ndarray(new HIST_TYPE(binCount), [binCount]);
      query = `
      SELECT
        ${select},
        ${bin.select} AS key
      FROM ${this.table}
      WHERE ${bin.where} ${where ? `AND ${where}` : ""}
      GROUP BY keyActiveX, keyActiveY, key`;
    } else {
      const dimensions = view.dimensions;
      const binConfigs = dimensions.map(d => d.binConfig!);
      const [numBinsX, numBinsY] = binConfigs.map(numBins);
      const [binX, binY] = [0, 1].map(i =>
        this.binSQL(dimensions[i].name, binConfigs[i])
      );
      hists = ndarray(
        new CUM_ARR_TYPE(numPixelsX * numPixelsY * numBinsX * numBinsY),
        [numPixelsX, numPixelsY, numBinsX, numBinsY]
      );
      noBrush = ndarray(new HIST_TYPE(numBinsX * numBinsY), [
        numBinsX,
        numBinsY
      ]);
      query = `
      SELECT
        ${select},
        ${binX.select} AS keyX,
        ${binY.select} AS keyY
      FROM ${this.table}
      WHERE ${binX.where} AND ${binY.where} ${where ? `AND ${where}` : ""}
      GROUP BY keyActiveX, keyActiveY, keyX, keyY`;
    }
    const res = await this.query(query);
    // scatter counts into the ndarrays, then prefix-sum over the pixel grid
    if (view.type === "0D") {
      for (const { keyActiveX, keyActiveY, cnt } of res) {
        if (keyActiveX >= 0 && keyActiveY >= 0) {
          hists.set(keyActiveX, keyActiveY, cnt);
        }
        noBrush.data[0] += cnt;
      }
      prefixSum(hists);
    } else if (view.type === "1D") {
      for (const { keyActiveX, keyActiveY, key, cnt } of res) {
        if (keyActiveX >= 0 && keyActiveY >= 0) {
          hists.set(keyActiveX, keyActiveY, key, cnt);
        }
        noBrush.data[noBrush.index(key)] += cnt;
      }
      // compute cumulative sums
      for (let x = 0; x < hists.shape[2]; x++) {
        prefixSum(hists.pick(null, null, x));
      }
    } else if (view.type === "2D") {
      for (const { keyActiveX, keyActiveY, keyX, keyY, cnt } of res) {
        if (keyActiveX >= 0 && keyActiveY >= 0) {
          hists.set(keyActiveX, keyActiveY, keyX, keyY, cnt);
        }
        noBrush.data[noBrush.index(keyX, keyY)] += cnt;
      }
      // compute cumulative sums
      for (let x = 0; x < hists.shape[2]; x++) {
        for (let y = 0; y < hists.shape[3]; y++) {
          prefixSum(hists.pick(null, null, x, y));
        }
      }
    }
    return { hists, noBrush };
  }
public loadData2D(
activeView: View2D<D>,
pixels: [number, number],
views: Views<V, D>,
brushes: Map<D, Interval<number>>
) {
const t0 = performance.now();
const filters = this.getWhereClauses(brushes);
const cubes: AsyncIndex<V> = new Map();
const [activeDimX, activeDimY] = activeView.dimensions;
const binActiveX = this.binSQLPixel(
activeDimX.name,
activeDimX.binConfig!,
pixels[0]
);
const binActiveY = this.binSQLPixel(
activeDimY.name,
activeDimY.binConfig!,
pixels[1]
);
const [numPixelsX, numPixelsY] = [pixels[0] + 1, pixels[1] + 1];
const promises: Promise<any>[] = [];
for (const [name, view] of views) {
const slice = this.cubeSlice2D(
view,
filters,
binActiveX,
binActiveY,
numPixelsX,
numPixelsY
);
promises.push(slice);
cubes.set(name, slice);
}
Promise.all(promises).then(() => {
console.info(`Build index: ${performance.now() - t0}ms`);
});
return cubes;
}
public async getDimensionExtent(
dimension: Dimension<D>
): Promise<Interval<number>> {
const field = this.getName(dimension.name);
const result = await this.query(`
SELECT
MIN(${field}) AS _min, MAX(${field}) AS _max
FROM ${this.table}
`);
const { _min, _max } = result[0];
return [_min, _max];
}
} | the_stack |
import { Option, Some, None } from "./Option";
import { Vector } from "./Vector";
import { WithEquality, getHashCode,
areEqual, Ordering, ToOrderable } from "./Comparison";
import { contractTrueEquality } from "./Contract";
import { inspect } from "./Value";
import { HashMap } from "./HashMap";
import { HashSet } from "./HashSet";
import { Seq, IterableArray } from "./Seq";
import { Stream } from "./Stream";
import * as SeqHelpers from "./SeqHelpers";
/**
 * Holds the "static methods" for [[LinkedList]]
 */
export class LinkedListStatic {
    /**
     * The empty linked list.
     */
    empty<T>(): LinkedList<T> {
        return <EmptyLinkedList<T>>emptyLinkedList;
    }
    /**
     * Create a LinkedList with the elements you give.
     */
    of<T>(elt: T, ...elts:T[]): ConsLinkedList<T>;
    of<T>(...elts:T[]): LinkedList<T>;
    of<T>(...elts:T[]): LinkedList<T> {
        return LinkedList.ofIterable(elts);
    }
    /**
     * Build a LinkedList from any iterable, which means also
     * an array for instance.
     * @param T the item type
     */
    ofIterable<T>(elts: Iterable<T>): LinkedList<T> {
        // cons the items in encounter order then reverse once at the end
        let reversed: LinkedList<T> = <EmptyLinkedList<T>>emptyLinkedList;
        for (const elt of elts) {
            reversed = new ConsLinkedList(elt, reversed);
        }
        return reversed.reverse();
    }
    /**
     * Curried type guard for LinkedList.
     * Sometimes needed also due to https://github.com/Microsoft/TypeScript/issues/20218
     *
     *     Vector.of(LinkedList.of(1), LinkedList.empty<number>())
     *         .filter(LinkedList.isEmpty)
     *     => Vector.of(LinkedList.empty<number>())
     */
    isEmpty<T>(l: LinkedList<T>): l is EmptyLinkedList<T> {
        return l.isEmpty();
    }
    /**
     * Curried type guard for LinkedList.
     * Sometimes needed also due to https://github.com/Microsoft/TypeScript/issues/20218
     *
     *     Vector.of(LinkedList.of(1), LinkedList.empty<number>())
     *         .filter(LinkedList.isNotEmpty)
     *         .map(l => l.head().get()+1)
     *     => Vector.of(2)
     */
    isNotEmpty<T>(l: LinkedList<T>): l is ConsLinkedList<T> {
        return !l.isEmpty();
    }
    /**
     * Dual to the foldRight function. Build a collection from a seed.
     * Takes a starting element and a function.
     * It applies the function on the starting element; if the
     * function returns None, it stops building the list, if it
     * returns Some of a pair, it adds the first element to the result
     * and takes the second element as a seed to keep going.
     *
     *     LinkedList.unfoldRight(
     *          10, x=>Option.of(x)
     *              .filter(x => x!==0)
     *              .map<[number,number]>(x => [x,x-1]))
     *     => LinkedList.of(10, 9, 8, 7, 6, 5, 4, 3, 2, 1)
     */
    unfoldRight<T,U>(seed: T, fn: (x:T)=>Option<[U,T]>): LinkedList<U> {
        let acc: LinkedList<U> = <EmptyLinkedList<U>>emptyLinkedList;
        let step = fn(seed);
        while (!step.isNone()) {
            const [value, nextSeed] = step.get();
            acc = new ConsLinkedList(value, acc);
            step = fn(nextSeed);
        }
        return acc.reverse();
    }
    /**
     * Combine any number of iterables you give in as
     * parameters to produce a new collection which combines all,
     * in tuples. For instance:
     *
     *     LinkedList.zip(LinkedList.of(1,2,3), ["a","b","c"], Vector.of(8,9,10))
     *     => LinkedList.of([1,"a",8], [2,"b",9], [3,"c",10])
     *
     * The result collection will have the length of the shorter
     * of the input iterables. Extra elements will be discarded.
     *
     * Also see the non-static version [[ConsLinkedList.zip]], which only combines two
     * collections.
     * @param A A is the type of the tuple that'll be generated
     *          (`[number,string,number]` for the code sample)
     */
    zip<A extends any[]>(...iterables: IterableArray<A>): LinkedList<A> {
        const iterators = iterables.map(it => it[Symbol.iterator]());
        let acc = LinkedList.empty<A>();
        while (true) {
            // advance every input; stop as soon as any one is exhausted
            const steps = iterators.map(it => it.next());
            if (steps.some(step => step.done)) {
                break;
            }
            acc = acc.prepend(<any>steps.map(step => step.value));
        }
        return acc.reverse();
    }
}
/**
 * The LinkedList constant allows to call the LinkedList "static" methods
 * (for instance `LinkedList.of(...)` or `LinkedList.ofIterable(...)`).
 */
export const LinkedList = new LinkedListStatic();
/**
 * A LinkedList is either [[EmptyLinkedList]] or [[ConsLinkedList]];
 * "static methods" available through [[LinkedListStatic]].
 * @param T the item type
 */
export type LinkedList<T> = EmptyLinkedList<T> | ConsLinkedList<T>;
/**
 * EmptyLinkedList is the empty linked list; every non-empty
 * linked list also has a pointer to an empty linked list
 * after its last element. Most operations on it are trivial
 * constant-time no-ops or constant results.
 * "static methods" available through [[LinkedListStatic]]
 * @param T the item type
 */
export class EmptyLinkedList<T> implements Seq<T> {
    /**
     * @hidden
     */
    hasTrueEquality(): boolean {
        return SeqHelpers.seqHasTrueEquality<T>(this);
    }
    /**
     * Implementation of the Iterator interface.
     * The iterator over the empty list is immediately done.
     */
    [Symbol.iterator](): Iterator<T> {
        return {
            next(): IteratorResult<T> {
                return {
                    done: true,
                    value: <any>undefined
                };
            }
        }
    }
    /**
     * @hidden
     */
    readonly className: "EmptyLinkedList" = <any>undefined; // https://stackoverflow.com/a/47841595/516188
    /**
     * View this EmptyLinkedList as a LinkedList. Useful to help typescript type
     * inference sometimes.
     */
    asLinkedList(): LinkedList<T> {
        return this;
    }
    /**
     * Get the length of the collection.
     */
    length(): number {
        return 0;
    }
    /**
     * If the collection contains a single element,
     * return Some of its value, otherwise return None.
     */
    single(): Option<T> {
        return Option.none<T>();
    }
    /**
     * true if the collection is empty, false otherwise.
     */
    isEmpty(): this is EmptyLinkedList<T> {
        return true;
    }
    /**
     * Get the first value of the collection, if any.
     * In this case the list is empty, so returns Option.none
     */
    head(): None<T> {
        return <None<T>>Option.none<T>();
    }
    /**
     * Get all the elements in the collection but the first one.
     * If the collection is empty, return None.
     */
    tail(): Option<LinkedList<T>> {
        return Option.none<LinkedList<T>>();
    }
    /**
     * Get the last value of the collection, if any.
     * returns Option.Some if the collection is not empty,
     * Option.None if it's empty.
     */
    last(): Option<T> {
        return Option.none<T>();
    }
    /**
     * Retrieve the element at index idx.
     * Returns an option because the collection may
     * contain less elements than the index.
     *
     * Careful this is going to have poor performance
     * on LinkedList, which is not a good data structure
     * for random access!
     */
    get(idx: number): Option<T> {
        return Option.none<T>();
    }
    /**
     * Search for an item matching the predicate you pass,
     * return Option.Some of that element if found,
     * Option.None otherwise.
     */
    find(predicate:(v:T)=>boolean): Option<T> {
        return Option.none<T>();
    }
    /**
     * Returns true if the item is in the collection,
     * false otherwise.
     */
    contains(v:T&WithEquality): boolean {
        return false;
    }
    /**
     * Return a new list keeping only the first n elements
     * from this list.
     */
    take(n: number): LinkedList<T> {
        return this;
    }
    /**
     * Returns a new collection, discarding the elements
     * after the first element which fails the predicate.
     */
    takeWhile(predicate: (x:T)=>boolean): LinkedList<T> {
        return this;
    }
    /**
     * Returns a new collection, discarding the elements
     * after the first element which fails the predicate,
     * but starting from the end of the collection.
     *
     *     LinkedList.of(1,2,3,4).takeRightWhile(x => x > 2)
     *     => LinkedList.of(3,4)
     */
    takeRightWhile(predicate:(x:T)=>boolean): LinkedList<T> {
        return this;
    }
    /**
     * Returns a new collection with the first
     * n elements discarded.
     * If the collection has less than n elements,
     * returns the empty collection.
     */
    drop(n:number): LinkedList<T> {
        return this;
    }
    /**
     * Returns a new collection, discarding the first elements
     * until one element fails the predicate. All elements
     * after that point are retained.
     */
    dropWhile(predicate:(x:T)=>boolean): LinkedList<T> {
        return this;
    }
    /**
     * Returns a new collection with the last
     * n elements discarded.
     * If the collection has less than n elements,
     * returns the empty collection.
     */
    dropRight(n:number): LinkedList<T> {
        return this;
    }
    /**
     * Returns a new collection, discarding the last elements
     * until one element fails the predicate. All elements
     * before that point are retained.
     */
    dropRightWhile(predicate:(x:T)=>boolean): LinkedList<T> {
        return this;
    }
    /**
     * Reduces the collection to a single value using the
     * associative binary function you give. Since the function
     * is associative, order of application doesn't matter.
     *
     * Example:
     *
     *     LinkedList.of(1,2,3).fold(0, (a,b) => a + b);
     *     => 6
     */
    fold(zero:T, fn:(v1:T,v2:T)=>T): T {
        return zero;
    }
    /**
     * Reduces the collection to a single value.
     * Left-associative.
     *
     * Example:
     *
     *     Vector.of("a", "b", "c").foldLeft("!", (xs,x) => x+xs);
     *     => "cba!"
     *
     * @param zero The initial value
     * @param fn A function taking the previous value and
     *           the current collection item, and returning
     *           an updated value.
     */
    foldLeft<U>(zero: U, fn:(soFar:U,cur:T)=>U): U {
        return zero;
    }
    /**
     * Reduces the collection to a single value.
     * Right-associative.
     *
     * Example:
     *
     *     Vector.of("a", "b", "c").foldRight("!", (x,xs) => xs+x);
     *     => "!cba"
     *
     * @param zero The initial value
     * @param fn A function taking the current collection item and
     *           the previous value , and returning
     *           an updated value.
     */
    foldRight<U>(zero: U, fn:(cur:T, soFar:U)=>U): U {
        return zero;
    }
    /**
     * Combine this collection with the collection you give in
     * parameter to produce a new collection which combines both,
     * in pairs. For instance:
     *
     *     Vector.of(1,2,3).zip(["a","b","c"])
     *     => Vector.of([1,"a"], [2,"b"], [3,"c"])
     *
     * The result collection will have the length of the shorter
     * of both collections. Extra elements will be discarded.
     *
     * Also see [[LinkedListStatic.zip]] (static version which can
     * combine more than two iterables)
     */
    zip<U>(other: Iterable<U>): LinkedList<[T,U]> {
        return <EmptyLinkedList<[T,U]>>emptyLinkedList;
    }
    /**
     * Combine this collection with the index of the elements
     * in it. Handy if you need the index when you map on
     * the collection for instance:
     *
     *     LinkedList.of("a","b").zipWithIndex().map(([v,idx]) => v+idx);
     *     => LinkedList.of("a0", "b1")
     */
    zipWithIndex(): LinkedList<[T,number]> {
        return <EmptyLinkedList<[T,number]>><any>this;
    }
    /**
     * Reverse the collection. For instance:
     *
     *     LinkedList.of(1,2,3).reverse();
     *     => LinkedList.of(3,2,1)
     */
    reverse(): LinkedList<T> {
        return this;
    }
    /**
     * Takes a predicate; returns a pair of collections.
     * The first one is the longest prefix of this collection
     * which satisfies the predicate, and the second collection
     * is the remainder of the collection.
     *
     *    LinkedList.of(1,2,3,4,5,6).span(x => x <3)
     *    => [LinkedList.of(1,2), LinkedList.of(3,4,5,6)]
     */
    span(predicate:(x:T)=>boolean): [LinkedList<T>,LinkedList<T>] {
        return [this, this];
    }
    /**
     * Split the collection at a specific index.
     *
     *     LinkedList.of(1,2,3,4,5).splitAt(3)
     *     => [LinkedList.of(1,2,3), LinkedList.of(4,5)]
     */
    splitAt(index:number): [LinkedList<T>,LinkedList<T>] {
        return [this, this];
    }
    /**
     * Returns a pair of two collections; the first one
     * will only contain the items from this collection for
     * which the predicate you give returns true, the second
     * will only contain the items from this collection where
     * the predicate returns false.
     *
     *     LinkedList.of(1,2,3,4).partition(x => x%2===0)
     *     => [LinkedList.of(2,4),LinkedList.of(1,3)]
     */
    partition<U extends T>(predicate:(v:T)=>v is U): [LinkedList<U>,LinkedList<Exclude<T,U>>];
    partition(predicate:(x:T)=>boolean): [LinkedList<T>,LinkedList<T>];
    partition<U extends T>(predicate:(v:T)=>boolean): [LinkedList<U>,LinkedList<any>] {
        return [LinkedList.empty<U>(), LinkedList.empty<T>()];
    }
    /**
     * Group elements in the collection using a classifier function.
     * Elements are then organized in a map. The key is the value of
     * the classifier, and in value we get the list of elements
     * matching that value.
     *
     * also see [[ConsLinkedList.arrangeBy]]
     */
    groupBy<C>(classifier: (v:T)=>C & WithEquality): HashMap<C,LinkedList<T>> {
        return HashMap.empty<C,LinkedList<T>>();
    }
    /**
     * Matches each element with a unique key that you extract from it.
     * If the same key is present twice, the function will return None.
     *
     * also see [[ConsLinkedList.groupBy]]
     */
    arrangeBy<K>(getKey: (v:T)=>K&WithEquality): Option<HashMap<K,T>> {
        return SeqHelpers.arrangeBy<T,K>(this, getKey);
    }
    /**
     * Randomly reorder the elements of the collection.
     */
    shuffle(): LinkedList<T> {
        return this;
    }
    /**
     * Append an element at the end of this LinkedList.
     * Warning: appending in a loop on a linked list is going
     * to be very slow!
     */
    append(v:T): LinkedList<T> {
        return LinkedList.of(v);
    }
    /**
     * Append multiple elements at the end of this LinkedList.
     */
    appendAll(elts:Iterable<T>): LinkedList<T> {
        return LinkedList.ofIterable(elts);
    }
    /**
     * Remove multiple elements from a LinkedList
     *
     *     LinkedList.of(1,2,3,4,3,2,1).removeAll([2,4])
     *     => LinkedList.of(1,3,3,1)
     */
    removeAll(elts:Iterable<T&WithEquality>): LinkedList<T> {
        return this;
    }
    /**
     * Removes the first element matching the predicate
     * (use [[Seq.filter]] to remove all elements matching a predicate)
     */
    removeFirst(predicate: (x:T)=>boolean): LinkedList<T> {
        return this;
    }
    /**
     * Prepend an element at the beginning of the collection.
     */
    prepend(elt: T): LinkedList<T> {
        return new ConsLinkedList(elt, this);
    }
    /**
     * Prepend multiple elements at the beginning of the collection.
     */
    prependAll(elt: Iterable<T>): LinkedList<T> {
        return LinkedList.ofIterable(elt);
    }
    /**
     * Return a new collection where each element was transformed
     * by the mapper function you give.
     */
    map<U>(mapper:(v:T)=>U): LinkedList<U> {
        return <EmptyLinkedList<U>>emptyLinkedList;
    }
    /**
     * Apply the mapper function on every element of this collection.
     * The mapper function returns an Option; if the Option is a Some,
     * the value it contains is added to the result Collection, if it's
     * a None, the value is discarded.
     *
     *     LinkedList.of(1,2,6).mapOption(x => x%2===0 ?
     *         Option.of(x+1) : Option.none<number>())
     *     => LinkedList.of(3, 7)
     */
    mapOption<U>(mapper:(v:T)=>Option<U>): LinkedList<U> {
        return <EmptyLinkedList<U>>emptyLinkedList;
    }
    /**
     * Calls the function you give for each item in the collection,
     * your function returns a collection, all the collections are
     * concatenated.
     * This is the monadic bind.
     */
    flatMap<U>(mapper:(v:T)=>LinkedList<U>): LinkedList<U> {
        return <EmptyLinkedList<U>>emptyLinkedList;
    }
    /**
     * Returns true if the predicate returns true for all the
     * elements in the collection.
     * (vacuously true for the empty list)
     */
    allMatch<U extends T>(predicate:(v:T)=>v is U): this is LinkedList<U>;
    allMatch(predicate:(v:T)=>boolean): boolean;
    allMatch(predicate:(v:T)=>boolean): boolean {
        return true;
    }
    /**
     * Returns true if the predicate returns true for any
     * element in the collection.
     */
    anyMatch(predicate:(v:T)=>boolean): boolean {
        return false;
    }
    /**
     * Call a predicate for each element in the collection,
     * build a new collection holding only the elements
     * for which the predicate returned true.
     */
    filter<U extends T>(predicate:(v:T)=>v is U): LinkedList<U>;
    filter(predicate:(v:T)=>boolean): LinkedList<T>;
    filter(predicate:(v:T)=>boolean): LinkedList<T> {
        return this;
    }
    /**
     * Returns a new collection with elements
     * sorted according to the comparator you give.
     *
     *     const activityOrder = ["Writer", "Actor", "Director"];
     *     LinkedList.of({name:"George", activity: "Director"}, {name:"Robert", activity: "Actor"})
     *         .sortBy((p1,p2) => activityOrder.indexOf(p1.activity) - activityOrder.indexOf(p2.activity));
     *     => LinkedList.of({"name":"Robert","activity":"Actor"}, {"name":"George","activity":"Director"})
     *
     * also see [[ConsLinkedList.sortOn]]
     */
    sortBy(compare: (v1:T,v2:T)=>Ordering): LinkedList<T> {
        return this;
    }
    /**
     * Give a function associating a number or a string with
     * elements from the collection, and the elements
     * are sorted according to that value.
     *
     *     LinkedList.of({a:3,b:"b"},{a:1,b:"test"},{a:2,b:"a"}).sortOn(elt=>elt.a)
     *     => LinkedList.of({a:1,b:"test"},{a:2,b:"a"},{a:3,b:"b"})
     *
     * You can also sort by multiple criteria, and request 'descending'
     * sorting:
     *
     *     LinkedList.of({a:1,b:"b"},{a:1,b:"test"},{a:2,b:"a"}).sortOn(elt=>elt.a,{desc:elt=>elt.b})
     *     => LinkedList.of({a:1,b:"test"},{a:1,b:"b"},{a:2,b:"a"})
     *
     * also see [[ConsLinkedList.sortBy]]
     */
    sortOn(...getKeys: Array<ToOrderable<T>|{desc:ToOrderable<T>}>): LinkedList<T> {
        return this;
    }
    /**
     * Remove duplicate items; elements are mapped to keys, those
     * get compared.
     *
     *     LinkedList.of(1,1,2,3,2,3,1).distinctBy(x => x)
     *     => LinkedList.of(1,2,3)
     */
    distinctBy<U>(keyExtractor: (x:T)=>U&WithEquality): LinkedList<T> {
        return this;
    }
    /**
     * Call a function for element in the collection.
     */
    forEach(fn: (v:T)=>void): LinkedList<T> {
        return this;
    }
    /**
     * Reduces the collection to a single value by repeatedly
     * calling the combine function.
     * No starting value. The order in which the elements are
     * passed to the combining function is undetermined.
     */
    reduce(combine: (v1:T,v2:T)=>T): Option<T> {
        return SeqHelpers.reduce(this, combine);
    }
    /**
     * Compare values in the collection and return the smallest element.
     * Returns Option.none if the collection is empty.
     *
     * also see [[ConsLinkedList.minOn]]
     */
    minBy(compare: (v1:T,v2:T)=>Ordering): Option<T> {
        return SeqHelpers.minBy(this, compare);
    }
    /**
     * Call the function you give for each value in the collection
     * and return the element for which the result was the smallest.
     * Returns Option.none if the collection is empty.
     *
     *     LinkedList.of({name:"Joe", age:12}, {name:"Paula", age:6}).minOn(x=>x.age)
     *     => Option.of({name:"Paula", age:6})
     *
     * also see [[ConsLinkedList.minBy]]
     */
    minOn(getOrderable: ToOrderable<T>): Option<T> {
        return SeqHelpers.minOn(this, getOrderable);
    }
    /**
     * Compare values in the collection and return the largest element.
     * Returns Option.none if the collection is empty.
     *
     * also see [[ConsLinkedList.maxOn]]
     */
    maxBy(compare: (v1:T,v2:T)=>Ordering): Option<T> {
        return SeqHelpers.maxBy(this, compare);
    }
    /**
     * Call the function you give for each value in the collection
     * and return the element for which the result was the largest.
     * Returns Option.none if the collection is empty.
     *
     *     LinkedList.of({name:"Joe", age:12}, {name:"Paula", age:6}).maxOn(x=>x.age)
     *     => Option.of({name:"Joe", age:12})
     *
     * also see [[ConsLinkedList.maxBy]]
     */
    maxOn(getOrderable: ToOrderable<T>): Option<T> {
        return SeqHelpers.maxOn(this, getOrderable);
    }
    /**
     * Call the function you give for each element in the collection
     * and sum all the numbers, return that sum.
     * Will return 0 if the collection is empty.
     *
     *     LinkedList.of(1,2,3).sumOn(x=>x)
     *     => 6
     */
    sumOn(getNumber: (v:T)=>number): number {
        return SeqHelpers.sumOn(this, getNumber);
    }
    /**
     * Slides a window of a specific size over the sequence.
     * Returns a lazy stream so memory use is not prohibitive.
     *
     *     LinkedList.of(1,2,3,4,5,6,7,8).sliding(3)
     *     => Stream.of(LinkedList.of(1,2,3), LinkedList.of(4,5,6), LinkedList.of(7,8))
     */
    sliding(count:number): Stream<ConsLinkedList<T>> {
        return <Stream<ConsLinkedList<T>>>SeqHelpers.sliding(this, count);
    }
    /**
     * Apply the function you give to all elements of the sequence
     * in turn, keeping the intermediate results and returning them
     * along with the final result in a list.
     * The last element of the result is the final cumulative result.
     *
     *     LinkedList.of(1,2,3).scanLeft(0, (soFar,cur)=>soFar+cur)
     *     => LinkedList.of(0,1,3,6)
     */
    scanLeft<U>(init:U, fn:(soFar:U,cur:T)=>U): LinkedList<U> {
        return LinkedList.of<U>(init);
    }
    /**
     * Apply the function you give to all elements of the sequence
     * in turn, keeping the intermediate results and returning them
     * along with the final result in a list.
     * The first element of the result is the final cumulative result.
     *
     *     LinkedList.of(1,2,3).scanRight(0, (cur,soFar)=>soFar+cur)
     *     => LinkedList.of(6,5,3,0)
     */
    scanRight<U>(init:U, fn:(cur:T,soFar:U)=>U): LinkedList<U> {
        return LinkedList.of<U>(init);
    }
    /**
     * Joins elements of the collection by a separator.
     * Example:
     *
     *     LinkedList.of(1,2,3).mkString(", ")
     *     => "1, 2, 3"
     */
    mkString(separator: string): string {
        return "";
    }
    /**
     * Convert to array.
     * (always cheap here: the empty list converts to an empty array)
     */
    toArray(): T[] {
        return [];
    }
    /**
     * Convert to vector.
     * (always cheap here: the empty list converts to an empty vector)
     */
    toVector(): Vector<T> {
        return Vector.empty<T>();
    }
    /**
     * Convert this collection to a map. You give a function which
     * for each element in the collection returns a pair. The
     * key of the pair will be used as a key in the map, the value,
     * as a value in the map. If several values get the same key,
     * entries will be lost.
     *
     *     LinkedList.of(1,2,3).toMap(x=>[x.toString(), x])
     *     => HashMap.of(["1",1], ["2",2], ["3",3])
     */
    toMap<K,V>(converter:(x:T)=>[K & WithEquality,V]): HashMap<K,V> {
        return HashMap.empty<K,V>();
    }
    /**
     * Convert this collection to a set. Since the elements of the
     * Seq may not support equality, you must pass a function returning
     * a value supporting equality.
     *
     *     LinkedList.of(1,2,3,3,4).toSet(x=>x)
     *     => HashSet.of(1,2,3,4)
     */
    toSet<K>(converter:(x:T)=>K&WithEquality): HashSet<K> {
        return HashSet.empty<K>();
    }
    /**
     * Transform this value to another value type.
     * Enables fluent-style programming by chaining calls.
     */
    transform<U>(converter:(x:LinkedList<T>)=>U): U {
        return converter(this);
    }
    /**
     * Two objects are equal if they represent the same value,
     * regardless of whether they are the same object physically
     * in memory.
     */
    equals(other: LinkedList<T&WithEquality>): boolean {
        if (!other) {
            return false;
        }
        return other.isEmpty();
    }
    /**
     * Get a number for that object. Two different values
     * may get the same number, but one value must always get
     * the same number. The formula can impact performance.
     */
    hashCode(): number {
        return 1;
    }
    [inspect](): string {
        return this.toString();
    }
    /**
     * Get a human-friendly string representation of that value.
     *
     * Also see [[ConsLinkedList.mkString]]
     */
    toString(): string {
        return "LinkedList()";
    }
}
/**
* ConsLinkedList holds a value and a pointer to a next element,
* which could be [[ConsLinkedList]] or [[EmptyLinkedList]].
* A ConsLinkedList is basically a non-empty linked list. It will
* contain at least one element.
* "static methods" available through [[LinkedListStatic]]
* @param T the item type
*/
export class ConsLinkedList<T> implements Seq<T> {
    /**
     * @hidden
     * Phantom field only used for type discrimination; never set at runtime.
     */
    readonly className: "ConsLinkedList" = <any>undefined; // https://stackoverflow.com/a/47841595/516188
    /**
     * @hidden
     * value is the head element, _tail the rest of the list (possibly empty).
     */
    public constructor(protected value: T, protected _tail: LinkedList<T>) {}
    /**
     * @hidden
     * Delegates the equality-support check to the shared Seq helper.
     */
    hasTrueEquality(): boolean {
        return SeqHelpers.seqHasTrueEquality<T>(this);
    }
    /**
     * View this ConsLinkedList as a LinkedList. Useful to help typescript type
     * inference sometimes.
     */
    asLinkedList(): LinkedList<T> {
        return this;
    }
/**
* Implementation of the Iterator interface.
*/
[Symbol.iterator](): Iterator<T> {
let item: LinkedList<T> = this;
return {
next(): IteratorResult<T> {
if (item.isEmpty()) {
return { done: true, value: <any>undefined };
}
const value = item.head().get();
item = item.tail().get();
return {done: false, value};
}
};
}
/**
* Get the length of the collection.
*/
length(): number {
return this.foldLeft(0, (n, ignored) => n + 1);
}
/**
* If the collection contains a single element,
* return Some of its value, otherwise return None.
*/
single(): Option<T> {
return this._tail.isEmpty() ?
Option.of(this.value) :
Option.none<T>();
}
    /**
     * true if the collection is empty, false otherwise.
     * A ConsLinkedList always holds at least one element, hence false.
     */
    isEmpty(): this is EmptyLinkedList<T> {
        return false;
    }
    /**
     * Get the first value of the collection, if any.
     * In this case the list is not empty, so returns Option.some
     */
    head(): Some<T> {
        return Option.some(this.value);
    }
    /**
     * Get all the elements in the collection but the first one.
     * Always Some here; the tail itself may be the empty list.
     */
    tail(): Some<LinkedList<T>> {
        return Option.some(this._tail);
    }
/**
* Get the last value of the collection, if any.
* returns Option.Some if the collection is not empty,
* Option.None if it's empty.
*/
last(): Some<T> {
let curItem: LinkedList<T> = this;
while (true) {
const item = (<ConsLinkedList<T>>curItem).value;
curItem = (<ConsLinkedList<T>>curItem)._tail;
if (curItem.isEmpty()) {
return Option.some(item);
}
}
}
/**
* Retrieve the element at index idx.
* Returns an option because the collection may
* contain less elements than the index.
*
* Careful this is going to have poor performance
* on LinkedList, which is not a good data structure
* for random access!
*/
get(idx: number): Option<T> {
let curItem: LinkedList<T> = this;
let i=0;
while (!curItem.isEmpty()) {
if (i === idx) {
const item = curItem.value;
return Option.of(item);
}
curItem = curItem._tail;
++i;
}
return Option.none<T>();
}
/**
* Search for an item matching the predicate you pass,
* return Option.Some of that element if found,
* Option.None otherwise.
*/
find(predicate:(v:T)=>boolean): Option<T> {
let curItem: LinkedList<T> = this;
while (!curItem.isEmpty()) {
const item = curItem.value;
if (predicate(item)) {
return Option.of(item);
}
curItem = curItem._tail;
}
return Option.none<T>();
}
/**
* Returns true if the item is in the collection,
* false otherwise.
*/
contains(v:T&WithEquality): boolean {
return this.find(x => areEqual(x,v)).isSome();
}
/**
* Return a new stream keeping only the first n elements
* from this stream.
*/
take(n: number): LinkedList<T> {
let result = <LinkedList<T>><EmptyLinkedList<T>>emptyLinkedList;
let curItem: LinkedList<T> = this;
let i = 0;
while (i++ < n && (!curItem.isEmpty())) {
result = new ConsLinkedList(curItem.value, result);
curItem = curItem._tail;
}
return result.reverse();
}
/**
* Returns a new collection, discarding the elements
* after the first element which fails the predicate.
*/
takeWhile(predicate: (x:T)=>boolean): LinkedList<T> {
let result = <LinkedList<T>><EmptyLinkedList<T>>emptyLinkedList;
let curItem: LinkedList<T> = this;
while ((!curItem.isEmpty()) && predicate(curItem.value)) {
result = new ConsLinkedList(curItem.value, result);
curItem =curItem._tail;
}
return result.reverse();
}
/**
* Returns a new collection, discarding the elements
* after the first element which fails the predicate,
* but starting from the end of the collection.
*
* LinkedList.of(1,2,3,4).takeRightWhile(x => x > 2)
* => LinkedList.of(3,4)
*/
takeRightWhile(predicate:(x:T)=>boolean): LinkedList<T> {
return this.reverse().takeWhile(predicate).reverse();
}
/**
* Returns a new collection with the first
* n elements discarded.
* If the collection has less than n elements,
* returns the empty collection.
*/
drop(n:number): LinkedList<T> {
let i = n;
let curItem: LinkedList<T> = this;
while (i-- > 0 && !curItem.isEmpty()) {
curItem = curItem._tail;
}
return curItem;
}
/**
* Returns a new collection, discarding the first elements
* until one element fails the predicate. All elements
* after that point are retained.
*/
dropWhile(predicate:(x:T)=>boolean): LinkedList<T> {
let curItem: LinkedList<T> = this;
while (!curItem.isEmpty() && predicate(curItem.value)) {
curItem = curItem._tail;
}
return curItem;
}
/**
* Returns a new collection with the last
* n elements discarded.
* If the collection has less than n elements,
* returns the empty collection.
*/
    dropRight(n:number): LinkedList<T> {
        // going twice through the list...
        // (once for length(), once for take(); take() tolerates a
        // negative count by returning the empty list)
        const length = this.length();
        return this.take(length-n);
    }
/**
* Returns a new collection, discarding the last elements
* until one element fails the predicate. All elements
* before that point are retained.
*/
    dropRightWhile(predicate:(x:T)=>boolean): LinkedList<T> {
        // Reverse so the trailing run becomes a leading run, drop it,
        // then restore the original order.
        return this.reverse().dropWhile(predicate).reverse();
    }
/**
* Reduces the collection to a single value using the
* associative binary function you give. Since the function
* is associative, order of application doesn't matter.
*
* Example:
*
* LinkedList.of(1,2,3).fold(0, (a,b) => a + b);
* => 6
*/
    fold(zero:T, fn:(v1:T,v2:T)=>T): T {
        // Associativity makes direction irrelevant; left fold is the
        // cheap one for a singly-linked list.
        return this.foldLeft(zero, fn);
    }
/**
* Reduces the collection to a single value.
* Left-associative.
*
* Example:
*
* Vector.of("a", "b", "c").foldLeft("!", (xs,x) => x+xs);
* => "cba!"
*
* @param zero The initial value
* @param fn A function taking the previous value and
* the current collection item, and returning
* an updated value.
*/
foldLeft<U>(zero: U, fn:(soFar:U,cur:T)=>U): U {
let r = zero;
let curItem: LinkedList<T> = this;
while (!curItem.isEmpty()) {
r = fn(r, curItem.value);
curItem = curItem._tail;
}
return r;
}
/**
* Reduces the collection to a single value.
* Right-associative.
*
* Example:
*
* Vector.of("a", "b", "c").foldRight("!", (x,xs) => xs+x);
* => "!cba"
*
* @param zero The initial value
* @param fn A function taking the current collection item and
* the previous value , and returning
* an updated value.
*/
    foldRight<U>(zero: U, fn:(cur:T, soFar:U)=>U): U {
        // Reverse once, then a left fold with flipped arguments is a
        // right fold (and stays iterative, no deep recursion).
        return this.reverse().foldLeft(zero, (xs,x)=>fn(x,xs));
    }
/**
* Combine this collection with the collection you give in
* parameter to produce a new collection which combines both,
* in pairs. For instance:
*
* Vector.of(1,2,3).zip(["a","b","c"])
* => Vector.of([1,"a"], [2,"b"], [3,"c"])
*
* The result collection will have the length of the shorter
* of both collections. Extra elements will be discarded.
*
* Also see [[LinkedListStatic.zip]] (static version which can more than two
* iterables)
*/
zip<U>(other: Iterable<U>): LinkedList<[T,U]> {
const otherIterator = other[Symbol.iterator]();
let otherCurItem = otherIterator.next();
let curItem: LinkedList<T> = this;
let result: LinkedList<[T,U]> = <EmptyLinkedList<[T,U]>>emptyLinkedList;
while ((!curItem.isEmpty()) && (!otherCurItem.done)) {
result = new ConsLinkedList(
[curItem.value, otherCurItem.value] as [T,U], result);
curItem = curItem._tail;
otherCurItem = otherIterator.next();
}
return result.reverse();
}
/**
* Combine this collection with the index of the elements
* in it. Handy if you need the index when you map on
* the collection for instance:
*
* LinkedList.of("a","b").zipWithIndex().map(([v,idx]) => v+idx);
* => LinkedList.of("a0", "b1")
*/
    zipWithIndex(): LinkedList<[T,number]> {
        // Shared Seq helper; cast back to the LinkedList-specific type.
        return <LinkedList<[T,number]>>SeqHelpers.zipWithIndex<T>(this);
    }
/**
* Reverse the collection. For instance:
*
* LinkedList.of(1,2,3).reverse();
* => LinkedList.of(3,2,1)
*/
    reverse(): LinkedList<T> {
        // Prepending every element onto an empty list reverses the order.
        return this.foldLeft(<LinkedList<T>><EmptyLinkedList<T>>emptyLinkedList, (xs,x) => xs.prepend(x));
    }
/**
* Takes a predicate; returns a pair of collections.
* The first one is the longest prefix of this collection
* which satisfies the predicate, and the second collection
* is the remainder of the collection.
*
* LinkedList.of(1,2,3,4,5,6).span(x => x <3)
* => [LinkedList.of(1,2), LinkedList.of(3,4,5,6)]
*/
span(predicate:(x:T)=>boolean): [LinkedList<T>,LinkedList<T>] {
let first: LinkedList<T> = <EmptyLinkedList<T>>emptyLinkedList;
let curItem: LinkedList<T> = this;
while ((!curItem.isEmpty()) && predicate(curItem.value)) {
first = new ConsLinkedList(curItem.value, first);
curItem = curItem._tail;
}
return [first.reverse(), curItem];
}
/**
* Split the collection at a specific index.
*
* LinkedList.of(1,2,3,4,5).splitAt(3)
* => [LinkedList.of(1,2,3), LinkedList.of(4,5)]
*/
splitAt(index:number): [LinkedList<T>,LinkedList<T>] {
let first: LinkedList<T> = <EmptyLinkedList<T>>emptyLinkedList;
let curItem: LinkedList<T> = this;
let i = 0;
while (i++ < index && (!curItem.isEmpty())) {
first = new ConsLinkedList(curItem.value, first);
curItem = curItem._tail;
}
return [first.reverse(), curItem];
}
/**
* Returns a pair of two collections; the first one
* will only contain the items from this collection for
* which the predicate you give returns true, the second
* will only contain the items from this collection where
* the predicate returns false.
*
* LinkedList.of(1,2,3,4).partition(x => x%2===0)
* => [LinkedList.of(2,4),LinkedList.of(1,3)]
*/
partition<U extends T>(predicate:(v:T)=>v is U): [LinkedList<U>,LinkedList<Exclude<T,U>>];
partition(predicate:(x:T)=>boolean): [LinkedList<T>,LinkedList<T>];
partition(predicate:(v:T)=>boolean): [LinkedList<T>,LinkedList<T>] {
let fst = LinkedList.empty<T>();
let snd = LinkedList.empty<T>();
let curItem: LinkedList<T> = this;
while (!curItem.isEmpty()) {
if (predicate(curItem.value)) {
fst = new ConsLinkedList(curItem.value, fst);
} else {
snd = new ConsLinkedList(curItem.value, snd);
}
curItem = curItem._tail;
}
return [fst.reverse(), snd.reverse()];
}
/**
* Group elements in the collection using a classifier function.
* Elements are then organized in a map. The key is the value of
* the classifier, and in value we get the list of elements
* matching that value.
*
* also see [[ConsLinkedList.arrangeBy]]
*/
    groupBy<C>(classifier: (v:T)=>C & WithEquality): HashMap<C,LinkedList<T>> {
        // Each element becomes a singleton list; putWithMerge prepends
        // it onto any existing group (building groups in reverse), and
        // the final mapValues(reverse) restores insertion order.
        return this.foldLeft(
            HashMap.empty<C,LinkedList<T>>(),
            (acc: HashMap<C,LinkedList<T>>, v:T) =>
                acc.putWithMerge(
                    classifier(v), LinkedList.of(v),
                    (v1:LinkedList<T>,v2:LinkedList<T>)=>
                        v1.prepend(v2.single().getOrThrow())))
            .mapValues(l => l.reverse());
    }
/**
* Matches each element with a unique key that you extract from it.
* If the same key is present twice, the function will return None.
*
* also see [[ConsLinkedList.groupBy]]
*/
    arrangeBy<K>(getKey: (v:T)=>K&WithEquality): Option<HashMap<K,T>> {
        // Shared Seq helper; returns None on duplicate keys.
        return SeqHelpers.arrangeBy<T,K>(this, getKey);
    }
/**
* Randomly reorder the elements of the collection.
*/
    shuffle(): LinkedList<T> {
        // Materialize to an array, shuffle that, rebuild a list.
        return LinkedList.ofIterable<T>(SeqHelpers.shuffle(this.toArray()));
    }
/**
* Append an element at the end of this LinkedList.
* Warning: appending in a loop on a linked list is going
* to be very slow!
*/
    append(v:T): LinkedList<T> {
        // Recursively rebuilds the whole spine (O(n) time and stack
        // depth) — hence the warning above about appending in a loop.
        return new ConsLinkedList(
            this.value,
            this._tail.append(v));
    }
    /**
     * Append multiple elements at the end of this LinkedList.
     */
    appendAll(elts:Iterable<T>): LinkedList<T> {
        // Build a list from the new elements and put our own elements
        // in front of it — avoids repeated append() calls.
        return LinkedList.ofIterable(elts).prependAll(<LinkedList<T>>this);
    }
/**
* Remove multiple elements from a LinkedList
*
* LinkedList.of(1,2,3,4,3,2,1).removeAll([2,4])
* => LinkedList.of(1,3,3,1)
*/
    removeAll(elts:Iterable<T&WithEquality>): LinkedList<T> {
        // Shared Seq helper; the double cast bridges the helper's
        // generic Seq return type back to LinkedList.
        return <LinkedList<T>><any>SeqHelpers.removeAll(this, elts);
    }
/**
* Removes the first element matching the predicate
* (use [[Seq.filter]] to remove all elements matching a predicate)
*/
removeFirst(predicate: (x:T)=>boolean): LinkedList<T> {
let curItem: LinkedList<T> = this;
let result: LinkedList<T> = <EmptyLinkedList<T>>emptyLinkedList;
let removed = false;
while (!curItem.isEmpty()) {
if (predicate(curItem.value) && !removed) {
removed = true;
} else {
result = new ConsLinkedList(curItem.value, result);
}
curItem = curItem._tail;
}
return result.reverse();
}
/**
* Prepend an element at the beginning of the collection.
*/
    prepend(elt: T): LinkedList<T> {
        // O(1): just cons a new head onto the shared tail.
        return new ConsLinkedList(elt, this);
    }
/**
* Prepend multiple elements at the beginning of the collection.
*/
prependAll(elts: Iterable<T>): LinkedList<T> {
let leftToAdd = LinkedList.ofIterable(elts).reverse();
let result: LinkedList<T> = this;
while (!leftToAdd.isEmpty()) {
result = new ConsLinkedList(leftToAdd.value, result);
leftToAdd = leftToAdd._tail;
}
return result;
}
/**
* Return a new collection where each element was transformed
* by the mapper function you give.
*/
map<U>(mapper:(v:T)=>U): LinkedList<U> {
let curItem: LinkedList<T> = this;
let result: LinkedList<U> = <EmptyLinkedList<U>>emptyLinkedList;
while (!curItem.isEmpty()) {
result = new ConsLinkedList(mapper(curItem.value), result);
curItem = curItem._tail;
}
return result.reverse();
}
/**
* Apply the mapper function on every element of this collection.
* The mapper function returns an Option; if the Option is a Some,
* the value it contains is added to the result Collection, if it's
* a None, the value is discarded.
*
* LinkedList.of(1,2,6).mapOption(x => x%2===0 ?
* Option.of(x+1) : Option.none<number>())
* => LinkedList.of(3, 7)
*/
mapOption<U>(mapper:(v:T)=>Option<U>): LinkedList<U> {
let curItem: LinkedList<T> = this;
let result: LinkedList<U> = <EmptyLinkedList<U>>emptyLinkedList;
while (!curItem.isEmpty()) {
const mapped = mapper(curItem.value);
if (mapped.isSome()) {
result = new ConsLinkedList(mapped.get(), result);
}
curItem = curItem._tail;
}
return result.reverse();
}
/**
* Calls the function you give for each item in the collection,
* your function returns a collection, all the collections are
* concatenated.
* This is the monadic bind.
*/
flatMap<U>(mapper:(v:T)=>LinkedList<U>): LinkedList<U> {
let curItem: LinkedList<T> = this;
let result: LinkedList<U> = <EmptyLinkedList<U>>emptyLinkedList;
while (!curItem.isEmpty()) {
result = result.prependAll(mapper(curItem.value).reverse());
curItem = curItem._tail;
}
return result.reverse();
}
/**
* Returns true if the predicate returns true for all the
* elements in the collection.
*/
    allMatch<U extends T>(predicate:(v:T)=>v is U): this is LinkedList<U>;
    allMatch(predicate:(v:T)=>boolean): boolean;
    allMatch(predicate:(v:T)=>boolean): boolean {
        // All match iff no counter-example exists; find short-circuits.
        return this.find(x => !predicate(x)).isNone();
    }
    /**
     * Returns true if the predicate returns true for any
     * element in the collection.
     */
    anyMatch(predicate:(v:T)=>boolean): boolean {
        // One match is enough; find short-circuits.
        return this.find(predicate).isSome();
    }
/**
* Call a predicate for each element in the collection,
* build a new collection holding only the elements
* for which the predicate returned true.
*/
filter<U extends T>(predicate:(v:T)=>v is U): LinkedList<U>;
filter(predicate:(v:T)=>boolean): LinkedList<T>;
filter(predicate:(v:T)=>boolean): LinkedList<T> {
let curItem: LinkedList<T> = this;
let result: LinkedList<T> = <EmptyLinkedList<T>>emptyLinkedList;
while (!curItem.isEmpty()) {
if (predicate(curItem.value)) {
result = new ConsLinkedList(curItem.value, result);
}
curItem = curItem._tail;
}
return result.reverse();
}
/**
* Returns a new collection with elements
* sorted according to the comparator you give.
*
* const activityOrder = ["Writer", "Actor", "Director"];
* LinkedList.of({name:"George", activity: "Director"}, {name:"Robert", activity: "Actor"})
* .sortBy((p1,p2) => activityOrder.indexOf(p1.activity) - activityOrder.indexOf(p2.activity));
* => LinkedList.of({"name":"Robert","activity":"Actor"}, {"name":"George","activity":"Director"})
*
* also see [[ConsLinkedList.sortOn]]
*/
    sortBy(compare: (v1:T,v2:T)=>Ordering): LinkedList<T> {
        // toArray() yields a fresh array, so the in-place sort does not
        // mutate any shared state.
        return LinkedList.ofIterable<T>(this.toArray().sort(compare));
    }
/**
* Give a function associating a number or a string with
* elements from the collection, and the elements
* are sorted according to that value.
*
* LinkedList.of({a:3,b:"b"},{a:1,b:"test"},{a:2,b:"a"}).sortOn(elt=>elt.a)
* => LinkedList.of({a:1,b:"test"},{a:2,b:"a"},{a:3,b:"b"})
*
* You can also sort by multiple criteria, and request 'descending'
* sorting:
*
* LinkedList.of({a:1,b:"b"},{a:1,b:"test"},{a:2,b:"a"}).sortOn(elt=>elt.a,{desc:elt=>elt.b})
* => LinkedList.of({a:1,b:"test"},{a:1,b:"b"},{a:2,b:"a"})
*
* also see [[ConsLinkedList.sortBy]]
*/
    sortOn(...getKeys: Array<ToOrderable<T>|{desc:ToOrderable<T>}>): LinkedList<T> {
        // Shared Seq helper handles multi-key and descending cases.
        return <LinkedList<T>>SeqHelpers.sortOn<T>(this, getKeys);
    }
/**
* Remove duplicate items; elements are mapped to keys, those
* get compared.
*
* LinkedList.of(1,1,2,3,2,3,1).distinctBy(x => x)
* => LinkedList.of(1,2,3)
*/
    distinctBy<U>(keyExtractor: (x:T)=>U&WithEquality): LinkedList<T> {
        // Shared Seq helper; keys (not elements) are compared.
        return <LinkedList<T>>SeqHelpers.distinctBy(this, keyExtractor);
    }
/**
* Call a function for element in the collection.
*/
forEach(fn: (v:T)=>void): LinkedList<T> {
let curItem: LinkedList<T> = this;
while (!curItem.isEmpty()) {
fn(curItem.value);
curItem = curItem._tail;
}
return this;
}
/**
* Reduces the collection to a single value by repeatedly
* calling the combine function.
* No starting value. The order in which the elements are
* passed to the combining function is undetermined.
*/
    reduce(combine: (v1:T,v2:T)=>T): Option<T> {
        // Shared Seq helper; None on empty collection.
        return SeqHelpers.reduce(this, combine);
    }
/**
* Compare values in the collection and return the smallest element.
* Returns Option.none if the collection is empty.
*
* also see [[ConsLinkedList.minOn]]
*/
    minBy(compare: (v1:T,v2:T)=>Ordering): Option<T> {
        // Shared Seq helper; None on empty collection.
        return SeqHelpers.minBy(this, compare);
    }
/**
* Call the function you give for each value in the collection
* and return the element for which the result was the smallest.
* Returns Option.none if the collection is empty.
*
* LinkedList.of({name:"Joe", age:12}, {name:"Paula", age:6}).minOn(x=>x.age)
* => Option.of({name:"Paula", age:6})
*
* also see [[ConsLinkedList.minBy]]
*/
    minOn(getOrderable: ToOrderable<T>): Option<T> {
        // Shared Seq helper; None on empty collection.
        return SeqHelpers.minOn(this, getOrderable);
    }
    /**
     * Compare values in the collection and return the largest element.
     * Returns Option.none if the collection is empty.
     *
     *     LinkedList.of(1,2,3).maxBy((v1,v2) => v1-v2)
     *     => Option.of(3)
     *
     * also see [[ConsLinkedList.maxOn]]
     */
    maxBy(compare: (v1:T,v2:T)=>Ordering): Option<T> {
        // Shared Seq helper; None on empty collection.
        return SeqHelpers.maxBy(this, compare);
    }
    /**
     * Call the function you give for each value in the collection
     * and return the element for which the result was the largest.
     * Returns Option.none if the collection is empty.
     *
     *     LinkedList.of({name:"Joe", age:12}, {name:"Paula", age:6}).maxOn(x=>x.age)
     *     => Option.of({name:"Joe", age:12})
     *
     * also see [[ConsLinkedList.maxBy]]
     */
    maxOn(getOrderable: ToOrderable<T>): Option<T> {
        // Shared Seq helper; None on empty collection.
        return SeqHelpers.maxOn(this, getOrderable);
    }
/**
* Call the function you give for each element in the collection
* and sum all the numbers, return that sum.
* Will return 0 if the collection is empty.
*
* LinkedList.of(1,2,3).sumOn(x=>x)
* => 6
*/
    sumOn(getNumber: (v:T)=>number): number {
        // Shared Seq helper; 0 on empty collection.
        return SeqHelpers.sumOn(this, getNumber);
    }
/**
* Slides a window of a specific size over the sequence.
* Returns a lazy stream so memory use is not prohibitive.
*
* LinkedList.of(1,2,3,4,5,6,7,8).sliding(3)
* => Stream.of(LinkedList.of(1,2,3), LinkedList.of(4,5,6), LinkedList.of(7,8))
*/
    sliding(count:number): Stream<ConsLinkedList<T>> {
        // Shared Seq helper; the returned Stream is lazy, so windows
        // are only materialized as consumed.
        return <Stream<ConsLinkedList<T>>>SeqHelpers.sliding(this, count);
    }
/**
* Apply the function you give to all elements of the sequence
* in turn, keeping the intermediate results and returning them
* along with the final result in a list.
*
* LinkedList.of(1,2,3).scanLeft(0, (soFar,cur)=>soFar+cur)
* => LinkedList.of(0,1,3,6)
*/
scanLeft<U>(init:U, fn:(soFar:U,cur:T)=>U): LinkedList<U> {
let result = LinkedList.of(init);
let curItem: LinkedList<T> = this;
let soFar = init;
while (!curItem.isEmpty()) {
soFar = fn(soFar, curItem.value);
result = new ConsLinkedList(soFar, result);
curItem = curItem._tail;
}
return result.reverse();
}
/**
* Apply the function you give to all elements of the sequence
* in turn, keeping the intermediate results and returning them
* along with the final result in a list.
* The first element of the result is the final cumulative result.
*
* LinkedList.of(1,2,3).scanRight(0, (cur,soFar)=>soFar+cur)
* => LinkedList.of(6,5,3,0)
*/
    scanRight<U>(init:U, fn:(cur:T,soFar:U)=>U): LinkedList<U> {
        let result = LinkedList.of(init);
        // Walk the reversed list so the fold runs right-to-left;
        // prepending each step already yields the documented order,
        // so no final reverse is needed (unlike scanLeft).
        let curItem: LinkedList<T> = this.reverse();
        let soFar = init;
        while (!curItem.isEmpty()) {
            soFar = fn(curItem.value, soFar);
            result = new ConsLinkedList(soFar, result);
            curItem = curItem._tail;
        }
        return result;
    }
/**
* Joins elements of the collection by a separator.
* Example:
*
* LinkedList.of(1,2,3).mkString(", ")
* => "1, 2, 3"
*/
mkString(separator: string): string {
let r = "";
let curItem: LinkedList<T> = this;
let isNotFirst = false;
while (!curItem.isEmpty()) {
if (isNotFirst) {
r += separator;
}
r += SeqHelpers.toStringHelper(curItem.value, {quoteStrings:false});
curItem = curItem._tail;
isNotFirst = true;
}
return r;
}
/**
* Convert to array.
* Don't do it on an infinite stream!
*/
toArray(): T[] {
let r:T[] = [];
let curItem: LinkedList<T> = this;
while (!curItem.isEmpty()) {
r.push(curItem.value);
curItem = curItem._tail;
}
return r;
}
/**
* Convert to vector.
* Don't do it on an infinite stream!
*/
    toVector(): Vector<T> {
        // Materialize through an array, then bulk-load the vector.
        return Vector.ofIterable<T>(this.toArray());
    }
/**
* Convert this collection to a map. You give a function which
* for each element in the collection returns a pair. The
* key of the pair will be used as a key in the map, the value,
* as a value in the map. If several values get the same key,
* entries will be lost.
*
* LinkedList.of(1,2,3).toMap(x=>[x.toString(), x])
* => HashMap.of(["1",1], ["2",2], ["3",3])
*/
toMap<K,V>(converter:(x:T)=>[K & WithEquality,V]): HashMap<K,V> {
return this.foldLeft(HashMap.empty<K,V>(), (acc,cur) => {
const converted = converter(cur);
return acc.put(converted[0], converted[1]);
});
}
/**
* Convert this collection to a set. Since the elements of the
* Seq may not support equality, you must pass a function returning
* a value supporting equality.
*
* LinkedList.of(1,2,3,3,4).toSet(x=>x)
* => HashSet.of(1,2,3,4)
*/
toSet<K>(converter:(x:T)=>K&WithEquality): HashSet<K> {
return this.foldLeft(HashSet.empty<K>(), (acc,cur) => {
return acc.add(converter(cur));
});
}
/**
* Transform this value to another value type.
* Enables fluent-style programming by chaining calls.
*/
    transform<U>(converter:(x:LinkedList<T>)=>U): U {
        // Plain application — exists purely to keep call chains fluent.
        return converter(this);
    }
/**
* Two objects are equal if they represent the same value,
* regardless of whether they are the same object physically
* in memory.
*/
equals(other: LinkedList<T&WithEquality>): boolean {
if (<any>other === this) {
return true;
}
if (!other || !other.tail) {
return false;
}
contractTrueEquality("LinkedList.equals", this, other);
let myVal: LinkedList<T> = this;
let hisVal = other;
while (true) {
if (myVal.isEmpty() !== hisVal.isEmpty()) {
return false;
}
if (myVal.isEmpty()) {
// they are both empty, end of the stream
return true;
}
const myHead = myVal.value;
const hisHead = (<ConsLinkedList<T>>hisVal).value;
if ((myHead === undefined) !== (hisHead === undefined)) {
return false;
}
if (myHead === undefined || hisHead === undefined) {
// they are both undefined, the || is for TS's flow analysis
// so he realizes none of them is undefined after this.
continue;
}
if (!areEqual(myHead, hisHead)) {
return false;
}
myVal = myVal._tail;
hisVal = (<ConsLinkedList<T&WithEquality>>hisVal)._tail;
}
}
/**
* Get a number for that object. Two different values
* may get the same number, but one value must always get
* the same number. The formula can impact performance.
*/
hashCode(): number {
let hash = 1;
let curItem: LinkedList<T> = this;
while (!curItem.isEmpty()) {
hash = 31 * hash + getHashCode(curItem.value);
curItem = curItem._tail;
}
return hash;
}
    // Node REPL/util.inspect integration: reuse toString().
    [inspect](): string {
        return this.toString();
    }
/**
* Get a human-friendly string representation of that value.
*
* Also see [[ConsLinkedList.mkString]]
*/
toString(): string {
let curItem: LinkedList<T> = this;
let result = "LinkedList(";
while (!curItem.isEmpty()) {
result += SeqHelpers.toStringHelper(curItem.value);
const tail: LinkedList<T> = curItem._tail;
curItem = tail;
if (!curItem.isEmpty()) {
result += ", ";
}
}
return result + ")";
}
}
const emptyLinkedList = new EmptyLinkedList<any>();
import {
ClientHttp2Session,
ClientHttp2Stream,
connect as http2Connect,
constants as h2constants,
IncomingHttpHeaders as IncomingHttp2Headers,
SecureClientSessionOptions,
} from "http2";
import { URL } from "url";
import { asyncGuard, syncGuard } from "callguard";
import {
AbortError,
Decoder,
TimeoutError,
} from "./core";
import { Request } from "./request";
import { Response, StreamResponse } from "./response";
import { makeOkError } from "./utils";
import {
isDestroyed,
MonkeyH2Session,
setDestroyed,
setGotGoaway,
} from "./utils-http2";
const {
HTTP2_HEADER_PATH,
} = h2constants;
interface H2SessionItem
{
	// Origin this session was first connected for.
	firstOrigin: string;
	session: ClientHttp2Session;
	// Resolves once the session is connected; rejects on
	// close/timeout/error (see connectHttp2).
	promise: Promise< ClientHttp2Session >;
	// Ref-counting hooks wrapping session.ref()/session.unref().
	ref: ( ) => void;
	unref: ( ) => void;
}
export interface CacheableH2Session
{
	// Increment the session's keep-alive ref-count.
	ref: ( ) => void;
	// Resolves when the underlying HTTP/2 session is connected.
	session: Promise< ClientHttp2Session >;
	// Decrement the session's keep-alive ref-count.
	unref: ( ) => void;
}
// Callback invoked for each server-pushed request; call getResponse()
// to lazily obtain the pushed response.
export type PushHandler =
	(
		origin: string,
		request: Request,
		getResponse: ( ) => Promise< Response >
	) => void;
// Content decoders to apply for responses from a given origin.
export type GetDecoders = ( origin: string ) => ReadonlyArray< Decoder >;
// Per-origin TLS/session options for new HTTP/2 connections.
export type GetSessionOptions =
	( origin: string ) => SecureClientSessionOptions;
export class H2Context
{
public _pushHandler?: PushHandler;
// TODO: Remove in favor of protocol-agnostic origin cache
private _h2sessions = new Map< string, H2SessionItem >( );
private _h2staleSessions = new Map< string, Set< ClientHttp2Session > >( );
private _getDecoders: GetDecoders;
private _getSessionOptions: GetSessionOptions;
	constructor(
		getDecoders: GetDecoders,
		getSessionOptions: GetSessionOptions
	)
	{
		this._getDecoders = getDecoders;
		this._getSessionOptions = getSessionOptions;
		/* istanbul ignore next */
		if ( process.env.DEBUG_FETCH_H2 )
		{
			// Debug aid (opt-in via env var): dump the state of all
			// active and stale sessions to stderr on SIGUSR2.
			const debug = ( line: string, ...args: Array< any > ) =>
			{
				// tslint:disable-next-line
				console.error( line, ...args );
			};
			const printSession = ( origin: string, session: MonkeyH2Session ) =>
			{
				debug( " First origin:", origin );
				debug( " Ref-counter:", session.__fetch_h2_refcount );
				debug( " Destroyed:", session.destroyed );
				debug( " Destroyed mark:", session.__fetch_h2_destroyed );
			};
			process.on( "SIGUSR2", ( ) =>
			{
				debug( "[Debug fetch-h2]: H2 sessions" );
				debug( " Active sessions" );
				[ ...this._h2sessions.entries( ) ]
				.forEach( ( [ origin, { session } ] ) =>
				{
					printSession( origin, < MonkeyH2Session >session );
				} );
				debug( " Stale sessions" );
				[ ...this._h2staleSessions.entries( ) ]
				.forEach( ( [ origin, set ] ) =>
				{
					[ ...set ]
					.forEach( ( session ) =>
					{
						printSession( origin, < MonkeyH2Session >session );
					} );
				} );
			} );
		}
	}
	// Create (and cache) a new HTTP/2 session for <origin>; wires up
	// close/goaway handling so the cache entry is cleaned up or moved
	// to the stale set automatically.
	public createHttp2(
		origin: string,
		onGotGoaway: ( ) => void,
		extraOptions?: SecureClientSessionOptions
	)
	: CacheableH2Session
	{
		const sessionItem = this.connectHttp2( origin, extraOptions );
		const { promise } = sessionItem;
		// Handle session closure (delete from store)
		promise
		.then( session =>
		{
			session.once(
				"close",
				( ) => this.disconnect( origin, session )
			);
			session.once(
				"goaway",
				(
					_errorCode: number,
					_lastStreamID: number,
					_opaqueData: Buffer
				) =>
				{
					// Server asked us to stop using this session:
					// mark it and park it among the stale sessions.
					setGotGoaway( session );
					onGotGoaway( );
					this.releaseSession( origin );
				}
			);
		} )
		.catch( ( ) =>
		{
			// Connection failed; drop whatever was cached for it.
			if ( sessionItem.session )
				this.disconnect( origin, sessionItem.session );
		} );
		this._h2sessions.set( origin, sessionItem );
		const { promise: session, ref, unref } = sessionItem;
		return {
			ref,
			unref,
			session,
		};
	}
public disconnectSession( session: ClientHttp2Session ): Promise< void >
{
return new Promise< void >( resolve =>
{
if ( session.destroyed )
return resolve( );
session.once( "close", ( ) => resolve( ) );
session.destroy( );
} );
}
public releaseSession( origin: string ): void
{
const sessionItem = this.deleteActiveSession( origin );
if ( !sessionItem )
return;
if ( !this._h2staleSessions.has( origin ) )
this._h2staleSessions.set( origin, new Set( ) );
( < Set< ClientHttp2Session > >this._h2staleSessions.get( origin ) )
.add( sessionItem.session );
}
	// Remove and return the active session entry for <origin>, or
	// return nothing if there is none. The session is unref'd and
	// marked destroyed so it won't be re-ref'd later.
	public deleteActiveSession( origin: string ): H2SessionItem | void
	{
		const sessionItem = this._h2sessions.get( origin );
		if ( !sessionItem )
			return;
		this._h2sessions.delete( origin );
		sessionItem.session.unref( );
		// Never re-ref, this session is over
		setDestroyed( sessionItem.session );
		return sessionItem;
	}
public async disconnectStaleSessions( origin: string ): Promise< void >
{
const promises: Array< Promise< void > > = [ ];
const sessionSet = this._h2staleSessions.get( origin );
if ( !sessionSet )
return;
this._h2staleSessions.delete( origin );
for ( const session of sessionSet )
promises.push( this.disconnectSession( session ) );
return Promise.all( promises ).then( ( ) => { } );
}
public disconnectAll( ): Promise< void >
{
const promises: Array< Promise< void > > = [ ];
for ( const eventualH2session of this._h2sessions.values( ) )
{
promises.push( this.handleDisconnect( eventualH2session ) );
}
this._h2sessions.clear( );
for ( const origin of this._h2staleSessions.keys( ) )
{
promises.push( this.disconnectStaleSessions( origin ) );
}
return Promise.all( promises ).then( ( ) => { } );
}
	// Disconnect session(s) for the origin of <url>. With a specific
	// <session>, only that session (active or stale) is destroyed;
	// without one, the active session and all stale sessions go.
	public disconnect( url: string, session?: ClientHttp2Session )
	: Promise< void >
	{
		const { origin } = new URL( url );
		const promises: Array< Promise< void > > = [ ];
		const sessionItem = this.deleteActiveSession( origin );
		if ( sessionItem && ( !session || sessionItem.session === session ) )
			promises.push( this.handleDisconnect( sessionItem ) );
		if ( !session )
		{
			promises.push( this.disconnectStaleSessions( origin ) );
		}
		else if ( this._h2staleSessions.has( origin ) )
		{
			// A specific session was given; destroy it only if it is
			// parked in this origin's stale set.
			const sessionSet =
				< Set< ClientHttp2Session > >
				this._h2staleSessions.get( origin );
			if ( sessionSet.has( session ) )
			{
				sessionSet.delete( session );
				promises.push( this.disconnectSession( session ) );
			}
		}
		return Promise.all( promises ).then( ( ) => { } );
	}
	// Destroy a session item's session and wait for its connection
	// promise to settle, swallowing any rejection (the session is
	// going away regardless).
	private handleDisconnect( sessionItem: H2SessionItem ): Promise< void >
	{
		const { promise, session } = sessionItem;
		if ( session )
			session.destroy( );
		return promise
		.then( _h2session => { } )
		.catch( err =>
		{
			// Flip to true locally when debugging disconnect issues.
			const debugMode = false;
			if ( debugMode )
				// tslint:disable-next-line
				console.warn( "Disconnect error", err );
		} );
	}
	// Handle a server-pushed stream: build a Request from the push
	// headers and hand it, plus a lazy Response getter, to the
	// registered push handler (if any).
	private handlePush(
		origin: string,
		pushedStream: ClientHttp2Stream,
		requestHeaders: IncomingHttp2Headers,
		ref: ( ) => void,
		unref: ( ) => void
	)
	{
		if ( !this._pushHandler )
			return; // Drop push. TODO: Signal through error log: #8
		const path = requestHeaders[ HTTP2_HEADER_PATH ] as string;
		// Remove pseudo-headers
		Object.keys( requestHeaders )
		.filter( name => name.charAt( 0 ) === ":" )
		.forEach( name => { delete requestHeaders[ name ]; } );
		const pushedRequest = new Request(
			path,
			{ headers: requestHeaders, allowForbiddenHeaders: true }
		);
		// Keep the session alive while the pushed stream is open;
		// the matching unref happens on the stream's "close".
		ref( );
		const futureResponse = new Promise< Response >( ( resolve, reject ) =>
		{
			const guard = syncGuard( reject, { catchAsync: true } );
			pushedStream.once( "close", unref );
			pushedStream.once( "aborted", ( ) =>
				reject( new AbortError( "Response aborted" ) )
			);
			pushedStream.once( "frameError", ( ) =>
				reject( new Error( "Push request failed" ) )
			);
			pushedStream.once( "error", reject );
			pushedStream.once( "push", guard(
				( responseHeaders: IncomingHttp2Headers ) =>
				{
					const response = new StreamResponse(
						this._getDecoders( origin ),
						path,
						pushedStream,
						responseHeaders,
						false,
						{ },
						void 0,
						2,
						false
					);
					resolve( response );
				}
			) );
		} );
		// Prevent unhandled-rejection noise if nobody awaits it.
		futureResponse
		.catch( _err => { } ); // TODO: #8
		const getResponse = ( ) => futureResponse;
		return this._pushHandler( origin, pushedRequest, getResponse );
	}
	// Open a new HTTP/2 session to <origin> and wrap it in an
	// H2SessionItem with ref-counting and a connection promise that
	// rejects on close/timeout/error.
	private connectHttp2(
		origin: string,
		extraOptions: SecureClientSessionOptions = { }
	)
	: H2SessionItem
	{
		const makeConnectionTimeout = ( ) =>
			new TimeoutError( `Connection timeout to ${origin}` );
		const makeError = ( event?: string ) =>
			event
			? new Error( `Unknown connection error (${event}): ${origin}` )
			: new Error( `Connection closed` );
		let session: ClientHttp2Session = < ClientHttp2Session >< any >void 0;
		// TODO: #8
		// tslint:disable-next-line
		const aGuard = asyncGuard( console.error.bind( console ) );
		const sessionRefs = { } as Pick< H2SessionItem, 'ref' | 'unref' >;
		// Install ref/unref closures that only touch the underlying
		// socket on the 0 <-> 1 ref-count transitions.
		const makeRefs = ( session: ClientHttp2Session ) =>
		{
			const monkeySession = < MonkeyH2Session >session;
			monkeySession.__fetch_h2_refcount = 1; // Begins ref'd
			sessionRefs.ref = ( ) =>
			{
				if ( isDestroyed( session ) )
					return;
				if ( monkeySession.__fetch_h2_refcount === 0 )
					// Go from unref'd to ref'd
					session.ref( );
				++monkeySession.__fetch_h2_refcount;
			};
			sessionRefs.unref = ( ) =>
			{
				if ( isDestroyed( session ) )
					return;
				--monkeySession.__fetch_h2_refcount;
				if ( monkeySession.__fetch_h2_refcount === 0 )
					// Go from ref'd to unref'd
					session.unref( );
			};
		};
		// Caller-provided extras override the per-origin defaults.
		const options = {
			...this._getSessionOptions( origin ),
			...extraOptions,
		};
		const promise = new Promise< ClientHttp2Session >(
			( resolve, reject ) =>
			{
				session =
					http2Connect( origin, options, ( ) => resolve( session ) );
				makeRefs( session );
				// Server pushes arrive as "stream" events on the session.
				session.on( "stream", aGuard(
					(
						stream: ClientHttp2Stream,
						headers: IncomingHttp2Headers
					) =>
					this.handlePush(
						origin,
						stream,
						headers,
						( ) => sessionRefs.ref( ),
						( ) => sessionRefs.unref( )
					)
				) );
				session.once( "close", ( ) =>
					reject( makeOkError( makeError( ) ) ) );
				session.once( "timeout", ( ) =>
					reject( makeConnectionTimeout( ) ) );
				session.once( "error", reject );
			}
		);
		return {
			firstOrigin: origin,
			promise,
			ref: ( ) => sessionRefs.ref( ),
			session,
			unref: ( ) => sessionRefs.unref( ),
		}
	}
import * as React from 'react';
import * as intl from 'react-intl-universal';
import _merge from 'lodash-es/merge';
import MUIDataTable from 'mui-datatables';
import { createStyles, withStyles } from '@material-ui/core';
export type SortDirection = 'asc' | 'desc';
export interface DataTableColumn {
    // NOTE(review): presumably the row-data field backing this column —
    // confirm against the column-generation code further down.
    key: string;
    // Header label shown for the column.
    title: string;
    hint?: string;
    filter?: boolean;
    filterType?: 'checkbox' | 'dropdown' | 'multiselect' | 'textField';
    sort?: boolean;
    sortDirection?: SortDirection;
    searchable?: boolean;
    displayInPrint?: boolean;
    displayInDownloadCsv?: boolean;
}
// One page of (server-side) data plus total record count.
export interface DataTablePageMeta {
    page: number;
    count: number;
    data: any[];
}
export interface DataTableOptions {
    sort?: boolean;
    filter?: boolean;
    search?: boolean;
    print?: boolean;
    download?: boolean;
    viewColumns?: boolean;
    selectableRows?: 'multiple' | 'single' | 'none';
}
// Shape of mui-datatables' internal state as surfaced to callbacks.
export interface MuiDataTableState {
    announceText?: string;
    page: number;
    rowsPerPage: number;
    filterList: any[];
    selectedRows: {
        data: any[];
        lookup: object;
    };
    showResponsive: boolean;
    searchText?: string;
}
// Parameters sent to dataPromise when fetching a page of data.
// Pages are 1-based here (the component converts to 0-based for
// mui-datatables).
export interface DataTableRequestParameters {
    sort?: string;
    sortDirection?: SortDirection;
    page?: number;
    pageSize?: number;
    searchText?: string;
}
export interface DataTableProps {
    classes: any;
    // Static data; mutually exclusive in practice with dataPromise.
    data?: any[];
    columns?: DataTableColumn[];
    options?: DataTableOptions | any;
    className?: string;
    title?: string;
    rowsPerPageOptions?: number[];
    pagination?: boolean;
    scrollable?: boolean;
    // boolean is accepted as shorthand: true => 'multiple', false => 'none'.
    selectable?: 'multiple' | 'single' | 'none' | boolean;
    localePrefix?: string;
    onRowClick?: (rowData: any, dataIndex: number) => void;
    onRowsDelete?: (rowsData: any[]) => Promise<any>;
    // Server-side data source: resolves one page of rows per request.
    dataPromise?: (
        parameters?: DataTableRequestParameters,
    ) => Promise<DataTablePageMeta>;
}
export interface DataTableState extends DataTableRequestParameters {
    isLoading: boolean;
    data?: any[];
    recordCount?: number;
}
// No custom JSS rules yet; kept so the withStyles wiring stays in place.
const styles = () => createStyles({});
class DataTableComponent extends React.Component<
DataTableProps,
DataTableState
> {
private data: any[];
private columns: any[];
private searchDelayTimer: any;
private rowsPerPageOptions: number[];
private defaultRowsPerPageOptions: number[] = [10, 20, 50];
    public constructor(props: Readonly<DataTableProps>) {
        super(props);
        // Fall back to the built-in page-size choices when the caller
        // does not provide any.
        this.rowsPerPageOptions =
            this.props.rowsPerPageOptions || this.defaultRowsPerPageOptions;
        this.state = {
            // Pages are tracked 1-based here; mui-datatables receives
            // page - 1 (see render()).
            page: 1,
            isLoading: false,
            recordCount: 0,
            pageSize: this.rowsPerPageOptions[0],
        };
    }
    public componentDidMount() {
        // Trigger the initial data load once mounted (getData is
        // defined later in this class; presumably it calls dataPromise
        // — confirm).
        this.getData();
    }
public componentWillUnmount() {
if (this.searchDelayTimer) {
clearTimeout(this.searchDelayTimer);
}
}
public render() {
this.data = this.props.data || this.state.data;
const { title, options } = this.props;
const { isLoading } = this.state;
const defaultOptions = {
filterType: 'dropdown',
responsive: 'scroll',
selectableRows:
this.props.selectable === true
? 'multiple'
: this.props.selectable === false
? 'none'
: this.props.selectable,
textLabels: {
body: {
noMatch: isLoading
? intl.get('loadingData')
: intl.get('noData'),
},
filter: {
all: intl.get('all'),
title: intl.get('filters'),
reset: intl.get('reset'),
},
selectedRows: {
text: intl.get('itemsSelected'),
delete: intl.get('delete'),
deleteAria: intl.get('delete'),
},
pagination: {
next: intl.get('nextPage'),
previous: intl.get('previousPage'),
rowsPerPage: intl.get('rowsPerPage'),
displayRows: intl.get('of'),
},
toolbar: {
search: intl.get('search'),
downloadCsv: intl.get('downloadCsv'),
print: intl.get('print'),
viewColumns: intl.get('viewColumns'),
filterTable: intl.get('filter'),
},
viewColumns: {
title: intl.get('viewColumns'),
titleAria: intl.get('toggleColumns'),
},
},
};
if (!this.props.columns) {
this.generateColumnsFromData();
} else {
this.generateColumnsFromProps();
}
if (this.state.sort) {
this.columns.forEach(column => {
if (column.label === this.state.sort) {
column.options.sortDirection =
this.state.sortDirection || 'asc';
}
});
}
const finalOptions = _merge(defaultOptions, options, {
responsive: this.props.scrollable ? 'scroll' : 'stacked',
pagination:
typeof this.props.pagination === 'undefined'
? true
: this.props.pagination,
count: this.state.recordCount,
page: this.state.page - 1,
rowsPerPage: this.state.pageSize,
rowsPerPageOptions: this.rowsPerPageOptions,
onRowClick: (
rowData: string[],
rowMeta: { dataIndex: number; rowIndex: number },
) => {
if (this.props.onRowClick) {
this.props.onRowClick(
this.data[rowMeta.dataIndex],
rowMeta.dataIndex,
);
}
},
onRowsDelete: (rowsDeleted: {
lookup: { [dataIndex: number]: boolean };
data: { index: number; dataIndex: number }[];
}) => {
if (this.props.onRowsDelete) {
const needDeleteRows = [];
rowsDeleted.data.forEach(item => {
needDeleteRows.push(this.data[item.dataIndex]);
});
this.props.onRowsDelete(needDeleteRows).then(() => {
this.getData();
});
// false to prevent the deletion on UI
return false;
}
return true;
},
onSearchChange: (searchText: string) => {
if (this.searchDelayTimer) {
clearTimeout(this.searchDelayTimer);
}
this.searchDelayTimer = setTimeout(() => {
this.setState(
{
page: 1,
searchText,
},
() => {
this.getData();
},
);
}, 1000);
},
onSearchClose: () => {
this.setState(
{
page: 1,
searchText: null,
},
() => {
this.getData();
},
);
},
onChangePage: (currentPage: number) => {
this.setState(
{
page: currentPage + 1,
},
() => {
this.getData();
},
);
},
onColumnSortChange: (column: string) => {
let sortDirection: SortDirection = 'asc';
if (
column === this.state.sort &&
this.state.sortDirection === 'asc'
) {
sortDirection = 'desc';
}
const changedState = this.props.dataPromise
? {
page: 1,
sort: column,
sortDirection,
}
: {
sort: column,
sortDirection,
};
this.setState(changedState, () => {
this.getData();
});
},
onTableChange: (action: string, tableState: MuiDataTableState) => {
switch (action) {
case 'changeRowsPerPage':
this.setState(
{
page: 1,
pageSize: tableState.rowsPerPage,
},
() => {
this.getData();
},
);
break;
}
},
});
if (this.props.dataPromise) {
finalOptions.serverSide = true;
}
// The `key` attribute for fixing count does not refresh in pagination
// More info: https://github.com/gregnb/mui-datatables/issues/610
return (
<MUIDataTable
key={this.state.recordCount}
title={title}
count={this.state.recordCount}
data={this.data}
columns={this.columns}
options={finalOptions}
/>
);
}
private getData() {
if (this.props.dataPromise) {
this.setState({
isLoading: true,
});
this.props
.dataPromise(this.state)
.then((result: DataTablePageMeta) => {
const totalPages = Math.ceil(
result.count / this.state.pageSize,
);
this.setState({
isLoading: false,
data: result.data,
recordCount: result.count,
page:
result.page > totalPages ? totalPages : result.page,
});
});
}
}
private generateColumnsFromProps() {
this.columns = [];
this.props.columns.forEach(column => {
this.columns.push({
name: column.title,
label: column.key,
hint: column.hint,
options: {
filter: column.filter,
filterType: column.filterType,
sort: column.sort,
sortDirection: column.sortDirection,
print: column.displayInPrint,
download: column.displayInDownloadCsv,
searchable: column.searchable,
},
});
});
}
private generateColumnsFromData() {
if (this.data && this.data.length > 0) {
if (Array.isArray(this.data[0])) {
this.columns = this.data[0];
this.data = this.data.slice(1);
} else {
this.columns = [];
for (const key of Object.keys(this.data[0])) {
this.columns.push({
name: intl
.get(`${this.props.localePrefix || ''}${key}`)
.defaultMessage(key),
label: key,
options: {
filter: true,
sort: true,
},
});
}
}
}
}
}
export const DataTable = withStyles(styles)(DataTableComponent); | the_stack |
import "./App.css";
import OpenLogin from "openlogin";
import { useEffect, useState } from "react";
import { getStarkHDAccount, STARKNET_NETWORKS, sign, verify } from "@toruslabs/openlogin-starkkey";
import { binaryToHex, binaryToUtf8, bufferToBinary, bufferToHex, hexToBinary, removeHexPrefix } from "enc-utils";
import type { ec } from "elliptic";
import { deployContract, CompiledContract, waitForTx, Contract, Abi, utils, hashMessage, pedersen } from "starknet";
import CompiledAccountContractAbi from "./contracts/account_abi.json";
import { BN } from "bn.js";
// Torus OpenLogin project id (public client id, safe to embed in the frontend).
const YOUR_PROJECT_ID = "BLTJPXxanIYyNTauQRb0dLJBYClvh6nU8G1SPct3K0ZUDksMgs1B5Sb-q533ng7a_owi4gHj1nvZZ_sK79b2Juw";
// Module-level OpenLogin singleton shared by all handlers in <App/>.
const openlogin = new OpenLogin({
  // your clientId aka projectId , get it from https://developer.tor.us
  // clientId is not required for localhost, you can set it to any string
  // for development
  clientId: YOUR_PROJECT_ID,
  network: "testnet",
  // you can pass login config to modify default
  // login options in login modal, also you can pass
  // your own verifiers.
  loginConfig: {
    google: {
      verifier: "tkey-google-lrc",
      name: "google",
      typeOfLogin: "google",
      showOnModal: true,
      showOnDesktop: true,
      showOnMobile: true,
    },
    facebook: {
      verifier: "tkey-facebook-lrc",
      name: "facebook",
      typeOfLogin: "facebook",
      showOnModal: true,
      showOnDesktop: false,
      showOnMobile: true,
      mainOption: true,
      description: "facebook social login",
    },
    // twitter: {
    //   verifier: "YOUR_CUSTOM_VERIFIER",
    //   name: "facebook",
    //   typeOfLogin: "facebook",
    //   showOnModal: true,
    //   showOnDesktop: true,
    //   showOnMobile: false,
    //   mainOption: true,
    //   description: "any description",
    // },
  },
});
/**
 * Demo app wiring OpenLogin social login to StarkNet:
 * derive a Stark HD key pair from the OpenLogin private key, sign/verify
 * messages with it, and deploy/initialize/operate an account contract on L2.
 */
function App() {
  const [loading, setLoading] = useState(false);
  const [CompiledAccountContract, setCompiledAccountContract] = useState<CompiledContract | null>(null);
  const [contractAddress, setContractAddress] = useState<string | null>(null);
  // Pretty-prints arbitrary values into the on-page #console element.
  const printToConsole = (...args: unknown[]): void => {
    const el = document.querySelector("#console>p");
    if (el) {
      el.innerHTML = JSON.stringify(args || {}, null, 2);
    }
  };
  const printUserInfo = async () => {
    const userInfo = await openlogin.getUserInfo();
    printToConsole(userInfo);
  };
  // Opens the OpenLogin modal; on success shows the logged-in user's info.
  async function login() {
    setLoading(true);
    try {
      const privKey = await openlogin.login({
        // pass empty string '' as loginProvider to open default torus modal
        // with all default supported login providers or you can pass specific
        // login provider from available list to set as default.
        // for ex: google, facebook, twitter etc
        loginProvider: "",
        redirectUrl: `${window.location.origin}`,
        relogin: true,
        // setting it true will force user to use touchid/faceid (if available on device)
        // while doing login again
        fastLogin: false,
        // setting skipTKey to true will display a button to user to skip
        // openlogin security while login.
        // But caveat here is that user will be get different keys if user is skipping tkey
        // so use this option with care in your app or make sure user knows about this.
        skipTKey: false,
        // you can pass standard oauth parameter in extralogin options
        // for ex: in case of passwordless login, you have to pass user's email as login_hint
        // and your app domain.
        // extraLoginOptions: {
        //   domain: 'www.yourapp.com',
        //   login_hint: 'hello@yourapp.com',
        // },
      });
      if (privKey && typeof privKey === "string") {
        await printUserInfo();
      }
      setLoading(false);
    } catch (error) {
      console.log("error", error);
      setLoading(false);
    }
  }
  useEffect(() => {
    setLoading(true);
    // Fetch the pre-compiled Cairo account contract used by deployAccountContract.
    fetch("https://raw.githubusercontent.com/himanshuchawla009/cairo-contracts/master/account_compiled.json")
      .then((response) => response.json())
      .then((responseJson) => {
        setCompiledAccountContract(responseJson);
      })
      .catch((error) => {
        printToConsole(error);
      });
    // Restore a previous OpenLogin session, if any.
    async function initializeOpenlogin() {
      try {
        await openlogin.init();
        if (openlogin.privKey) {
          await printUserInfo();
        }
        setLoading(false);
      } catch (error) {
        console.log("error while initialization", error);
      } finally {
        setLoading(false);
      }
    }
    initializeOpenlogin();
  }, []);
  // Derives the Stark key pair at the given HD index from the OpenLogin key.
  const getStarkAccount = (index: number): ec.KeyPair => {
    const account = getStarkHDAccount(openlogin.privKey, index, STARKNET_NETWORKS.testnet);
    return account;
  };
  // Form handler: derive and display the key pair at index 1.
  const starkHdAccount = (e: any): ec.KeyPair => {
    e.preventDefault();
    const accIndex = 1;
    const account = getStarkAccount(accIndex);
    printToConsole({
      privKey: account.getPrivate("hex"),
      pubKey: account.getPublic("hex"),
    });
    return account;
  };
  // UTF-8 string -> hex, bit-padded on the right to a 252-bit field element.
  const strToHex = (str: string): string => {
    return binaryToHex(bufferToBinary(Buffer.from(str, "utf8")).padEnd(252, "0"));
  };
  /**
   * Hashes an arbitrary-length UTF-8 message into a single Pedersen hash by
   * folding it 252 bits at a time (each round hashes the prefix with the
   * current chunk, and the result becomes the next round's prefix).
   * @param str utf 8 string to be signed
   * @param prefix utf-8 prefix padded to 252 bits (optional)
   * @returns
   */
  const getPedersenHashRecursively = (str: string, prefix?: string): string => {
    const TEST_MESSAGE_SUFFIX = prefix || "OPENLOGIN STARKWARE-";
    const x = Buffer.from(str, "utf8");
    const binaryStr = hexToBinary(bufferToHex(x));
    const rounds = Math.ceil(binaryStr.length / 252);
    if (rounds > 1) {
      const currentChunkHex = binaryToHex(binaryStr.substring(0, 252));
      const hash = pedersen([strToHex(TEST_MESSAGE_SUFFIX), new BN(currentChunkHex, "hex").toString(16)]);
      const pendingStr = binaryToUtf8(binaryStr.substring(252));
      return getPedersenHashRecursively(pendingStr.replace("\n", ""), hash);
    }
    const currentChunkHex = binaryToHex(binaryStr.padEnd(252, "0"));
    return pedersen([utils.number.toBN(strToHex(TEST_MESSAGE_SUFFIX), "hex"), utils.number.toBN(currentChunkHex, "hex")]);
  };
  // Signs the Pedersen hash of the entered message with the index-1 Stark key.
  const signMessageWithStarkKey = (e: any) => {
    e.preventDefault();
    const accIndex = 1;
    const message = e.target[0].value;
    const keyPair = getStarkAccount(accIndex);
    const hash = getPedersenHashRecursively(message);
    const signed = sign(keyPair, removeHexPrefix(hash));
    printToConsole({
      pedersenHash: hash,
      info: `Message signed successfully: OPENLOGIN STARKWARE- ${message}`,
      signedMesssage: signed,
    });
  };
  // Verifies a pasted signature (JSON {r, s, recoveryParam}) against the
  // original message using the same index-1 signing key.
  const validateStarkMessage = (e: any) => {
    e.preventDefault();
    const signingAccountIndex = 1;
    const originalMessage = e.target[0].value;
    const signedMessage = JSON.parse(e.target[1].value) as ec.Signature;
    if (!signedMessage.r || !signedMessage.s || signedMessage.recoveryParam === undefined) {
      printToConsole("Invalid signature format");
      return;
    }
    const finalSignature = {
      r: new BN(signedMessage.r, "hex"),
      s: new BN(signedMessage.s, "hex"),
      recoveryParam: signedMessage.recoveryParam,
    };
    const keyPair = getStarkAccount(signingAccountIndex);
    const hash = getPedersenHashRecursively(originalMessage);
    const isVerified = verify(keyPair, removeHexPrefix(hash), finalSignature as unknown as ec.Signature);
    printToConsole(`Message is verified: ${isVerified}`);
  };
  // Step 1: deploy the downloaded account contract with the index-1 public key.
  const deployAccountContract = async () => {
    try {
      if (!CompiledAccountContract) {
        printToConsole("Compiled contract is not downloaded, plz try again");
        return;
      }
      const accountIndex = 1;
      const keyPair = getStarkAccount(accountIndex);
      // Stark keys use only the x-coordinate (64 hex chars) of the public point.
      const compressedPubKey = keyPair.getPublic().getX().toString(16, 64);
      const txRes = await deployContract(JSON.parse(JSON.stringify(CompiledAccountContract)) as CompiledContract, [
        new BN(compressedPubKey, 16).toString(),
      ]);
      printToConsole("deployed account contract,", {
        contractRes: txRes,
        l2AccountAddress: txRes.address,
        txStatusLink: `https://voyager.online/tx/${txRes.transaction_hash}`,
      });
      await waitForTx(txRes.transaction_hash);
      printToConsole("successfully included in a block on l2", {
        txStatusLink: `https://voyager.online/tx/${txRes.transaction_hash}`,
      });
    } catch (error) {
      printToConsole(error);
    }
  };
  const onContractAddressChange = (e: any) => {
    setContractAddress(e.target.value);
  };
  // Step 2: tell the deployed contract its own L2 address.
  const initializeAccountContract = async () => {
    try {
      if (!contractAddress) {
        printToConsole("PLease input contract/account address");
        return;
      }
      const contract = new Contract(CompiledAccountContractAbi as Abi[], contractAddress);
      const txRes = await contract.invoke("initialize", {
        _address: contractAddress,
      });
      printToConsole("deployed account contract,", {
        contractRes: txRes,
        txStatusLink: `https://voyager.online/tx/${txRes.transaction_hash}`,
      });
      await waitForTx(txRes.transaction_hash);
      printToConsole("successfully included in a block", {
        txStatusLink: `https://voyager.online/tx/${txRes.transaction_hash}`,
      });
    } catch (error) {
      printToConsole(error);
    }
  };
  // Read-only call: current public key registered in the account contract.
  const getPublickeyFromContract = async () => {
    try {
      if (!contractAddress) {
        printToConsole("PLease input contract/account address");
        return;
      }
      const account = new Contract(CompiledAccountContractAbi as Abi[], contractAddress);
      const res = await account.call("get_public_key", {});
      printToConsole(res);
    } catch (error) {
      printToConsole((error as Error).toString());
    }
  };
  // Read-only call: the address stored by initialize().
  const getAddressFromContract = async () => {
    try {
      if (!contractAddress) {
        printToConsole("PLease input contract/account address");
        return;
      }
      const account = new Contract(CompiledAccountContractAbi as Abi[], contractAddress);
      const res = await account.call("get_address", {});
      printToConsole(res);
    } catch (error) {
      printToConsole(error);
    }
  };
  // Note: this function sets a new public key for the account, taken from HD
  // account index 3 (newAccountIndex below), signed by the current index-1 key.
  // Once this transaction succeeds, only the index-3 key can sign future
  // transactions for this account contract.
  const updatePublickeyInContract = async () => {
    try {
      if (!contractAddress) {
        printToConsole("PLease input contract/account address");
        return;
      }
      const newAccountIndex = 3;
      const keyPair = getStarkAccount(newAccountIndex);
      const compressedPubKey = keyPair.getPublic().getX().toString(16, 64);
      const account = new Contract(CompiledAccountContractAbi as Abi[], contractAddress);
      // Nonce must be included in the signed message to prevent replay.
      const { res: nonceRes } = await account.call("get_nonce");
      const msgHash = removeHexPrefix(
        hashMessage(
          contractAddress,
          contractAddress,
          utils.starknet.getSelectorFromName("set_public_key"),
          [
            new BN(compressedPubKey, 16).toString(),
            // contractAddress,
          ],
          nonceRes.toString()
        )
      );
      // Sign with the CURRENT key (index 1); the contract checks this signature.
      const signingAccountIndex = 1;
      const signingKeyPair = getStarkAccount(signingAccountIndex);
      const { r, s } = sign(signingKeyPair, msgHash);
      const res = await account.invoke(
        "execute",
        {
          to: contractAddress,
          selector: utils.starknet.getSelectorFromName("set_public_key"),
          calldata: [
            new BN(compressedPubKey, 16).toString(),
            // contractAddress,
          ],
        },
        [utils.number.toHex(r), utils.number.toHex(s)]
      );
      printToConsole(res);
      await waitForTx(res.transaction_hash);
      printToConsole("transaction successfully included in a block", {
        txStatusLink: `https://voyager.online/tx/${res.transaction_hash}`,
      });
    } catch (error) {
      console.log(error);
      printToConsole((error as Error).toString());
    }
  };
  const logout = async () => {
    try {
      setLoading(true);
      await openlogin.logout({});
    } catch (error) {
      printToConsole("error while logout", error);
    } finally {
      setLoading(false);
    }
  };
  return (
    <>
      {loading ? (
        <div>
          <div
            style={{
              display: "flex",
              flexDirection: "column",
              width: "100%",
              justifyContent: "center",
              alignItems: "center",
              margin: 20,
            }}
          >
            <h1>....loading</h1>
          </div>
        </div>
      ) : (
        <div className="App">
          {!openlogin.privKey ? (
            <div>
              <h3>Openlogin X Starkware</h3>
              <button onClick={login}>login</button>
            </div>
          ) : (
            <div>
              <section>
                <div>
                  Openlogin Private key:
                  <i>{openlogin.privKey}</i>
                  <button onClick={() => logout()}>Logout</button>
                </div>
                <div>
                  <div style={{ display: "flex", flexDirection: "row", justifyContent: "center", alignItems: "center" }}>
                    <button onClick={printUserInfo}>Get User Info</button>
                    <form onSubmit={starkHdAccount}>
                      <button type="submit">Get Stark Account </button>
                    </form>
                    <button onClick={deployAccountContract}>Deploy Account Contract (Step 1)</button>
                  </div>
                  <br />
                  <div style={{ display: "flex", flexDirection: "column", justifyContent: "center", alignItems: "center" }}>
                    <textarea
                      id="contractAddress"
                      rows={3}
                      cols={50}
                      placeholder="Enter Contract/L2 account address"
                      onChange={onContractAddressChange}
                    />
                    <div style={{ display: "flex", flexDirection: "row", justifyContent: "center", alignItems: "center" }}>
                      <button onClick={initializeAccountContract}>Initialize Account Contract (Step 2) </button>
                      <button onClick={getPublickeyFromContract}>Get Publickey From Contract (Step 3)</button>
                      <button onClick={getAddressFromContract}>Get Address From Contract (Step 3)</button>
                      <button onClick={updatePublickeyInContract}>Update Publickey In Contract (Step 3)</button>
                    </div>
                  </div>
                  <br />
                  <br />
                  <hr />
                  <form
                    onSubmit={signMessageWithStarkKey}
                    style={{ display: "flex", flexDirection: "column", justifyContent: "center", alignItems: "center" }}
                  >
                    <textarea id="message" rows={3} cols={50} placeholder="Enter message" required />
                    <button type="submit">Sign Message with StarkKey </button>
                  </form>
                  <br />
                  <br />
                  <hr />
                  <form
                    onSubmit={validateStarkMessage}
                    style={{ display: "flex", flexDirection: "column", justifyContent: "center", alignItems: "center" }}
                  >
                    <textarea id="originalMessage" cols={100} rows={5} placeholder="Enter Original Message" required />
                    <textarea id="signedMessage" cols={100} rows={5} placeholder="Enter Signed Message" required />
                    <button type="submit">Validate Stark Message</button>
                  </form>
                  <div id="console" style={{ whiteSpace: "pre-line" }}>
                    <p style={{ whiteSpace: "pre-line" }} />
                  </div>
                </div>
              </section>
            </div>
          )}
        </div>
      )}
    </>
  );
}
export default App; | the_stack |
import * as msRest from "@azure/ms-rest-js";
import * as Models from "../models";
import * as Mappers from "../models/accessPoliciesMappers";
import * as Parameters from "../models/parameters";
import { TimeSeriesInsightsClientContext } from "../timeSeriesInsightsClientContext";
/** Class representing a AccessPolicies. */
// NOTE: This is AutoRest-style generated client code. Each operation exposes a
// promise overload, two callback overloads, and a single shared implementation
// that forwards to sendOperationRequest with the matching OperationSpec below.
export class AccessPolicies {
  // Shared service-client context used to issue every operation request.
  private readonly client: TimeSeriesInsightsClientContext;
  /**
   * Create a AccessPolicies.
   * @param {TimeSeriesInsightsClientContext} client Reference to the service client.
   */
  constructor(client: TimeSeriesInsightsClientContext) {
    this.client = client;
  }
  /**
   * Create or update an access policy in the specified environment.
   * @param resourceGroupName Name of an Azure Resource group.
   * @param environmentName The name of the Time Series Insights environment associated with the
   * specified resource group.
   * @param accessPolicyName Name of the access policy.
   * @param parameters Parameters for creating an access policy.
   * @param [options] The optional parameters
   * @returns Promise<Models.AccessPoliciesCreateOrUpdateResponse>
   */
  createOrUpdate(resourceGroupName: string, environmentName: string, accessPolicyName: string, parameters: Models.AccessPolicyCreateOrUpdateParameters, options?: msRest.RequestOptionsBase): Promise<Models.AccessPoliciesCreateOrUpdateResponse>;
  /**
   * @param resourceGroupName Name of an Azure Resource group.
   * @param environmentName The name of the Time Series Insights environment associated with the
   * specified resource group.
   * @param accessPolicyName Name of the access policy.
   * @param parameters Parameters for creating an access policy.
   * @param callback The callback
   */
  createOrUpdate(resourceGroupName: string, environmentName: string, accessPolicyName: string, parameters: Models.AccessPolicyCreateOrUpdateParameters, callback: msRest.ServiceCallback<Models.AccessPolicyResource>): void;
  /**
   * @param resourceGroupName Name of an Azure Resource group.
   * @param environmentName The name of the Time Series Insights environment associated with the
   * specified resource group.
   * @param accessPolicyName Name of the access policy.
   * @param parameters Parameters for creating an access policy.
   * @param options The optional parameters
   * @param callback The callback
   */
  createOrUpdate(resourceGroupName: string, environmentName: string, accessPolicyName: string, parameters: Models.AccessPolicyCreateOrUpdateParameters, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.AccessPolicyResource>): void;
  // Implementation shared by the three overloads above.
  createOrUpdate(resourceGroupName: string, environmentName: string, accessPolicyName: string, parameters: Models.AccessPolicyCreateOrUpdateParameters, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.AccessPolicyResource>, callback?: msRest.ServiceCallback<Models.AccessPolicyResource>): Promise<Models.AccessPoliciesCreateOrUpdateResponse> {
    return this.client.sendOperationRequest(
      {
        resourceGroupName,
        environmentName,
        accessPolicyName,
        parameters,
        options
      },
      createOrUpdateOperationSpec,
      callback) as Promise<Models.AccessPoliciesCreateOrUpdateResponse>;
  }
  /**
   * Gets the access policy with the specified name in the specified environment.
   * @param resourceGroupName Name of an Azure Resource group.
   * @param environmentName The name of the Time Series Insights environment associated with the
   * specified resource group.
   * @param accessPolicyName The name of the Time Series Insights access policy associated with the
   * specified environment.
   * @param [options] The optional parameters
   * @returns Promise<Models.AccessPoliciesGetResponse>
   */
  get(resourceGroupName: string, environmentName: string, accessPolicyName: string, options?: msRest.RequestOptionsBase): Promise<Models.AccessPoliciesGetResponse>;
  /**
   * @param resourceGroupName Name of an Azure Resource group.
   * @param environmentName The name of the Time Series Insights environment associated with the
   * specified resource group.
   * @param accessPolicyName The name of the Time Series Insights access policy associated with the
   * specified environment.
   * @param callback The callback
   */
  get(resourceGroupName: string, environmentName: string, accessPolicyName: string, callback: msRest.ServiceCallback<Models.AccessPolicyResource>): void;
  /**
   * @param resourceGroupName Name of an Azure Resource group.
   * @param environmentName The name of the Time Series Insights environment associated with the
   * specified resource group.
   * @param accessPolicyName The name of the Time Series Insights access policy associated with the
   * specified environment.
   * @param options The optional parameters
   * @param callback The callback
   */
  get(resourceGroupName: string, environmentName: string, accessPolicyName: string, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.AccessPolicyResource>): void;
  // Implementation shared by the three overloads above.
  get(resourceGroupName: string, environmentName: string, accessPolicyName: string, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.AccessPolicyResource>, callback?: msRest.ServiceCallback<Models.AccessPolicyResource>): Promise<Models.AccessPoliciesGetResponse> {
    return this.client.sendOperationRequest(
      {
        resourceGroupName,
        environmentName,
        accessPolicyName,
        options
      },
      getOperationSpec,
      callback) as Promise<Models.AccessPoliciesGetResponse>;
  }
  /**
   * Updates the access policy with the specified name in the specified subscription, resource group,
   * and environment.
   * @param resourceGroupName Name of an Azure Resource group.
   * @param environmentName The name of the Time Series Insights environment associated with the
   * specified resource group.
   * @param accessPolicyName The name of the Time Series Insights access policy associated with the
   * specified environment.
   * @param accessPolicyUpdateParameters Request object that contains the updated information for the
   * access policy.
   * @param [options] The optional parameters
   * @returns Promise<Models.AccessPoliciesUpdateResponse>
   */
  update(resourceGroupName: string, environmentName: string, accessPolicyName: string, accessPolicyUpdateParameters: Models.AccessPolicyUpdateParameters, options?: msRest.RequestOptionsBase): Promise<Models.AccessPoliciesUpdateResponse>;
  /**
   * @param resourceGroupName Name of an Azure Resource group.
   * @param environmentName The name of the Time Series Insights environment associated with the
   * specified resource group.
   * @param accessPolicyName The name of the Time Series Insights access policy associated with the
   * specified environment.
   * @param accessPolicyUpdateParameters Request object that contains the updated information for the
   * access policy.
   * @param callback The callback
   */
  update(resourceGroupName: string, environmentName: string, accessPolicyName: string, accessPolicyUpdateParameters: Models.AccessPolicyUpdateParameters, callback: msRest.ServiceCallback<Models.AccessPolicyResource>): void;
  /**
   * @param resourceGroupName Name of an Azure Resource group.
   * @param environmentName The name of the Time Series Insights environment associated with the
   * specified resource group.
   * @param accessPolicyName The name of the Time Series Insights access policy associated with the
   * specified environment.
   * @param accessPolicyUpdateParameters Request object that contains the updated information for the
   * access policy.
   * @param options The optional parameters
   * @param callback The callback
   */
  update(resourceGroupName: string, environmentName: string, accessPolicyName: string, accessPolicyUpdateParameters: Models.AccessPolicyUpdateParameters, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.AccessPolicyResource>): void;
  // Implementation shared by the three overloads above.
  update(resourceGroupName: string, environmentName: string, accessPolicyName: string, accessPolicyUpdateParameters: Models.AccessPolicyUpdateParameters, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.AccessPolicyResource>, callback?: msRest.ServiceCallback<Models.AccessPolicyResource>): Promise<Models.AccessPoliciesUpdateResponse> {
    return this.client.sendOperationRequest(
      {
        resourceGroupName,
        environmentName,
        accessPolicyName,
        accessPolicyUpdateParameters,
        options
      },
      updateOperationSpec,
      callback) as Promise<Models.AccessPoliciesUpdateResponse>;
  }
  /**
   * Deletes the access policy with the specified name in the specified subscription, resource group,
   * and environment
   * @param resourceGroupName Name of an Azure Resource group.
   * @param environmentName The name of the Time Series Insights environment associated with the
   * specified resource group.
   * @param accessPolicyName The name of the Time Series Insights access policy associated with the
   * specified environment.
   * @param [options] The optional parameters
   * @returns Promise<msRest.RestResponse>
   */
  deleteMethod(resourceGroupName: string, environmentName: string, accessPolicyName: string, options?: msRest.RequestOptionsBase): Promise<msRest.RestResponse>;
  /**
   * @param resourceGroupName Name of an Azure Resource group.
   * @param environmentName The name of the Time Series Insights environment associated with the
   * specified resource group.
   * @param accessPolicyName The name of the Time Series Insights access policy associated with the
   * specified environment.
   * @param callback The callback
   */
  deleteMethod(resourceGroupName: string, environmentName: string, accessPolicyName: string, callback: msRest.ServiceCallback<void>): void;
  /**
   * @param resourceGroupName Name of an Azure Resource group.
   * @param environmentName The name of the Time Series Insights environment associated with the
   * specified resource group.
   * @param accessPolicyName The name of the Time Series Insights access policy associated with the
   * specified environment.
   * @param options The optional parameters
   * @param callback The callback
   */
  deleteMethod(resourceGroupName: string, environmentName: string, accessPolicyName: string, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<void>): void;
  // Implementation shared by the three overloads above. Named deleteMethod
  // because `delete` is a reserved word.
  deleteMethod(resourceGroupName: string, environmentName: string, accessPolicyName: string, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<void>, callback?: msRest.ServiceCallback<void>): Promise<msRest.RestResponse> {
    return this.client.sendOperationRequest(
      {
        resourceGroupName,
        environmentName,
        accessPolicyName,
        options
      },
      deleteMethodOperationSpec,
      callback);
  }
  /**
   * Lists all the available access policies associated with the environment.
   * @param resourceGroupName Name of an Azure Resource group.
   * @param environmentName The name of the Time Series Insights environment associated with the
   * specified resource group.
   * @param [options] The optional parameters
   * @returns Promise<Models.AccessPoliciesListByEnvironmentResponse>
   */
  listByEnvironment(resourceGroupName: string, environmentName: string, options?: msRest.RequestOptionsBase): Promise<Models.AccessPoliciesListByEnvironmentResponse>;
  /**
   * @param resourceGroupName Name of an Azure Resource group.
   * @param environmentName The name of the Time Series Insights environment associated with the
   * specified resource group.
   * @param callback The callback
   */
  listByEnvironment(resourceGroupName: string, environmentName: string, callback: msRest.ServiceCallback<Models.AccessPolicyListResponse>): void;
  /**
   * @param resourceGroupName Name of an Azure Resource group.
   * @param environmentName The name of the Time Series Insights environment associated with the
   * specified resource group.
   * @param options The optional parameters
   * @param callback The callback
   */
  listByEnvironment(resourceGroupName: string, environmentName: string, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.AccessPolicyListResponse>): void;
  // Implementation shared by the three overloads above.
  listByEnvironment(resourceGroupName: string, environmentName: string, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.AccessPolicyListResponse>, callback?: msRest.ServiceCallback<Models.AccessPolicyListResponse>): Promise<Models.AccessPoliciesListByEnvironmentResponse> {
    return this.client.sendOperationRequest(
      {
        resourceGroupName,
        environmentName,
        options
      },
      listByEnvironmentOperationSpec,
      callback) as Promise<Models.AccessPoliciesListByEnvironmentResponse>;
  }
}
// Operation Specifications
// Declarative request/response descriptions consumed by sendOperationRequest.
const serializer = new msRest.Serializer(Mappers);
// PUT .../accessPolicies/{accessPolicyName} — create or update a policy.
const createOrUpdateOperationSpec: msRest.OperationSpec = {
  httpMethod: "PUT",
  path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.TimeSeriesInsights/environments/{environmentName}/accessPolicies/{accessPolicyName}",
  urlParameters: [
    Parameters.subscriptionId,
    Parameters.resourceGroupName,
    Parameters.environmentName1,
    Parameters.accessPolicyName0
  ],
  queryParameters: [
    Parameters.apiVersion
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  requestBody: {
    parameterPath: "parameters",
    mapper: {
      ...Mappers.AccessPolicyCreateOrUpdateParameters,
      required: true
    }
  },
  responses: {
    200: {
      bodyMapper: Mappers.AccessPolicyResource
    },
    201: {
      bodyMapper: Mappers.AccessPolicyResource
    },
    default: {
      bodyMapper: Mappers.CloudError
    }
  },
  serializer
};
// GET .../accessPolicies/{accessPolicyName} — fetch one policy.
const getOperationSpec: msRest.OperationSpec = {
  httpMethod: "GET",
  path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.TimeSeriesInsights/environments/{environmentName}/accessPolicies/{accessPolicyName}",
  urlParameters: [
    Parameters.subscriptionId,
    Parameters.resourceGroupName,
    Parameters.environmentName1,
    Parameters.accessPolicyName1
  ],
  queryParameters: [
    Parameters.apiVersion
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {
      bodyMapper: Mappers.AccessPolicyResource
    },
    default: {
      bodyMapper: Mappers.CloudError
    }
  },
  serializer
};
// PATCH .../accessPolicies/{accessPolicyName} — partial update of a policy.
const updateOperationSpec: msRest.OperationSpec = {
  httpMethod: "PATCH",
  path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.TimeSeriesInsights/environments/{environmentName}/accessPolicies/{accessPolicyName}",
  urlParameters: [
    Parameters.subscriptionId,
    Parameters.resourceGroupName,
    Parameters.environmentName1,
    Parameters.accessPolicyName1
  ],
  queryParameters: [
    Parameters.apiVersion
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  requestBody: {
    parameterPath: "accessPolicyUpdateParameters",
    mapper: {
      ...Mappers.AccessPolicyUpdateParameters,
      required: true
    }
  },
  responses: {
    200: {
      bodyMapper: Mappers.AccessPolicyResource
    },
    default: {
      bodyMapper: Mappers.CloudError
    }
  },
  serializer
};
// DELETE .../accessPolicies/{accessPolicyName} — remove a policy (200 or 204).
const deleteMethodOperationSpec: msRest.OperationSpec = {
  httpMethod: "DELETE",
  path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.TimeSeriesInsights/environments/{environmentName}/accessPolicies/{accessPolicyName}",
  urlParameters: [
    Parameters.subscriptionId,
    Parameters.resourceGroupName,
    Parameters.environmentName1,
    Parameters.accessPolicyName1
  ],
  queryParameters: [
    Parameters.apiVersion
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {},
    204: {},
    default: {
      bodyMapper: Mappers.CloudError
    }
  },
  serializer
};
const listByEnvironmentOperationSpec: msRest.OperationSpec = {
httpMethod: "GET",
path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.TimeSeriesInsights/environments/{environmentName}/accessPolicies",
urlParameters: [
Parameters.subscriptionId,
Parameters.resourceGroupName,
Parameters.environmentName1
],
queryParameters: [
Parameters.apiVersion
],
headerParameters: [
Parameters.acceptLanguage
],
responses: {
200: {
bodyMapper: Mappers.AccessPolicyListResponse
},
default: {
bodyMapper: Mappers.CloudError
}
},
serializer
}; | the_stack |
import _ from "lodash";
import { ModuleProcessor } from "./module-processor";
import AnsiConvert from "ansi-to-html";
/**
 * Escape HTML markup characters so arbitrary text can be embedded in HTML.
 *
 * @param html raw text that may contain markup characters
 * @returns the text with `<` and `>` replaced by their HTML entities
 */
export function escapeHtml(html) {
  // Map each markup character to its HTML entity. The previous table mapped
  // "<" and ">" to themselves (the entities had been decoded somewhere along
  // the way), which made this function a no-op and left raw tags in output.
  const htmlMap = {
    "<": "&lt;",
    ">": "&gt;"
  };
  return html.replace(/([<>])/g, (_m, a) => htmlMap[a]);
}
/**
 * Extract the top-level summary fields from a webpack stats object.
 *
 * @param stats webpack stats JSON
 * @returns object with hash, version, time and publicPath (when present)
 */
export function getInfo(stats) {
  return _.pick(stats, ["hash", "version", "time", "publicPath"]);
}
/**
 * List the emitted assets from a webpack stats object, keeping only the
 * fields the report cares about.
 *
 * @param stats webpack stats JSON
 * @returns array of trimmed asset records; empty when there are no assets
 */
export function getAssets(stats) {
  const assets = stats.assets;
  if (assets && assets.length > 0) {
    return assets.map(asset =>
      _.pick(asset, ["name", "size", "chunks", "emitted", "chunkNames"])
    );
  }
  return [];
}
/**
 * Group the modules of a webpack stats object by their owning package.
 *
 * @param stats webpack stats JSON
 * @returns the per-package module grouping and the total size of all modules
 */
export function getModulesByPkg(stats) {
  const moduleProcessor = new ModuleProcessor(stats);
  const modulesByPkg = moduleProcessor.makeModulesByPackage();
  return { modulesByPkg, totalSize: moduleProcessor.totalSize };
}
/**
 * Convert ANSI-colored log lines into HTML strings.
 *
 * @param logs array of log lines, possibly undefined
 * @returns HTML strings; empty array when logs is falsy
 */
export function logsToHtml(logs) {
  if (!logs) {
    return [];
  }
  const convert = new AnsiConvert();
  return logs.map(line => convert.toHtml(escapeHtml(line)));
}
/**
 * Render webpack compilation errors as HTML strings.
 *
 * @param stats webpack stats JSON
 * @returns HTML strings for stats.errors
 */
export function getErrorsHtml(stats) {
  const { errors } = stats;
  return logsToHtml(errors);
}
/**
 * Render webpack compilation warnings as HTML strings.
 *
 * @param stats webpack stats JSON
 * @returns HTML strings for stats.warnings
 */
export function getWarningsHtml(stats) {
  const { warnings } = stats;
  return logsToHtml(warnings);
}
//
// A quick HTML output adapted from webpack's original Stats.jsonToString
//
/* eslint-disable */
/**
 * Render a webpack stats JSON object as an HTML string.
 *
 * @param obj webpack stats JSON (or a child compilation's stats)
 * @param useColors when true, wrap colored output in <b>/<span style> markup
 * @param anchors map of anchor names already emitted; shared across the
 *                recursion into child compilations so anchor ids stay unique
 * @returns the formatted HTML with trailing newlines trimmed
 */
export function jsonToHtml(obj, useColors, anchors) {
  anchors = anchors || {};
  const buf = [];
  const normal = str => buf.push(str);
  const bold = useColors ? str => buf.push(`<b>${str}</b>`) : str => buf.push(str);
  const colorOut = useColors
    ? (color, str) => buf.push(`<span style="color:${color}">${str}</span>`)
    : (color, str) => buf.push(str);
  const yellow = str => colorOut("#cccc33", str);
  const red = str => colorOut("red", str);
  const green = str => colorOut("green", str);
  const cyan = str => colorOut("cyan", str);
  const magenta = str => colorOut("magenta", str);
  // Color a duration relative to the total build time (fixed thresholds when
  // the total is unknown): normal < bold < green < yellow < red as it grows.
  const coloredTime = time => {
    let times = [800, 400, 200, 100];
    if (obj.time) {
      times = [obj.time / 2, obj.time / 4, obj.time / 8, obj.time / 16];
    }
    if (time < times[3]) {
      normal(time + "ms");
    } else if (time < times[2]) {
      bold(time + "ms");
    } else if (time < times[1]) {
      green(time + "ms");
    } else if (time < times[0]) {
      yellow(time + "ms");
    } else {
      red(time + "ms");
    }
  };
  const newline = () => buf.push("\n");
  // Emit an <a name=…> element once per anchor name; the shared `anchors` map
  // keeps recursive calls for child compilations from duplicating anchors.
  const anchor = name => {
    if (!anchors.hasOwnProperty(name)) {
      anchors[name] = true;
      buf.push(`<a name="${name}" id="anchor_${name}" />`);
    }
  };
  // Render a 2D string array as a space-padded table. Row 0 is the (bold)
  // header; formats[col] colors each data column; align[col] is "l" (print,
  // then pad) or "r" (pad, then print); the last column is never padded.
  const table = (array, formats, align, splitter = null) => {
    const rows = array.length;
    const cols = array[0].length;
    const colSizes = new Array(cols);
    for (let col = 0; col < cols; col++) {
      colSizes[col] = 3;
    }
    for (let row = 0; row < rows; row++) {
      for (let col = 0; col < cols; col++) {
        const value = array[row][col] + "";
        if (value.length > colSizes[col]) {
          colSizes[col] = value.length;
        }
      }
    }
    for (let row = 0; row < rows; row++) {
      for (let col = 0; col < cols; col++) {
        const format = row === 0 ? bold : formats[col];
        const value = `${array[row][col]}`;
        let l = value.length;
        if (align[col] === "l") {
          format(value);
        }
        for (; l < colSizes[col] && col !== cols - 1; l++) {
          normal(" ");
        }
        if (align[col] === "r") {
          format(value);
        }
        if (col + 1 < cols) {
          normal(splitter || " ");
        }
      }
      newline();
    }
  };
  // Human-readable size using decimal (1000-based) units, 3 significant digits.
  const formatSize = size => {
    if (size <= 0) return "0 bytes";
    const abbreviations = ["bytes", "kB", "MB", "GB"];
    const index = Math.floor(Math.log(size) / Math.log(1000));
    return +(size / Math.pow(1000, index)).toPrecision(3) + " " + abbreviations[index];
  };
  //
  // Generate info about webpack: Hash, Version, Time, PublicPath
  //
  if (obj.hash) {
    normal("Hash: ");
    bold(obj.hash);
    newline();
  }
  if (obj.version) {
    normal("Version: webpack ");
    bold(obj.version);
    newline();
  }
  if (typeof obj.time === "number") {
    normal("Time: ");
    bold(obj.time);
    normal("ms");
    newline();
  }
  if (obj.publicPath) {
    normal("PublicPath: ");
    bold(obj.publicPath);
    newline();
  }
  //
  // Generate info about assets
  if (obj.assets && obj.assets.length > 0) {
    const t = [["Asset", "Size", "Chunks", "", "Chunk Names"]];
    obj.assets.forEach(asset => {
      t.push([
        asset.name,
        formatSize(asset.size),
        asset.chunks.join(", "),
        asset.emitted ? "[emitted]" : "",
        asset.chunkNames.join(", ")
      ]);
    });
    table(t, [green, normal, bold, green, normal], "rrrll");
  }
  //
  // Helpers for generate info about each module
  //
  // Index every module by its identifier so profile/origin sections can
  // resolve issuer chains and origin modules by name.
  const modulesByIdentifier = {};
  if (obj.modules) {
    obj.modules.forEach(module => {
      modulesByIdentifier["$" + module.identifier] = module;
    });
  } else if (obj.chunks) {
    obj.chunks.forEach(chunk => {
      if (chunk.modules) {
        chunk.modules.forEach(module => {
          modulesByIdentifier["$" + module.identifier] = module;
        });
      }
    });
  }
  // Print a module's build-profile line: the issuer chain with per-step
  // timings, the module's own timing entries, and the sum when complete.
  const processProfile = module => {
    if (module.profile) {
      normal(" ");
      let sum = 0;
      let allowSum = true;
      const path = [];
      let current = module;
      while (current.issuer) {
        if (!modulesByIdentifier["$" + current.issuer]) {
          normal(" ... ->");
          allowSum = false; // chain is broken, a total would be misleading
          break;
        }
        path.unshift((current = modulesByIdentifier["$" + current.issuer]));
      }
      path.forEach(mod => {
        normal(" [");
        normal(mod.id);
        normal("] ");
        if (mod.profile) {
          const time = (mod.profile.factory || 0) + (mod.profile.building || 0);
          coloredTime(time);
          sum += time;
          normal(" ");
        }
        normal("->");
      });
      Object.keys(module.profile).forEach(key => {
        normal(" " + key + ":");
        const time = module.profile[key];
        coloredTime(time);
        sum += time;
      });
      if (allowSum) {
        normal(" = ");
        coloredTime(sum);
      }
      newline();
    }
  };
  // Print a module's size and status flags ([built], [failed], warnings, …).
  function processModuleAttributes(module) {
    normal(" ");
    normal(formatSize(module.size));
    if (module.chunks) {
      module.chunks.forEach(chunk => {
        normal(" {");
        yellow(chunk);
        normal("}");
      });
    }
    if (!module.cacheable) {
      red(" [not cacheable]");
    }
    if (module.optional) {
      yellow(" [optional]");
    }
    if (module.built) {
      green(" [built]");
    }
    if (module.prefetched) {
      magenta(" [prefetched]");
    }
    if (module.failed) red(" [failed]");
    if (module.warnings)
      yellow(" [" + module.warnings + " warning" + (module.warnings === 1 ? "" : "s") + "]");
    if (module.errors)
      red(" [" + module.errors + " error" + (module.errors === 1 ? "" : "s") + "]");
  }
  //
  // Generate info about chunks
  //
  if (obj.chunks) {
    obj.chunks.forEach(chunk => {
      normal("chunk ");
      // Right-align numeric chunk ids up to 4 digits.
      if (chunk.id < 1000) normal(" ");
      if (chunk.id < 100) normal(" ");
      if (chunk.id < 10) normal(" ");
      normal("{");
      yellow(chunk.id);
      normal("} ");
      green(chunk.files.join(", "));
      if (chunk.names && chunk.names.length > 0) {
        normal(" (");
        normal(chunk.names.join(", "));
        normal(")");
      }
      normal(" ");
      normal(formatSize(chunk.size));
      chunk.parents.forEach(id => {
        normal(" {");
        yellow(id);
        normal("}");
      });
      if (chunk.rendered) {
        green(" [rendered]");
      }
      newline();
      if (chunk.origins) {
        chunk.origins.forEach(origin => {
          normal(" > ");
          if (origin.reasons && origin.reasons.length) {
            yellow(origin.reasons.join(" "));
            normal(" ");
          }
          if (origin.name) {
            normal(origin.name);
            normal(" ");
          }
          if (origin.module) {
            normal("[");
            normal(origin.moduleId);
            normal("] ");
            const module = modulesByIdentifier["$" + origin.module];
            if (module) {
              bold(module.name);
              normal(" ");
            }
            if (origin.loc) {
              normal(origin.loc);
            }
          }
          newline();
        });
      }
      if (chunk.modules) {
        chunk.modules.forEach(module => {
          normal(" ");
          if (module.id < 1000) normal(" ");
          if (module.id < 100) normal(" ");
          if (module.id < 10) normal(" ");
          normal("[");
          normal(module.id);
          normal("] ");
          bold(module.name);
          processModuleAttributes(module);
          newline();
          if (module.reasons) {
            module.reasons.forEach(reason => {
              normal(" ");
              normal(reason.type);
              normal(" ");
              cyan(reason.userRequest);
              if (reason.templateModules) cyan(reason.templateModules.join(" "));
              normal(" [");
              normal(reason.moduleId);
              normal("] ");
              magenta(reason.module);
              if (reason.loc) {
                normal(" ");
                normal(reason.loc);
              }
              newline();
            });
          }
          processProfile(module);
        });
        if (chunk.filteredModules > 0) {
          normal(" + " + chunk.filteredModules + " hidden modules");
          newline();
        }
      }
    });
  }
  //
  // Display modules
  //
  if (obj.modules) {
    obj.modules.forEach(module => {
      if (module.id < 1000) normal(" ");
      if (module.id < 100) normal(" ");
      if (module.id < 10) normal(" ");
      normal("[");
      normal(module.id);
      normal("] ");
      bold(module.name || module.identifier);
      processModuleAttributes(module);
      newline();
      if (module.reasons) {
        module.reasons.forEach(reason => {
          normal(" ");
          normal(reason.type);
          normal(" ");
          cyan(reason.userRequest);
          if (reason.templateModules) cyan(reason.templateModules.join(" "));
          normal(" [");
          normal(reason.moduleId);
          normal("] ");
          magenta(reason.module);
          if (reason.loc) {
            normal(" ");
            normal(reason.loc);
          }
          newline();
        });
      }
      processProfile(module);
    });
    if (obj.filteredModules > 0) {
      normal(" + " + obj.filteredModules + " hidden modules");
      newline();
    }
  }
  //
  // Display warnings
  //
  if (obj._showWarnings && obj.warnings) {
    anchor("warning");
    obj.warnings.forEach(warning => {
      newline();
      yellow("WARNING in " + warning);
      newline();
    });
  }
  //
  // Display errors
  //
  if (obj._showErrors && obj.errors) {
    anchor("error");
    const convert = new AnsiConvert();
    obj.errors.forEach(error => {
      newline();
      red("ERROR in " + convert.toHtml(escapeHtml(error)));
      newline();
    });
  }
  //
  // Display children (child compilations), indented by one space
  //
  if (obj.children) {
    obj.children.forEach(child => {
      if (child.name) {
        normal("Child ");
        bold(child.name);
        normal(":");
      } else {
        normal("Child");
      }
      newline();
      buf.push(" ");
      // BUG FIX: the original called this.jsonToHtml, but `this` is undefined
      // when this exported function is called directly (ES module / strict
      // mode), so any stats object with children threw. Recurse by name.
      buf.push(jsonToHtml(child, useColors, anchors).replace(/\n/g, "\n "));
      newline();
    });
  }
  // Trim trailing blank lines before joining the buffer.
  while (buf[buf.length - 1] === "\n") {
    buf.pop();
  }
  return buf.join("");
}
// -----------------------------------------------------------------------------
// Auto-generated ambient typings for the ProGuard-obfuscated internal classes
// (zz*) of com.google.android.gms.internal.firebase_ml_naturallanguage.
// These back the public Firebase ML Natural Language API declared further
// below; their real semantics are not recoverable from the obfuscated names,
// so each comment describes only the visible shape of the declaration.
// -----------------------------------------------------------------------------
declare module com {
export module google {
export module android {
export module gms {
export module internal {
export module firebase_ml_naturallanguage {
// Interface typing shaped like java.util.concurrent.Future<any>.
export class zzao<V> extends java.util.concurrent.Future<any> {
public static class: java.lang.Class<com.google.android.gms.internal.firebase_ml_naturallanguage.zzao<any>>;
/**
* Constructs a new instance of the com.google.android.gms.internal.firebase_ml_naturallanguage.zzao<any> interface with the provided implementation. An empty constructor exists calling super() when extending the interface class.
*/
public constructor(implementation: {
});
public constructor();
}
}
}
}
}
}
}
declare module com {
export module google {
export module android {
export module gms {
export module internal {
export module firebase_ml_naturallanguage {
// Executor implementation; the static values() suggests a Java enum
// implementing Executor — TODO confirm against the Android library.
export class zzap extends java.util.concurrent.Executor {
public static class: java.lang.Class<com.google.android.gms.internal.firebase_ml_naturallanguage.zzap>;
public execute(param0: java.lang.Runnable): void;
public static values(): any /* native.Array<com.google.android.gms.internal.firebase_ml_naturallanguage.zzap>*/;
public toString(): string;
}
}
}
}
}
}
}
declare module com {
export module google {
export module android {
export module gms {
export module internal {
export module firebase_ml_naturallanguage {
// Opaque obfuscated class; only the runtime Class handle is exposed.
export class zzar {
public static class: java.lang.Class<com.google.android.gms.internal.firebase_ml_naturallanguage.zzar>;
}
}
}
}
}
}
}
declare module com {
export module google {
export module android {
export module gms {
export module internal {
export module firebase_ml_naturallanguage {
// Opaque obfuscated class; only the runtime Class handle is exposed.
export class zzcp {
public static class: java.lang.Class<com.google.android.gms.internal.firebase_ml_naturallanguage.zzcp>;
}
}
}
}
}
}
}
declare module com {
export module google {
export module android {
export module gms {
export module internal {
export module firebase_ml_naturallanguage {
// Handler-callback shape (android.os.Handler.Callback-like) — TODO confirm.
export class zzcq {
public static class: java.lang.Class<com.google.android.gms.internal.firebase_ml_naturallanguage.zzcq>;
public handleMessage(param0: globalAndroid.os.Message): boolean;
}
}
}
}
}
}
}
declare module com {
export module google {
export module android {
export module gms {
export module internal {
export module firebase_ml_naturallanguage {
// Exposes a string-keyed version lookup.
export class zzcr {
public static class: java.lang.Class<com.google.android.gms.internal.firebase_ml_naturallanguage.zzcr>;
public getVersion(param0: string): string;
}
}
}
}
}
}
}
declare module com {
export module google {
export module android {
export module gms {
export module internal {
export module firebase_ml_naturallanguage {
// Opaque obfuscated class; only the runtime Class handle is exposed.
export class zzcs {
public static class: java.lang.Class<com.google.android.gms.internal.firebase_ml_naturallanguage.zzcs>;
}
}
}
}
}
}
}
declare module com {
export module google {
export module android {
export module gms {
export module internal {
export module firebase_ml_naturallanguage {
// Runnable-shaped obfuscated class.
export class zzct {
public static class: java.lang.Class<com.google.android.gms.internal.firebase_ml_naturallanguage.zzct>;
public run(): void;
}
}
}
}
}
}
}
declare module com {
export module google {
export module android {
export module gms {
export module internal {
export module firebase_ml_naturallanguage {
// Generic value wrapper exposing only Object identity methods.
export class zzcu<T> extends java.lang.Object {
public static class: java.lang.Class<com.google.android.gms.internal.firebase_ml_naturallanguage.zzcu<any>>;
public hashCode(): number;
public toString(): string;
public equals(param0: any): boolean;
}
}
}
}
}
}
}
declare module com {
export module google {
export module android {
export module gms {
export module internal {
export module firebase_ml_naturallanguage {
// Callable-shaped obfuscated class.
export class zzcv extends java.util.concurrent.Callable<any> {
public static class: java.lang.Class<com.google.android.gms.internal.firebase_ml_naturallanguage.zzcv>;
public call(): any;
}
}
}
}
}
}
}
declare module com {
export module google {
export module android {
export module gms {
export module internal {
export module firebase_ml_naturallanguage {
// Interface typing whose single member zzm() returns an obfuscated builder.
export class zzcw {
public static class: java.lang.Class<com.google.android.gms.internal.firebase_ml_naturallanguage.zzcw>;
/**
* Constructs a new instance of the com.google.android.gms.internal.firebase_ml_naturallanguage.zzcw interface with the provided implementation. An empty constructor exists calling super() when extending the interface class.
*/
public constructor(implementation: {
zzm(): any /* com.google.android.gms.internal.firebase_ml_naturallanguage.zzay.zzs.zza*/;
});
public constructor();
}
}
}
}
}
}
}
declare module com {
export module google {
export module android {
export module gms {
export module internal {
export module firebase_ml_naturallanguage {
// Opaque obfuscated class; only the runtime Class handle is exposed.
export class zzcx {
public static class: java.lang.Class<com.google.android.gms.internal.firebase_ml_naturallanguage.zzcx>;
}
}
}
}
}
}
}
declare module com {
export module google {
export module android {
export module gms {
export module internal {
export module firebase_ml_naturallanguage {
// Opaque obfuscated class; only the runtime Class handle is exposed.
export class zzcy {
public static class: java.lang.Class<com.google.android.gms.internal.firebase_ml_naturallanguage.zzcy>;
}
}
}
}
}
}
}
declare module com {
export module google {
export module android {
export module gms {
export module internal {
export module firebase_ml_naturallanguage {
// Interface typing for a releasable resource (zzct()/release()); extended by
// the language-identification detector classes declared below.
export class zzcz {
public static class: java.lang.Class<com.google.android.gms.internal.firebase_ml_naturallanguage.zzcz>;
/**
* Constructs a new instance of the com.google.android.gms.internal.firebase_ml_naturallanguage.zzcz interface with the provided implementation. An empty constructor exists calling super() when extending the interface class.
*/
public constructor(implementation: {
zzct(): void;
release(): void;
});
public constructor();
public release(): void;
}
}
}
}
}
}
}
declare module com {
export module google {
export module android {
export module gms {
export module internal {
export module firebase_ml_naturallanguage {
// Callable<Void> with value-equality members.
export class zzda extends java.util.concurrent.Callable<java.lang.Void> {
public static class: java.lang.Class<com.google.android.gms.internal.firebase_ml_naturallanguage.zzda>;
public hashCode(): number;
public equals(param0: any): boolean;
}
}
}
}
}
}
}
declare module com {
export module google {
export module android {
export module gms {
export module internal {
export module firebase_ml_naturallanguage {
// Background-state listener shape.
export class zzdb {
public static class: java.lang.Class<com.google.android.gms.internal.firebase_ml_naturallanguage.zzdb>;
public onBackgroundStateChanged(param0: boolean): void;
}
}
}
}
}
}
}
declare module com {
export module google {
export module android {
export module gms {
export module internal {
export module firebase_ml_naturallanguage {
// FirebaseApp deletion listener shape.
export class zzdc {
public static class: java.lang.Class<com.google.android.gms.internal.firebase_ml_naturallanguage.zzdc>;
public onDeleted(param0: string, param1: com.google.firebase.FirebaseOptions): void;
}
}
}
}
}
}
}
declare module com {
export module google {
export module android {
export module gms {
export module internal {
export module firebase_ml_naturallanguage {
// Opaque obfuscated class; only the runtime Class handle is exposed.
export class zzdd {
public static class: java.lang.Class<com.google.android.gms.internal.firebase_ml_naturallanguage.zzdd>;
}
}
}
}
}
}
}
// -----------------------------------------------------------------------------
// Ambient typings for the public Firebase ML Natural Language API surface:
// the FirebaseNaturalLanguage entry point, language identification, and
// smart reply, plus their obfuscated (zz*) helper classes.
// -----------------------------------------------------------------------------
declare module com {
export module google {
export module firebase {
export module ml {
export module naturallanguage {
// Entry point: obtain feature instances (smart reply, language id,
// translation) from the default or a specific FirebaseApp.
export class FirebaseNaturalLanguage {
public static class: java.lang.Class<com.google.firebase.ml.naturallanguage.FirebaseNaturalLanguage>;
public static getInstance(): com.google.firebase.ml.naturallanguage.FirebaseNaturalLanguage;
public getSmartReply(): com.google.firebase.ml.naturallanguage.smartreply.FirebaseSmartReply;
public getLanguageIdentification(param0: com.google.firebase.ml.naturallanguage.languageid.FirebaseLanguageIdentificationOptions): com.google.firebase.ml.naturallanguage.languageid.FirebaseLanguageIdentification;
public getLanguageIdentification(): com.google.firebase.ml.naturallanguage.languageid.FirebaseLanguageIdentification;
public getTranslator(param0: com.google.firebase.ml.naturallanguage.translate.FirebaseTranslatorOptions): com.google.firebase.ml.naturallanguage.translate.FirebaseTranslator;
public static getInstance(param0: com.google.firebase.FirebaseApp): com.google.firebase.ml.naturallanguage.FirebaseNaturalLanguage;
}
}
}
}
}
}
declare module com {
export module google {
export module firebase {
export module ml {
export module naturallanguage {
// Firebase component registrar contributing this SDK's components.
export class NaturalLanguageRegistrar {
public static class: java.lang.Class<com.google.firebase.ml.naturallanguage.NaturalLanguageRegistrar>;
public constructor();
public getComponents(): java.util.List<com.google.firebase.components.Component<any>>;
}
}
}
}
}
}
declare module com {
export module google {
export module firebase {
export module ml {
export module naturallanguage {
export module languageid {
// Identifies the language(s) of a text string via async Tasks; close()
// releases the underlying detector.
export class FirebaseLanguageIdentification {
public static class: java.lang.Class<com.google.firebase.ml.naturallanguage.languageid.FirebaseLanguageIdentification>;
public static UNDETERMINED_LANGUAGE_CODE: string;
public static DEFAULT_IDENTIFY_LANGUAGE_CONFIDENCE_THRESHOLD: number;
public static DEFAULT_IDENTIFY_POSSIBLE_LANGUAGES_CONFIDENCE_THRESHOLD: number;
public close(): void;
public identifyPossibleLanguages(param0: string): com.google.android.gms.tasks.Task<java.util.List<com.google.firebase.ml.naturallanguage.languageid.IdentifiedLanguage>>;
public identifyLanguage(param0: string): com.google.android.gms.tasks.Task<string>;
}
export module FirebaseLanguageIdentification {
// Internal releasable helper (see zzcz above).
export class zza extends com.google.android.gms.internal.firebase_ml_naturallanguage.zzcz {
public static class: java.lang.Class<com.google.firebase.ml.naturallanguage.languageid.FirebaseLanguageIdentification.zza>;
public release(): void;
}
}
}
}
}
}
}
}
declare module com {
export module google {
export module firebase {
export module ml {
export module naturallanguage {
export module languageid {
// Immutable options (confidence threshold) built via the nested Builder.
export class FirebaseLanguageIdentificationOptions {
public static class: java.lang.Class<com.google.firebase.ml.naturallanguage.languageid.FirebaseLanguageIdentificationOptions>;
public hashCode(): number;
public getConfidenceThreshold(): java.lang.Float;
public equals(param0: any): boolean;
}
export module FirebaseLanguageIdentificationOptions {
export class Builder {
public static class: java.lang.Class<com.google.firebase.ml.naturallanguage.languageid.FirebaseLanguageIdentificationOptions.Builder>;
public constructor();
public build(): com.google.firebase.ml.naturallanguage.languageid.FirebaseLanguageIdentificationOptions;
public setConfidenceThreshold(param0: number): com.google.firebase.ml.naturallanguage.languageid.FirebaseLanguageIdentificationOptions.Builder;
}
}
}
}
}
}
}
}
declare module com {
export module google {
export module firebase {
export module ml {
export module naturallanguage {
export module languageid {
// One identification result: a language code plus its confidence score.
export class IdentifiedLanguage {
public static class: java.lang.Class<com.google.firebase.ml.naturallanguage.languageid.IdentifiedLanguage>;
public getConfidence(): number;
public hashCode(): number;
public toString(): string;
public getLanguageCode(): string;
public equals(param0: any): boolean;
}
}
}
}
}
}
}
declare module com {
export module google {
export module firebase {
export module ml {
export module naturallanguage {
export module languageid {
export module internal {
// JNI bridge to the native language-identification model; releasable.
export class LanguageIdentificationJni extends com.google.android.gms.internal.firebase_ml_naturallanguage.zzcz {
public static class: java.lang.Class<com.google.firebase.ml.naturallanguage.languageid.internal.LanguageIdentificationJni>;
public constructor(param0: globalAndroid.content.Context);
public release(): void;
}
}
}
}
}
}
}
}
declare module com {
export module google {
export module firebase {
export module ml {
export module naturallanguage {
export module languageid {
// Obfuscated Callable-shaped helper.
export class zza {
public static class: java.lang.Class<com.google.firebase.ml.naturallanguage.languageid.zza>;
public call(): any;
}
}
}
}
}
}
}
declare module com {
export module google {
export module firebase {
export module ml {
export module naturallanguage {
export module languageid {
// Obfuscated Callable-shaped helper.
export class zzb {
public static class: java.lang.Class<com.google.firebase.ml.naturallanguage.languageid.zzb>;
public call(): any;
}
}
}
}
}
}
}
declare module com {
export module google {
export module firebase {
export module ml {
export module naturallanguage {
export module languageid {
// Obfuscated helper extending the internal zzcw interface.
export class zzc extends com.google.android.gms.internal.firebase_ml_naturallanguage.zzcw {
public static class: java.lang.Class<com.google.firebase.ml.naturallanguage.languageid.zzc>;
}
}
}
}
}
}
}
declare module com {
export module google {
export module firebase {
export module ml {
export module naturallanguage {
export module languageid {
// Obfuscated helper extending the internal zzcw interface.
export class zzd extends com.google.android.gms.internal.firebase_ml_naturallanguage.zzcw {
public static class: java.lang.Class<com.google.firebase.ml.naturallanguage.languageid.zzd>;
}
}
}
}
}
}
}
declare module com {
export module google {
export module firebase {
export module ml {
export module naturallanguage {
export module languageid {
// Opaque obfuscated helper.
export class zze {
public static class: java.lang.Class<com.google.firebase.ml.naturallanguage.languageid.zze>;
}
}
}
}
}
}
}
declare module com {
export module google {
export module firebase {
export module ml {
export module naturallanguage {
export module languageid {
// Opaque obfuscated helper.
export class zzf {
public static class: java.lang.Class<com.google.firebase.ml.naturallanguage.languageid.zzf>;
}
}
}
}
}
}
}
declare module com {
export module google {
export module firebase {
export module ml {
export module naturallanguage {
export module smartreply {
// Suggests replies for a conversation (list of FirebaseTextMessage);
// close() releases the underlying model.
export class FirebaseSmartReply {
public static class: java.lang.Class<com.google.firebase.ml.naturallanguage.smartreply.FirebaseSmartReply>;
public close(): void;
public suggestReplies(param0: java.util.List<com.google.firebase.ml.naturallanguage.smartreply.FirebaseTextMessage>): com.google.android.gms.tasks.Task<com.google.firebase.ml.naturallanguage.smartreply.SmartReplySuggestionResult>;
}
export module FirebaseSmartReply {
// Continuation converting the on-device SmartReplyResult into the
// public SmartReplySuggestionResult.
export class zza extends com.google.android.gms.tasks.SuccessContinuation<com.google.android.gms.predictondevice.SmartReplyResult,com.google.firebase.ml.naturallanguage.smartreply.SmartReplySuggestionResult> {
public static class: java.lang.Class<com.google.firebase.ml.naturallanguage.smartreply.FirebaseSmartReply.zza>;
}
}
}
}
}
}
}
}
declare module com {
export module google {
export module firebase {
export module ml {
export module naturallanguage {
export module smartreply {
// One conversation message, created for the local or a remote user.
export class FirebaseTextMessage {
public static class: java.lang.Class<com.google.firebase.ml.naturallanguage.smartreply.FirebaseTextMessage>;
public static createForRemoteUser(param0: string, param1: number, param2: string): com.google.firebase.ml.naturallanguage.smartreply.FirebaseTextMessage;
public static createForLocalUser(param0: string, param1: number): com.google.firebase.ml.naturallanguage.smartreply.FirebaseTextMessage;
public getTimestampMillis(): number;
}
}
}
}
}
}
}
declare module com {
export module google {
export module firebase {
export module ml {
export module naturallanguage {
export module smartreply {
// A single suggested reply with its confidence score.
export class SmartReplySuggestion {
public static class: java.lang.Class<com.google.firebase.ml.naturallanguage.smartreply.SmartReplySuggestion>;
public getConfidence(): number;
public getText(): string;
public toString(): string;
}
}
}
}
}
}
}
declare module com {
export module google {
export module firebase {
export module ml {
export module naturallanguage {
export module smartreply {
// Result of suggestReplies(): a status code plus the suggestion list.
export class SmartReplySuggestionResult {
public static class: java.lang.Class<com.google.firebase.ml.naturallanguage.smartreply.SmartReplySuggestionResult>;
public static STATUS_SUCCESS: number;
public static STATUS_NOT_SUPPORTED_LANGUAGE: number;
public static STATUS_NO_REPLY: number;
public getStatus(): number;
public getSuggestions(): java.util.List<com.google.firebase.ml.naturallanguage.smartreply.SmartReplySuggestion>;
public toString(): string;
public getStatusCodeForDebugging(): number;
}
export module SmartReplySuggestionResult {
// Marker interface for the STATUS_* codes.
export class Status {
public static class: java.lang.Class<com.google.firebase.ml.naturallanguage.smartreply.SmartReplySuggestionResult.Status>;
/**
* Constructs a new instance of the com.google.firebase.ml.naturallanguage.smartreply.SmartReplySuggestionResult$Status interface with the provided implementation. An empty constructor exists calling super() when extending the interface class.
*/
public constructor(implementation: {
});
public constructor();
}
}
}
}
}
}
}
}
declare module com {
export module google {
export module firebase {
export module ml {
export module naturallanguage {
export module smartreply {
// Obfuscated helper extending the internal zzcw interface.
export class zza extends com.google.android.gms.internal.firebase_ml_naturallanguage.zzcw {
public static class: java.lang.Class<com.google.firebase.ml.naturallanguage.smartreply.zza>;
}
}
}
}
}
}
}
declare module com {
export module google {
export module firebase {
export module ml {
export module naturallanguage {
export module smartreply {
// Obfuscated helper extending the internal zzcw interface.
export class zzb extends com.google.android.gms.internal.firebase_ml_naturallanguage.zzcw {
public static class: java.lang.Class<com.google.firebase.ml.naturallanguage.smartreply.zzb>;
}
}
}
}
}
}
}
declare module com {
export module google {
export module firebase {
export module ml {
export module naturallanguage {
export module smartreply {
// Failure-listener-shaped obfuscated helper.
export class zzc {
public static class: java.lang.Class<com.google.firebase.ml.naturallanguage.smartreply.zzc>;
public onFailure(param0: java.lang.Exception): void;
}
}
}
}
}
}
}
declare module com {
export module google {
export module firebase {
export module ml {
export module naturallanguage {
export module smartreply {
// Task continuation mapping an identified language to a suggestion result.
export class zzd extends com.google.android.gms.tasks.Continuation<string,com.google.android.gms.tasks.Task<com.google.firebase.ml.naturallanguage.smartreply.SmartReplySuggestionResult>> {
public static class: java.lang.Class<com.google.firebase.ml.naturallanguage.smartreply.zzd>;
}
}
}
}
}
}
}
declare module com {
export module google {
export module firebase {
export module ml {
export module naturallanguage {
// Component factory shape used during Firebase component registration.
export class zza {
public static class: java.lang.Class<com.google.firebase.ml.naturallanguage.zza>;
public create(param0: com.google.firebase.components.ComponentContainer): any;
}
}
}
}
}
}
//Generics information:
//com.google.android.gms.internal.firebase_ml_naturallanguage.zzao:1
//com.google.android.gms.internal.firebase_ml_naturallanguage.zzcu:1 | the_stack |
import { Controller, Get, Post, Body, Query, Inject, forwardRef } from "@nestjs/common";
import { ApiOperation, ApiBearerAuth, ApiTags } from "@nestjs/swagger";
import { CurrentUser } from "@/common/user.decorator";
import { AuthService } from "@/auth/auth.service";
import { ConfigService } from "@/config/config.service";
import { SubmissionService } from "@/submission/submission.service";
import { AuditLogObjectType, AuditService } from "@/audit/audit.service";
import { AuthIpLocationService } from "@/auth/auth-ip-location.service";
import { UserMigrationService } from "@/migration/user-migration.service";
import { UserEntity } from "./user.entity";
import { UserService } from "./user.service";
import { UserPrivilegeService } from "./user-privilege.service";
import { UserPrivilegeType } from "./user-privilege.entity";
import {
GetUserMetaResponseDto,
GetUserMetaRequestDto,
GetUserMetaResponseError,
SetUserPrivilegesResponseDto,
SetUserPrivilegesRequestDto,
SetUserPrivilegesResponseError,
UpdateUserProfileRequestDto,
UpdateUserProfileResponseDto,
UpdateUserProfileResponseError,
SearchUserRequestDto,
SearchUserResponseDto,
GetUserListRequestDto,
GetUserListResponseDto,
GetUserListResponseError,
GetUserDetailRequestDto,
GetUserDetailResponseDto,
GetUserDetailResponseError,
GetUserProfileRequestDto,
GetUserProfileResponseDto,
GetUserProfileResponseError,
GetUserPreferenceRequestDto,
GetUserPreferenceResponseDto,
GetUserPreferenceResponseError,
UpdateUserPreferenceRequestDto,
UpdateUserPreferenceResponseDto,
UpdateUserPreferenceResponseError,
GetUserSecuritySettingsRequestDto,
GetUserSecuritySettingsResponseDto,
GetUserSecuritySettingsResponseError,
UpdateUserPasswordRequestDto,
UpdateUserPasswordResponseDto,
UpdateUserPasswordResponseError,
UpdateUserSelfEmailRequestDto,
UpdateUserSelfEmailResponseDto,
UpdateUserSelfEmailResponseError,
QueryAuditLogsRequestDto,
QueryAuditLogsResponseDto,
QueryAuditLogsResponseError
} from "./dto";
@ApiTags("User")
@Controller("user")
export class UserController {
constructor(
private readonly userService: UserService,
private readonly authService: AuthService,
private readonly configService: ConfigService,
private readonly userPrivilegeService: UserPrivilegeService,
@Inject(forwardRef(() => SubmissionService))
private readonly submissionService: SubmissionService,
private readonly auditService: AuditService,
private readonly authIpLocationService: AuthIpLocationService,
private readonly userMigrationService: UserMigrationService
) {}
@Get("searchUser")
@ApiBearerAuth()
@ApiOperation({
summary: "Search users with a substring of the username"
})
async searchUser(
@CurrentUser() currentUser: UserEntity,
@Query() request: SearchUserRequestDto
): Promise<SearchUserResponseDto> {
const users = await this.userService.searchUser(
request.query,
request.wildcard,
this.configService.config.queryLimit.searchUser
);
return {
userMetas: await Promise.all(users.map(async user => await this.userService.getUserMeta(user, currentUser)))
};
}
@Post("getUserMeta")
@ApiBearerAuth()
@ApiOperation({
summary: "Get a user's metadata with its ID or username."
})
async getUserMeta(
@CurrentUser() currentUser: UserEntity,
@Body() request: GetUserMetaRequestDto
): Promise<GetUserMetaResponseDto> {
let user: UserEntity;
if (request.userId) {
user = await this.userService.findUserById(request.userId);
} else if (request.username) {
user = await this.userService.findUserByUsername(request.username);
}
if (!user)
return {
error: GetUserMetaResponseError.NO_SUCH_USER
};
const result: GetUserMetaResponseDto = {
meta: await this.userService.getUserMeta(user, currentUser)
};
if (request.getPrivileges) {
result.privileges = await this.userPrivilegeService.getUserPrivileges(user.id);
}
return result;
}
@Post("setUserPrivileges")
@ApiBearerAuth()
@ApiOperation({
summary: "Set a user's privileges."
})
async setUserPrivileges(
@CurrentUser() currentUser: UserEntity,
@Body() request: SetUserPrivilegesRequestDto
): Promise<SetUserPrivilegesResponseDto> {
if (!(currentUser && currentUser.isAdmin))
return {
error: SetUserPrivilegesResponseError.PERMISSION_DENIED
};
const oldPrivileges = await this.userPrivilegeService.getUserPrivileges(request.userId);
const error = await this.userPrivilegeService.setUserPrivileges(request.userId, request.privileges);
await this.auditService.log("user.set_privileges", AuditLogObjectType.User, request.userId, {
oldPrivileges,
newPrivileges: request.privileges
});
return {
error
};
}
@Post("updateUserProfile")
@ApiBearerAuth()
@ApiOperation({
summary: "Update a user's username, email, bio or password."
})
async updateUserProfile(
@CurrentUser() currentUser: UserEntity,
@Body() request: UpdateUserProfileRequestDto
): Promise<UpdateUserProfileResponseDto> {
const user = await this.userService.findUserById(request.userId);
if (!user)
return {
error: UpdateUserProfileResponseError.NO_SUCH_USER
};
if (!currentUser)
return {
error: UpdateUserProfileResponseError.PERMISSION_DENIED
};
const isUserSelf = currentUser.id === user.id;
const hasPrivilege = await this.userPrivilegeService.userHasPrivilege(currentUser, UserPrivilegeType.ManageUser);
if (!(isUserSelf || hasPrivilege))
return {
error: UpdateUserProfileResponseError.PERMISSION_DENIED
};
if (request.username !== user.username) {
if (!this.configService.config.preference.security.allowUserChangeUsername) {
// Normal users are not allowed to change their usernames
if (!hasPrivilege)
return {
error: UpdateUserProfileResponseError.PERMISSION_DENIED
};
}
}
// if (request.password) {
// // A non-admin user must give the old password to change its password
// if (!hasPrivilege) {
// const userAuth = await this.authService.findUserAuthByUserId(request.userId);
// if (!(await this.authService.checkPassword(userAuth, request.oldPassword)))
// return {
// error: UpdateUserProfileResponseError.WRONG_OLD_PASSWORD
// };
// }
// }
const oldUsername = user.username;
const oldEmail = user.email;
const error = await this.userService.updateUserProfile(
user,
request.username,
request.email,
request.publicEmail,
request.avatarInfo,
request.nickname,
request.bio,
request.information
);
if (oldUsername !== request.username) {
if (user.id === currentUser.id) {
await this.auditService.log("user.change_username", {
oldUsername,
newUsername: request.username
});
} else {
await this.auditService.log("user.change_others_username", AuditLogObjectType.User, user.id, {
oldUsername,
newUsername: request.username
});
}
}
if (oldEmail !== request.email) {
if (user.id === currentUser.id) {
await this.auditService.log("user.change_email", {
oldEmail,
newEmail: request.email
});
} else {
await this.auditService.log("user.change_others_email", AuditLogObjectType.User, user.id, {
oldEmail,
newEmail: request.email
});
}
}
return {
error
};
}
@Post("getUserList")
@ApiBearerAuth()
@ApiOperation({
summary: "Get a user list sorted by rating or accepted problems count."
})
async getUserList(
@CurrentUser() currentUser: UserEntity,
@Body() request: GetUserListRequestDto
): Promise<GetUserListResponseDto> {
if (request.takeCount > this.configService.config.queryLimit.userList)
return {
error: GetUserListResponseError.TAKE_TOO_MANY
};
const [users, count] = await this.userService.getUserList(request.sortBy, request.skipCount, request.takeCount);
return {
userMetas: await Promise.all(users.map(user => this.userService.getUserMeta(user, currentUser))),
count
};
}
@Post("getUserDetail")
@ApiBearerAuth()
@ApiOperation({
summary: "Get a user's meta and related data for user profile page."
})
async getUserDetail(
@CurrentUser() currentUser: UserEntity,
@Body() request: GetUserDetailRequestDto
): Promise<GetUserDetailResponseDto> {
const user = request.username
? await this.userService.findUserByUsername(request.username)
: await this.userService.findUserById(request.userId);
if (!user)
return {
error: GetUserDetailResponseError.NO_SUCH_USER
};
const days = 53 * 7 + 6;
const [userInformation, submissionCountPerDay, rank, hasPrivilege] = await Promise.all([
this.userService.findUserInformationByUserId(user.id),
this.submissionService.getUserRecentlySubmissionCountPerDay(user, days, request.timezone, request.now),
this.userService.getUserRank(user),
currentUser &&
(currentUser.id === user.id ||
this.userPrivilegeService.userHasPrivilege(currentUser, UserPrivilegeType.ManageUser))
]);
return {
meta: await this.userService.getUserMeta(user, currentUser),
information: {
organization: userInformation.organization,
location: userInformation.location,
url: userInformation.url,
telegram: userInformation.telegram,
qq: userInformation.qq,
github: userInformation.github
},
submissionCountPerDay,
rank,
hasPrivilege
};
}
@Post("getUserProfile")
@ApiBearerAuth()
@ApiOperation({
summary: "Get a user's meta and information for user profile edit page."
})
async getUserProfile(
@CurrentUser() currentUser: UserEntity,
@Body() request: GetUserProfileRequestDto
): Promise<GetUserProfileResponseDto> {
if (!currentUser)
return {
error: GetUserProfileResponseError.PERMISSION_DENIED
};
const user = request.username
? await this.userService.findUserByUsername(request.username)
: await this.userService.findUserById(request.userId);
if (!user)
return {
error: GetUserProfileResponseError.NO_SUCH_USER
};
if (
currentUser.id !== user.id &&
!(await this.userPrivilegeService.userHasPrivilege(currentUser, UserPrivilegeType.ManageUser))
)
return {
error: GetUserProfileResponseError.PERMISSION_DENIED
};
const userInformation = await this.userService.findUserInformationByUserId(user.id);
return {
meta: await this.userService.getUserMeta(user, currentUser),
publicEmail: user.publicEmail,
avatarInfo: user.avatarInfo,
information: {
organization: userInformation.organization,
location: userInformation.location,
url: userInformation.url,
telegram: userInformation.telegram,
qq: userInformation.qq,
github: userInformation.github
}
};
}
@Post("getUserPreference")
@ApiBearerAuth()
@ApiOperation({
summary: "Get a user's meta and preference for user profile edit page."
})
async getUserPreference(
@CurrentUser() currentUser: UserEntity,
@Body() request: GetUserPreferenceRequestDto
): Promise<GetUserPreferenceResponseDto> {
if (!currentUser)
return {
error: GetUserPreferenceResponseError.PERMISSION_DENIED
};
const user = request.username
? await this.userService.findUserByUsername(request.username)
: await this.userService.findUserById(request.userId);
if (!user)
return {
error: GetUserPreferenceResponseError.NO_SUCH_USER
};
if (
currentUser.id !== user.id &&
!(await this.userPrivilegeService.userHasPrivilege(currentUser, UserPrivilegeType.ManageUser))
)
return {
error: GetUserPreferenceResponseError.PERMISSION_DENIED
};
return {
meta: await this.userService.getUserMeta(user, currentUser),
preference: await this.userService.getUserPreference(user)
};
}
@Post("updateUserPreference")
@ApiBearerAuth()
@ApiOperation({
summary: "Update a user's preference."
})
async updateUserPreference(
@CurrentUser() currentUser: UserEntity,
@Body() request: UpdateUserPreferenceRequestDto
): Promise<UpdateUserPreferenceResponseDto> {
if (!currentUser)
return {
error: UpdateUserPreferenceResponseError.PERMISSION_DENIED
};
const user = await this.userService.findUserById(request.userId);
if (!user)
return {
error: UpdateUserPreferenceResponseError.NO_SUCH_USER
};
if (
currentUser.id !== user.id &&
!(await this.userPrivilegeService.userHasPrivilege(currentUser, UserPrivilegeType.ManageUser))
)
return {
error: UpdateUserPreferenceResponseError.PERMISSION_DENIED
};
await this.userService.updateUserPreference(user, request.preference);
return {};
}
@Post("getUserSecuritySettings")
@ApiBearerAuth()
@ApiOperation({
summary: "Get a user's security settings for user settings page."
})
async getUserSecuritySettings(
@CurrentUser() currentUser: UserEntity,
@Body() request: GetUserSecuritySettingsRequestDto
): Promise<GetUserSecuritySettingsResponseDto> {
if (!currentUser)
return {
error: GetUserSecuritySettingsResponseError.PERMISSION_DENIED
};
const user = request.username
? await this.userService.findUserByUsername(request.username)
: await this.userService.findUserById(request.userId);
if (!user)
return {
error: GetUserSecuritySettingsResponseError.NO_SUCH_USER
};
if (
currentUser.id !== user.id &&
!(await this.userPrivilegeService.userHasPrivilege(currentUser, UserPrivilegeType.ManageUser))
)
return {
error: GetUserSecuritySettingsResponseError.PERMISSION_DENIED
};
return {
meta: await this.userService.getUserMeta(user, currentUser)
};
}
@Post("queryAuditLogs")
@ApiBearerAuth()
@ApiOperation({
summary: "Query audit logs."
})
async queryAuditLogs(
@CurrentUser() currentUser: UserEntity,
@Body() request: QueryAuditLogsRequestDto
): Promise<QueryAuditLogsResponseDto> {
if (request.takeCount > this.configService.config.queryLimit.userAuditLogs)
return {
error: QueryAuditLogsResponseError.TAKE_TOO_MANY
};
if (!currentUser)
return {
error: QueryAuditLogsResponseError.PERMISSION_DENIED
};
if (
(request.username ? currentUser.username !== request.username : currentUser.id !== request.userId) &&
!(await this.userPrivilegeService.userHasPrivilege(currentUser, UserPrivilegeType.ManageUser))
)
return {
error: QueryAuditLogsResponseError.PERMISSION_DENIED
};
const user = request.username
? await this.userService.findUserByUsername(request.username)
: await this.userService.findUserById(request.userId);
if (request.username != null && request.userId != null && !user)
return {
error: QueryAuditLogsResponseError.NO_SUCH_USER
};
const [results, count] = await this.auditService.query(
user?.id,
request.actionQuery,
request.ip,
request.firstObjectId,
request.secondObjectId,
request.locale,
currentUser,
request.skipCount,
request.takeCount
);
return {
results: await Promise.all(
results.map(async result => ({
user: await this.userService.getUserMeta(
result.userId === user?.id ? user : await this.userService.findUserById(result.userId),
currentUser
),
ip: result.ip,
ipLocation: this.authIpLocationService.query(result.ip),
time: result.time,
action: result.action,
firstObjectType: result.firstObjectType,
firstObjectId: result.firstObjectId,
firstObject: result.firstObject,
secondObjectType: result.secondObjectType,
secondObjectId: result.secondObjectId,
secondObject: result.secondObject,
details: result.details
}))
),
count
};
}
@Post("updateUserPassword")
@ApiBearerAuth()
@ApiOperation({
summary: "Change a user's password by its old password."
})
async updateUserPassword(
@CurrentUser() currentUser: UserEntity,
@Body() request: UpdateUserPasswordRequestDto
): Promise<UpdateUserPasswordResponseDto> {
const user = await this.userService.findUserById(request.userId);
if (!user)
return {
error: UpdateUserPasswordResponseError.NO_SUCH_USER
};
if (!currentUser)
return {
error: UpdateUserPasswordResponseError.PERMISSION_DENIED
};
const isUserSelf = currentUser.id === user.id;
const hasPrivilege = await this.userPrivilegeService.userHasPrivilege(currentUser, UserPrivilegeType.ManageUser);
if (!(isUserSelf || hasPrivilege))
return {
error: UpdateUserPasswordResponseError.PERMISSION_DENIED
};
// A non-admin user must give the old password to change its password
const userAuth = await this.authService.findUserAuthByUserId(request.userId);
if (!hasPrivilege) {
if (!(await this.authService.checkPassword(userAuth, request.oldPassword)))
return {
error: UpdateUserPasswordResponseError.WRONG_OLD_PASSWORD
};
}
if (this.authService.checkUserMigrated(userAuth)) await this.authService.changePassword(userAuth, request.password);
else {
// If the user has NOT been migrated, change its "password in old system"
const userMigrationInfo = await this.userMigrationService.findUserMigrationInfoByUserId(user.id);
await this.userMigrationService.changeOldPassword(userMigrationInfo, request.password);
}
if (request.userId === user.id) {
await this.auditService.log("auth.change_password");
} else {
await this.auditService.log("auth.change_others_password", AuditLogObjectType.User, user.id);
}
return {};
}
@Post("updateUserSelfEmail")
@ApiBearerAuth()
@ApiOperation({
summary: "Change the current user itself's email."
})
async updateUserSelfEmail(
@CurrentUser() currentUser: UserEntity,
@Body() request: UpdateUserSelfEmailRequestDto
): Promise<UpdateUserSelfEmailResponseDto> {
if (!currentUser)
return {
error: UpdateUserSelfEmailResponseError.PERMISSION_DENIED
};
const oldEmail = currentUser.email;
const error = await this.userService.updateUserSelfEmail(currentUser, request.email, request.emailVerificationCode);
if (oldEmail !== request.email) {
await this.auditService.log("auth.change_email", {
oldEmail,
newEmail: request.email
});
}
if (!error) return {};
return {
error
};
}
} | the_stack |
import {
ACTIONS_BYTE_TO_PAYLOAD as ABP,
ACTIONS_BYTE_TO_TEXT as ABT,
AUTH_ACTIONS as AA,
CONNECTION_ACTIONS as CA,
DEEPSTREAM_TYPES as TYPES,
EVENT_ACTIONS as EA,
MESSAGE_PART_SEPERATOR as y,
MESSAGE_SEPERATOR as x,
PRESENCE_ACTIONS as UA,
RECORD_ACTIONS as RA,
RPC_ACTIONS as PA,
TOPIC,
TOPIC_BYTE_TO_TEXT as TBT,
PAYLOAD_ENCODING,
} from './constants'
import { Message } from '../../../../constants'
import { correlationIdToVersion, bulkNameToCorrelationId } from './message-parser'
const WA = y + JSON.stringify({ writeSuccess: true })
const NWA = y + '{}'
const A = 'A' + y
const genericError = (msg: Message) => `${TBT[msg.topic]}${y}E${y}${msg.correlationId}${y}${msg.parsedData}${x}`
const invalidMessageData = (msg: Message) => `${TBT[msg.topic]}${y}E${y}INVALID_MESSAGE_DATA${y}${msg.data}${x}`
const messagePermissionError = (msg: Message) => `${TBT[msg.topic]}${y}E${y}MESSAGE_PERMISSION_ERROR${y}${msg.name}${ABT[msg.topic][msg.action] ? y + ABT[msg.topic][msg.action] : '' }${msg.correlationId ? y + msg.correlationId : '' }${x}`
const messageDenied = (msg: Message) => {
let version
if (msg.topic === TOPIC.RECORD.BYTE && msg.correlationId) {
version = correlationIdToVersion.get(msg.correlationId!)
correlationIdToVersion.delete(msg.correlationId!)
delete msg.correlationId
}
return `${TBT[msg.topic]}${y}E${y}MESSAGE_DENIED${y}${msg.name}${ABT[msg.topic][msg.action] ? y + ABT[msg.topic][msg.action] : '' }${msg.originalAction ? y + ABT[msg.topic][msg.originalAction] : '' }${msg.correlationId ? y + msg.correlationId : '' }${version !== undefined ? y + version : '' }${x}`
}
const notSubscribed = (msg: Message) => `${TBT[msg.topic]}${y}E${y}NOT_SUBSCRIBED${y}${msg.name}${x}`
const invalidAuth = (msg: Message) => `A${y}E${y}INVALID_AUTH_DATA${y}${msg.data ? msg.data : 'U' }${x}`
const recordUpdate = (msg: Message) => `R${y}U${y}${msg.name}${y}${msg.version}${y}${msg.data}${msg.isWriteAck ? WA : '' }${x}`
const recordPatch = (msg: Message) => `R${y}P${y}${msg.name}${y}${msg.version}${y}${msg.path}${y}${msg.data}${msg.isWriteAck ? WA : '' }${x}`
const subscriptionForPatternFound = (msg: Message) => `${TBT[msg.topic]}${y}SP${y}${msg.name}${y}${msg.subscription}${x}`
const subscriptionForPatternRemoved = (msg: Message) => `${TBT[msg.topic]}${y}SR${y}${msg.name}${y}${msg.subscription}${x}`
const listen = (msg: Message, isAck: boolean) => `${TBT[msg.topic]}${y}${isAck ? A : '' }L${y}${msg.name}${x}`
const unlisten = (msg: Message, isAck: boolean) => `${TBT[msg.topic]}${y}${isAck ? A : '' }UL${y}${msg.name}${x}`
const listenAccept = (msg: Message) => `${TBT[msg.topic]}${y}LA${y}${msg.name}${y}${msg.subscription}${x}`
const listenReject = (msg: Message) => `${TBT[msg.topic]}${y}LR${y}${msg.name}${y}${msg.subscription}${x}`
const multipleSubscriptions = (msg: Message) => `${TBT[msg.topic]}${y}E${y}MULTIPLE_SUBSCRIPTIONS${y}${msg.name}${x}`
const BUILDERS = {
[TOPIC.CONNECTION.BYTE]: {
[CA.ERROR.BYTE]: genericError,
[CA.CHALLENGE.BYTE]: (msg: Message) => `C${y}CH${x}`,
[CA.ACCEPT.BYTE]: (msg: Message) => `C${y}A${x}`,
[CA.REJECTION.BYTE]: (msg: Message) => `C${y}REJ${y}${msg.data}${x}`,
[CA.REDIRECT.BYTE]: (msg: Message) => `C${y}RED${y}${msg.data}${x}`,
[CA.PING.BYTE]: (msg: Message) => `C${y}PI${x}`,
[CA.PONG.BYTE]: (msg: Message) => `C${y}PO${x}`,
[CA.CONNECTION_AUTHENTICATION_TIMEOUT.BYTE]: (msg: Message) => `C${y}E${y}CONNECTION_AUTHENTICATION_TIMEOUT${x}`,
},
[TOPIC.AUTH.BYTE]: {
[AA.ERROR.BYTE]: genericError,
[AA.REQUEST.BYTE]: (msg: Message) => `A${y}REQ${y}${msg.data}${x}`,
[AA.AUTH_SUCCESSFUL.BYTE]: (msg: Message) => `A${y}A${msg.data ? y + msg.data : ''}${x}`,
[AA.AUTH_UNSUCCESSFUL.BYTE]: invalidAuth,
[AA.INVALID_MESSAGE_DATA.BYTE]: invalidAuth,
[AA.TOO_MANY_AUTH_ATTEMPTS.BYTE]: (msg: Message) => `A${y}E${y}TOO_MANY_AUTH_ATTEMPTS${x}`,
},
[TOPIC.EVENT.BYTE]: {
[EA.ERROR.BYTE]: genericError,
[EA.SUBSCRIBE.BYTE]: (msg: Message, isAck: boolean) => {
let name = msg.name
if (isAck) {
name = bulkNameToCorrelationId.get(msg.correlationId!)
bulkNameToCorrelationId.delete(msg.correlationId!)
}
return `E${y}${isAck ? A : '' }S${y}${name}${x}`
},
[EA.UNSUBSCRIBE.BYTE]: (msg: Message, isAck: boolean) => {
let name = msg.name
if (isAck) {
name = bulkNameToCorrelationId.get(msg.correlationId!)
bulkNameToCorrelationId.delete(msg.correlationId!)
}
return `E${y}${isAck ? A : '' }US${y}${name}${x}`
},
[EA.EMIT.BYTE]: (msg: Message) => `E${y}EVT${y}${msg.name}${y}${msg.data ? msg.data : 'U'}${x}`,
[EA.LISTEN.BYTE]: listen,
[EA.UNLISTEN.BYTE]: unlisten,
[EA.LISTEN_ACCEPT.BYTE]: listenAccept,
[EA.LISTEN_REJECT.BYTE]: listenReject,
[EA.SUBSCRIPTION_FOR_PATTERN_FOUND.BYTE]: subscriptionForPatternFound,
[EA.SUBSCRIPTION_FOR_PATTERN_REMOVED.BYTE]: subscriptionForPatternRemoved,
[EA.INVALID_MESSAGE_DATA.BYTE]: invalidMessageData,
[EA.MESSAGE_DENIED.BYTE]: messageDenied,
[EA.MESSAGE_PERMISSION_ERROR.BYTE]: messagePermissionError,
[EA.NOT_SUBSCRIBED.BYTE]: notSubscribed,
[EA.MULTIPLE_SUBSCRIPTIONS.BYTE]: multipleSubscriptions,
},
[TOPIC.RECORD.BYTE]: {
[RA.ERROR.BYTE]: genericError,
[RA.HEAD.BYTE]: (msg: Message) => `R${y}HD${y}${msg.name}${x}`,
[RA.HEAD_RESPONSE.BYTE]: (msg: Message) => `R${y}HD${y}${msg.name}${y}${msg.version}${y}null${x}`,
[RA.READ.BYTE]: (msg: Message) => `R${y}R${y}${msg.name}${x}`,
[RA.READ_RESPONSE.BYTE]: (msg: Message) => `R${y}R${y}${msg.name}${y}${msg.version}${y}${msg.data}${x}`,
[RA.UPDATE.BYTE]: recordUpdate,
[RA.PATCH.BYTE]: recordPatch,
[RA.ERASE.BYTE]: (msg: Message) => `R${y}P${y}${msg.name}${y}${msg.version}${y}${msg.path}${y}U${msg.isWriteAck ? WA : '' }${x}`,
[RA.CREATEANDUPDATE.BYTE]: (msg: Message) => `R${y}CU${y}${msg.name}${y}${msg.version}${y}${msg.data}${msg.isWriteAck ? WA : NWA }${x}`,
[RA.CREATEANDPATCH.BYTE]: (msg: Message) => `R${y}CU${y}${msg.name}${y}${msg.version}${y}${msg.path}${y}${msg.data}${msg.isWriteAck ? WA : NWA }${x}`,
[RA.DELETE.BYTE]: (msg: Message, isAck: boolean) => `R${y}${isAck ? A : '' }D${y}${msg.name}${x}`,
[RA.DELETED.BYTE]: (msg: Message) => `R${y}A${y}D${y}${msg.name}${x}`,
[RA.DELETE_SUCCESS.BYTE]: (msg: Message) => `R${y}A${y}D${y}${msg.name}${x}`,
[RA.SUBSCRIBECREATEANDREAD.BYTE]: (msg: Message, isAck: boolean) => {
if (isAck) {
return `R${y}A${y}S${y}${msg.name}${x}`
}
return `R${y}CR${y}${msg.name}${x}`
},
[RA.UNSUBSCRIBE.BYTE]: (msg: Message, isAck: boolean) => {
let name = msg.name
if (isAck) {
name = bulkNameToCorrelationId.get(msg.correlationId!)
bulkNameToCorrelationId.delete(msg.correlationId!)
}
return `R${y}${isAck ? A : '' }US${y}${name}${x}`
},
[RA.WRITE_ACKNOWLEDGEMENT.BYTE]: (msg: Message) => {
return `R${y}WA${y}${msg.name}${y}[${correlationIdToVersion.get(msg.correlationId!)}]${y}${TYPES.NULL}${x}`
},
[RA.LISTEN.BYTE]: listen,
[RA.LISTEN_RESPONSE_TIMEOUT.BYTE]: (msg: Message) => `C${y}PO${x}`,
[RA.UNLISTEN.BYTE]: unlisten,
[RA.LISTEN_ACCEPT.BYTE]: listenAccept,
[RA.LISTEN_REJECT.BYTE]: listenReject,
[RA.SUBSCRIPTION_FOR_PATTERN_FOUND.BYTE]: subscriptionForPatternFound,
[RA.SUBSCRIPTION_FOR_PATTERN_REMOVED.BYTE]: subscriptionForPatternRemoved,
[RA.SUBSCRIPTION_HAS_PROVIDER.BYTE]: (msg: Message) => `R${y}SH${y}${msg.name}${y}T${x}`,
[RA.SUBSCRIPTION_HAS_NO_PROVIDER.BYTE]: (msg: Message) => `R${y}SH${y}${msg.name}${y}F${x}`,
[RA.STORAGE_RETRIEVAL_TIMEOUT.BYTE]: (msg: Message) => `R${y}E${y}STORAGE_RETRIEVAL_TIMEOUT${y}${msg.name}${x}`,
[RA.CACHE_RETRIEVAL_TIMEOUT.BYTE]: (msg: Message) => `R${y}E${y}CACHE_RETRIEVAL_TIMEOUT${y}${msg.name}${x}`,
[RA.VERSION_EXISTS.BYTE]: (msg: Message) => `R${y}E${y}VERSION_EXISTS${y}${msg.name}${y}${msg.version}${y}${msg.data}${msg.isWriteAck ? WA : ''}${x}`,
[RA.RECORD_NOT_FOUND.BYTE]: (msg: Message) => `R${y}E${y}RECORD_NOT_FOUND${y}${msg.name}${x}`,
[RA.INVALID_MESSAGE_DATA.BYTE]: invalidMessageData,
[RA.MESSAGE_DENIED.BYTE]: messageDenied,
[RA.MESSAGE_PERMISSION_ERROR.BYTE]: messagePermissionError,
[RA.NOT_SUBSCRIBED.BYTE]: notSubscribed,
[RA.MULTIPLE_SUBSCRIPTIONS.BYTE]: multipleSubscriptions,
},
[TOPIC.RPC.BYTE]: {
[PA.ERROR.BYTE]: genericError,
[PA.PROVIDE.BYTE]: (msg: Message, isAck: boolean) => {
let name = msg.name
if (isAck) {
name = bulkNameToCorrelationId.get(msg.correlationId!)
bulkNameToCorrelationId.delete(msg.correlationId!)
}
return `P${y}${isAck ? A : '' }S${y}${name}${x}`
},
[PA.UNPROVIDE.BYTE]: (msg: Message, isAck: boolean) => {
let name = msg.name
if (isAck) {
name = bulkNameToCorrelationId.get(msg.correlationId!)
bulkNameToCorrelationId.delete(msg.correlationId!)
}
return `P${y}${isAck ? A : '' }US${y}${name}${x}`
},
[PA.REQUEST.BYTE]: (msg: Message) => `P${y}REQ${y}${msg.name}${y}${msg.correlationId}${y}${msg.data}${x}`,
[PA.RESPONSE.BYTE]: (msg: Message) => `P${y}RES${y}${msg.name}${y}${msg.correlationId}${y}${msg.data}${x}`,
[PA.REQUEST_ERROR.BYTE]: (msg: Message) => `P${y}E${y}${msg.data}${y}${msg.name}${y}${msg.correlationId}${x}`,
[PA.REJECT.BYTE]: (msg: Message) => `P${y}REJ${y}${msg.name}${y}${msg.correlationId}${x}`,
[PA.ACCEPT.BYTE]: (msg: Message) => `P${y}A${y}REQ${y}${msg.name}${y}${msg.correlationId}${x}`,
[PA.NO_RPC_PROVIDER.BYTE]: (msg: Message) => `P${y}E${y}NO_RPC_PROVIDER${y}${msg.name}${y}${msg.correlationId}${x}`,
[PA.INVALID_RPC_CORRELATION_ID.BYTE]: (msg: Message) => `P${y}E${y}INVALID_RPC_CORRELATION_ID${y}${msg.name}${y}${msg.correlationId}${x}`,
[PA.RESPONSE_TIMEOUT.BYTE]: (msg: Message) => `P${y}E${y}RESPONSE_TIMEOUT${y}${msg.name}${y}${msg.correlationId}${x}`,
[PA.MULTIPLE_RESPONSE.BYTE]: (msg: Message) => `P${y}E${y}MULTIPLE_RESPONSE${y}${msg.name}${y}${msg.correlationId}${x}`,
[PA.MULTIPLE_ACCEPT.BYTE]: (msg: Message) => `P${y}E${y}MULTIPLE_ACCEPT${y}${msg.name}${y}${msg.correlationId}${x}`,
[PA.ACCEPT_TIMEOUT.BYTE]: (msg: Message) => `P${y}E${y}ACCEPT_TIMEOUT${y}${msg.name}${y}${msg.correlationId}${x}`,
[PA.INVALID_MESSAGE_DATA.BYTE]: invalidMessageData,
[PA.MESSAGE_DENIED.BYTE]: messageDenied,
[PA.MESSAGE_PERMISSION_ERROR.BYTE]: messagePermissionError,
[PA.NOT_PROVIDED.BYTE]: notSubscribed,
[PA.MULTIPLE_PROVIDERS.BYTE]: multipleSubscriptions,
},
[TOPIC.PRESENCE.BYTE]: {
[UA.ERROR.BYTE]: genericError,
[UA.SUBSCRIBE.BYTE]: (msg: Message, isAck: boolean) => `U${y}${isAck ? A : '' }S${y}${msg.correlationId ? msg.correlationId + y : '' }${msg.name ? msg.name : msg.data}${x}`,
[UA.SUBSCRIBE_ALL.BYTE]: (msg: Message, isAck: boolean) => `U${y}${isAck ? A : '' }S${y}S${x}`,
[UA.UNSUBSCRIBE.BYTE]: (msg: Message, isAck: boolean) => `U${y}${isAck ? A : '' }US${y}${msg.correlationId ? msg.correlationId + y : '' }${msg.name ? msg.name : msg.data}${x}`,
[UA.UNSUBSCRIBE_ALL.BYTE]: (msg: Message, isAck: boolean) => `U${y}${isAck ? A : '' }US${y}US${x}`,
[UA.QUERY.BYTE]: (msg: Message) => `U${y}Q${y}${msg.correlationId}${y}${msg.data}${x}`,
[UA.QUERY_RESPONSE.BYTE]: (msg: Message) => `U${y}Q${y}${msg.correlationId}${y}${msg.data}${x}`,
[UA.QUERY_ALL.BYTE]: (msg: Message) => `U${y}Q${y}Q${x}`,
[UA.QUERY_ALL_RESPONSE.BYTE]: (msg: Message) => `U${y}Q${(msg.names as string[]).length > 0 ? y + (msg.names as string[]).join(y) : '' }${x}`,
[UA.PRESENCE_JOIN.BYTE]: (msg: Message) => `U${y}PNJ${y}${msg.name}${x}`,
[UA.PRESENCE_JOIN_ALL.BYTE]: (msg: Message) => `U${y}PNJ${y}${msg.name}${x}`,
[UA.PRESENCE_LEAVE.BYTE]: (msg: Message) => `U${y}PNL${y}${msg.name}${x}`,
[UA.PRESENCE_LEAVE_ALL.BYTE]: (msg: Message) => `U${y}PNL${y}${msg.name}${x}`,
[UA.INVALID_PRESENCE_USERS.BYTE]: (msg: Message) => `U${y}E${y}INVALID_PRESENCE_USERS${y}${msg.data}${x}`,
[UA.MESSAGE_DENIED.BYTE]: messageDenied,
[UA.MESSAGE_PERMISSION_ERROR.BYTE]: messagePermissionError,
[UA.NOT_SUBSCRIBED.BYTE]: notSubscribed,
[UA.MULTIPLE_SUBSCRIPTIONS.BYTE]: multipleSubscriptions,
},
}
/**
* Creates a deepstream message string, based on the
* provided parameters
*/
export const getMessage = (message: Message, isAck: boolean = false): string => {
if (!BUILDERS[message.topic] || !BUILDERS[message.topic][message.action]) {
console.trace('missing builder for', message, isAck)
return ''
}
const builder = BUILDERS[message.topic][message.action]
if (
!message.parsedData && !message.data &&
(
(message.topic === TOPIC.RPC.BYTE && (message.action === PA.RESPONSE.BYTE || message.action === PA.REQUEST.BYTE)) ||
(message.topic === TOPIC.RECORD.BYTE && (message.action === RA.PATCH.BYTE || message.action === RA.ERASE.BYTE))
)
) {
message.data = 'U'
} else if (message.parsedData) {
if (ABP[message.topic][message.action] === PAYLOAD_ENCODING.DEEPSTREAM) {
message.data = typed(message.parsedData)
} else {
message.data = JSON.stringify(message.parsedData)
}
} else if (message.data && ABP[message.topic][message.action] === PAYLOAD_ENCODING.DEEPSTREAM) {
message.data = typed(JSON.parse(message.data.toString()))
}
return builder(message, isAck)
}
/**
* Converts a serializable value into its string-representation and adds
* a flag that provides instructions on how to deserialize it.
*
* Please see messageParser.convertTyped for the counterpart of this method
*/
export const typed = function (value: any): string {
const type = typeof value
if (type === 'string') {
return TYPES.STRING + value
}
if (value === null) {
return TYPES.NULL
}
if (type === 'object') {
return TYPES.OBJECT + JSON.stringify(value)
}
if (type === 'number') {
return TYPES.NUMBER + value.toString()
}
if (value === true) {
return TYPES.TRUE
}
if (value === false) {
return TYPES.FALSE
}
if (value === undefined) {
return TYPES.UNDEFINED
}
throw new Error(`Can't serialize type ${value}`)
} | the_stack |
import dedent from 'dedent'
import { MarkdownString } from 'vscode'
export enum DirectiveType {
Global = 'Global',
Local = 'Local',
}
export enum DirectiveDefinedIn {
FrontMatter = 'frontMatter',
Comment = 'comment',
}
const directiveAlwaysAllowed = [
DirectiveDefinedIn.FrontMatter,
DirectiveDefinedIn.Comment,
] as const
export enum DirectiveProvidedBy {
Marpit = 'Marpit framework',
MarpCore = 'Marp Core',
MarpCLI = 'Marp CLI',
MarpVSCode = 'Marp for VS Code',
}
interface DirectiveInfoBase {
allowed: readonly DirectiveDefinedIn[]
completable?: boolean
description: string
details?: string
markdownDescription: MarkdownString
markdownDetails: MarkdownString
name: string
providedBy: DirectiveProvidedBy
type: DirectiveType
}
export type GlobalDirectiveInfo = DirectiveInfoBase & {
scoped?: never
type: DirectiveType.Global
}
export type LocalDirectiveInfo = DirectiveInfoBase & {
scoped?: boolean
type: DirectiveType.Local
}
export type DirectiveInfo = GlobalDirectiveInfo | LocalDirectiveInfo
export const createDirectiveInfo = (
info:
| Omit<GlobalDirectiveInfo, 'markdownDescription' | 'markdownDetails'>
| Omit<LocalDirectiveInfo, 'markdownDescription' | 'markdownDetails'>
): Readonly<DirectiveInfo> => {
const directiveText = `\`${info.name}\` [${
info.type
} directive](https://marpit.marp.app/directives?id=${info.type.toLowerCase()}-directives)${
info.scoped ? ' _[Scoped]_' : ''
}`
const mdDetails = `_Provided by ${info.providedBy}${
info.details ? ` ([See more details...](${info.details}))` : ''
}_`
return Object.freeze({
...info,
markdownDetails: new MarkdownString(mdDetails),
markdownDescription: new MarkdownString(
[directiveText, info.description, mdDetails].join('\n\n---\n\n'),
true
),
})
}
export const builtinDirectives = [
// Marp for VS Code
createDirectiveInfo({
name: 'marp',
description: 'Set whether or not enable Marp feature in VS Code.',
allowed: [DirectiveDefinedIn.FrontMatter],
providedBy: DirectiveProvidedBy.MarpVSCode,
type: DirectiveType.Global,
details: 'https://github.com/marp-team/marp-vscode#usage',
}),
// Marpit global directives
createDirectiveInfo({
name: 'theme',
description: dedent(`
Set a theme name of the slide deck.
You can choose from [Marp Core built-in themes](https://github.com/marp-team/marp-core/tree/main/themes) or registered custom themes.
`),
allowed: directiveAlwaysAllowed,
providedBy: DirectiveProvidedBy.Marpit,
type: DirectiveType.Global,
details: 'https://marpit.marp.app/directives?id=theme',
completable: true,
}),
createDirectiveInfo({
name: 'style',
description: dedent(`
Specify CSS for tweaking theme.
It is exactly same as defining inline style within Markdown. Useful if \`<style>\` would break the view in the other Markdown tools.
\`\`\`yaml
style: |
section {
background-color: #123;
color: #def;
}
\`\`\`
`),
allowed: directiveAlwaysAllowed,
providedBy: DirectiveProvidedBy.Marpit,
type: DirectiveType.Global,
details: 'https://marpit.marp.app/directives?id=tweak-theme-style',
}),
createDirectiveInfo({
name: 'headingDivider',
description: dedent(`
Specify heading divider option.
You may instruct to divide slide pages at before of headings automatically. It is useful for making slide from existing Markdown document.
It have to specify heading level from 1 to 6, or array of them. This feature is enabled at headings having the level _higher than or equal to the specified value_ if in a number, and it is enabled at _only specified levels_ if in array.
\`\`\`yaml
# Divide pages by headings having level 3 and higher (#, ##, ###)
headingDivider: 3
# Divide pages by only headings having level 1 and 3 (#, ###)
headingDivider: [1, 3]
\`\`\`
`),
allowed: directiveAlwaysAllowed,
providedBy: DirectiveProvidedBy.Marpit,
type: DirectiveType.Global,
details: 'https://marpit.marp.app/directives?id=heading-divider',
}),
// Marpit local directives
createDirectiveInfo({
name: 'paginate',
description: 'Show page number on the slide if set `true`.',
allowed: directiveAlwaysAllowed,
providedBy: DirectiveProvidedBy.Marpit,
type: DirectiveType.Local,
details: 'https://marpit.marp.app/directives?id=pagination',
completable: true,
}),
createDirectiveInfo({
name: 'header',
description: dedent(`
Set the content of slide header.
The content of header can use basic Markdown formatting. To prevent the broken parsing by YAML special characters, recommend to wrap by quotes \`"\` or \`'\` when used Markdown syntax:
\`\`\`yaml
header: "**Header content**"
\`\`\`
To clear the header content in the middle of slides, set an empty string:
\`\`\`yaml
header: ""
\`\`\`
`),
allowed: directiveAlwaysAllowed,
providedBy: DirectiveProvidedBy.Marpit,
type: DirectiveType.Local,
details: 'https://marpit.marp.app/directives?id=header-and-footer',
}),
createDirectiveInfo({
name: 'footer',
description: dedent(`
Set the content of slide footer.
The content of footer can use basic Markdown formatting. To prevent the broken parsing by YAML special characters, recommend to wrap by quotes \`"\` or \`'\` when used Markdown syntax:
\`\`\`yaml
footer: "**Footer content**"
\`\`\`
To clear the footer content in the middle of slides, set an empty string:
\`\`\`yaml
footer: ""
\`\`\`
`),
allowed: directiveAlwaysAllowed,
providedBy: DirectiveProvidedBy.Marpit,
type: DirectiveType.Local,
details: 'https://marpit.marp.app/directives?id=header-and-footer',
}),
createDirectiveInfo({
name: 'class',
description:
'Set [HTML `class` attribute](https://developer.mozilla.org/en-US/docs/Web/HTML/Global_attributes/class) for the slide element `<section>`.',
allowed: directiveAlwaysAllowed,
providedBy: DirectiveProvidedBy.Marpit,
type: DirectiveType.Local,
details: 'https://marpit.marp.app/directives?id=class',
}),
createDirectiveInfo({
name: 'backgroundColor',
description:
'Set [`background-color` style](https://developer.mozilla.org/en-US/docs/Web/CSS/background-color) of the slide.',
allowed: directiveAlwaysAllowed,
providedBy: DirectiveProvidedBy.Marpit,
type: DirectiveType.Local,
details: 'https://marpit.marp.app/directives?id=backgrounds',
}),
createDirectiveInfo({
name: 'backgroundImage',
description:
'Set [`background-image` style](https://developer.mozilla.org/en-US/docs/Web/CSS/background-image) of the slide.',
allowed: directiveAlwaysAllowed,
providedBy: DirectiveProvidedBy.Marpit,
type: DirectiveType.Local,
details: 'https://marpit.marp.app/directives?id=backgrounds',
}),
createDirectiveInfo({
name: 'backgroundPosition',
description:
'Set [`background-position` style](https://developer.mozilla.org/en-US/docs/Web/CSS/background-position) of the slide.',
allowed: directiveAlwaysAllowed,
providedBy: DirectiveProvidedBy.Marpit,
type: DirectiveType.Local,
details: 'https://marpit.marp.app/directives?id=backgrounds',
}),
createDirectiveInfo({
name: 'backgroundRepeat',
description:
'Set [`background-repeat` style](https://developer.mozilla.org/en-US/docs/Web/CSS/background-repeat) of the slide.',
allowed: directiveAlwaysAllowed,
providedBy: DirectiveProvidedBy.Marpit,
type: DirectiveType.Local,
details: 'https://marpit.marp.app/directives?id=backgrounds',
}),
createDirectiveInfo({
name: 'backgroundSize',
description:
'Set [`background-size` style](https://developer.mozilla.org/en-US/docs/Web/CSS/background-size) of the slide.',
allowed: directiveAlwaysAllowed,
providedBy: DirectiveProvidedBy.Marpit,
type: DirectiveType.Local,
details: 'https://marpit.marp.app/directives?id=backgrounds',
}),
createDirectiveInfo({
name: 'color',
description:
'Set [`color` style](https://developer.mozilla.org/en-US/docs/Web/CSS/color) of the slide.',
allowed: directiveAlwaysAllowed,
providedBy: DirectiveProvidedBy.Marpit,
type: DirectiveType.Local,
details: 'https://marpit.marp.app/directives?id=backgrounds',
}),
// Marp Core extension
createDirectiveInfo({
name: 'math',
description: dedent(`
Choose a library to render math typesetting in the current Markdown.
- \`katex\`: Use [KaTeX](https://katex.org/). It is generally known as faster parsing than MathJax.
- \`mathjax\`: Use [MathJax](https://www.mathjax.org/). It has more stable rendering and better syntax support.
Marp may change the default library of the ecosystem in the future. To prevent breaking existing slides, recommend to declare used library whenever to use math typesetting.
`),
allowed: directiveAlwaysAllowed,
providedBy: DirectiveProvidedBy.MarpCore,
type: DirectiveType.Global,
details: 'https://github.com/marp-team/marp-core#math-global-directive',
completable: true,
}),
createDirectiveInfo({
name: 'size',
description: dedent(`
Choose the slide size preset provided by theme.
Accepted presets are depending on using theme. In the case of Marp Core built-in theme, you can choose from \`16:9\` (1280x720) or \`4:3\` (960x720).
`),
allowed: directiveAlwaysAllowed,
providedBy: DirectiveProvidedBy.MarpCore,
type: DirectiveType.Global,
details: 'https://github.com/marp-team/marp-core#size-global-directive',
completable: true,
}),
// Marp CLI metadata options
createDirectiveInfo({
name: 'title',
description: 'Set title of the slide deck.',
allowed: directiveAlwaysAllowed,
providedBy: DirectiveProvidedBy.MarpCLI,
type: DirectiveType.Global,
details: 'https://github.com/marp-team/marp-cli#metadata',
}),
createDirectiveInfo({
name: 'description',
description: 'Set description of the slide deck.',
allowed: directiveAlwaysAllowed,
providedBy: DirectiveProvidedBy.MarpCLI,
type: DirectiveType.Global,
details: 'https://github.com/marp-team/marp-cli#metadata',
}),
createDirectiveInfo({
name: 'author',
description: 'Set author of the slide deck.',
allowed: directiveAlwaysAllowed,
providedBy: DirectiveProvidedBy.MarpCLI,
type: DirectiveType.Global,
details: 'https://github.com/marp-team/marp-cli#metadata',
}),
createDirectiveInfo({
name: 'keywords',
description:
'Set keywords for the slide deck. It accepts a string consisted by comma-separated keywords, or YAML array of keywords.',
allowed: directiveAlwaysAllowed,
providedBy: DirectiveProvidedBy.MarpCLI,
type: DirectiveType.Global,
details: 'https://github.com/marp-team/marp-cli#metadata',
}),
createDirectiveInfo({
name: 'url',
description: 'Set canonical URL for the slide deck.',
allowed: directiveAlwaysAllowed,
providedBy: DirectiveProvidedBy.MarpCLI,
type: DirectiveType.Global,
details: 'https://github.com/marp-team/marp-cli#metadata',
}),
createDirectiveInfo({
name: 'image',
description: 'Set Open Graph image URL.',
allowed: directiveAlwaysAllowed,
providedBy: DirectiveProvidedBy.MarpCLI,
type: DirectiveType.Global,
details: 'https://github.com/marp-team/marp-cli#metadata',
}),
] as const | the_stack |
import { Logger } from '../project';
import FetchClass from './fetch';
import { URL } from 'url';
import crypto = require('crypto');
import { JspmError, JspmUserError, bold, highlight } from '../utils/common';
import zlib = require('zlib');
import peek = require('buffer-peek-stream');
import fs = require('graceful-fs');
import tar = require('tar-fs');
import Stream = require('stream');
import execGit = require('@jspm/github/exec-git');
import rimraf = require('rimraf');
import path = require('path');
// Handler pair shared by every git-based protocol variant below.
const gitProtocol = {
  download: gitCheckout,
  resolve: gitResolve
};
/**
 * Registry mapping a source protocol name to its handlers.
 * A protocol without `resolve` passes the source through unchanged; a
 * protocol without `download` (e.g. 'link') cannot be downloaded.
 */
export const sourceProtocols: {
  [protocol: string]: {
    resolve?: (log: Logger, fetch: FetchClass, source: string, timeout: number) => Promise<string>,
    download?: (log: Logger, fetch: FetchClass, source: string, outDir: string, timeout: number) => Promise<void>
  }
} = {
  // all git transports share the same resolve/download pair
  'git': gitProtocol,
  'git+file': gitProtocol,
  'git+http': gitProtocol,
  'git+https': gitProtocol,
  'git+ssh': gitProtocol,
  // tarball sources only need a download step
  'http': { download: fetchRemoteTarball },
  'https': { download: fetchRemoteTarball },
  'file': { download: extractLocalTarball },
  // 'link' sources require neither resolution nor download here
  'link': {}
};
/**
 * Looks up the registered handler for a source's protocol (the text before
 * the first ':'). Throws a JspmUserError when no handler is registered.
 */
function getProtocolHandler (source: string) {
  // substr(0, -1) yields '' when there is no ':', which fails the lookup below
  const protocol = source.substr(0, source.indexOf(':'));
  const handler = sourceProtocols[protocol];
  if (!handler)
    throw new JspmUserError(`No handler available for source protocol ${bold(protocol)} processing ${source}.`);
  return handler;
}
export async function resolveSource (log: Logger, fetch: FetchClass, source: string, timeout: number): Promise<string> {
const protocolHandler = getProtocolHandler(source);
if (!protocolHandler.resolve)
return source;
return protocolHandler.resolve(log, fetch, source, timeout);
}
export function downloadSource (log: Logger, fetch: FetchClass, source: string, outDir: string, timeout: number): Promise<void> {
const protocolHandler = getProtocolHandler(source);
if (!protocolHandler.download)
throw new JspmError(`Invalid attempt to download source ${source}`);
return protocolHandler.download(log, fetch, source, outDir, timeout);
}
/**
 * Resolves a git source (e.g. `git+https://host/repo#branch-or-tag`) to an
 * exact commit hash reference using `git ls-remote`.
 *
 * Returns the source with its `#ref` fragment replaced by the resolved hash,
 * preserving the `git+` prefix when the input carried one.
 * Throws JspmUserError when the repo is not found, authentication fails, or
 * the requested ref does not exist on the remote.
 */
async function gitResolve (log: Logger, fetch: FetchClass, source: string, timeout: number): Promise<string> {
  // strip the 'git+' prefix; plain 'git:' sources are used verbatim
  let url = source.startsWith('git:') ? source : source.substr(4);
  const gitRefIndex = url.lastIndexOf('#');
  let gitRef = '';
  if (gitRefIndex !== -1) {
    gitRef = url.substr(gitRefIndex + 1);
    url = url.substr(0, gitRefIndex);
  }
  const logEnd = log.taskStart(`Resolving git source ${highlight(source)}`);
  try {
    const execOpts = { timeout, killSignal: 'SIGKILL', maxBuffer: 100 * 1024 * 1024 };
    let credentials = await fetch.getCredentials(url);
    if (credentials.basicAuth) {
      // embed basic auth credentials directly into the remote URL
      let urlObj = new URL(url);
      ({ username: urlObj.username, password: urlObj.password } = credentials.basicAuth);
      url = urlObj.href;
    }
    try {
      log.debug(`ls-remote ${url}${credentials.basicAuth ? ' (with basic auth)' : ''}`);
      // `var` on purpose: stdout must remain visible after this inner try block
      var stdout = await execGit(`ls-remote ${url} HEAD refs/tags/* refs/heads/*`, execOpts);
    }
    catch (err) {
      const str = err.toString();
      // not found
      if (str.indexOf('not found') !== -1)
        throw new JspmUserError(`Git source ${highlight(source)} not found.`);
      // invalid credentials
      if (str.indexOf('Invalid username or password') !== -1 || str.indexOf('fatal: could not read Username') !== -1)
        throw new JspmUserError(`git authentication failed resolving ${highlight(source)}.
Make sure that git is locally configured with the correct permissions.`);
      throw err;
    }
    // each ls-remote output line has the form '<hash>\t<refName>'
    let refs = stdout.split('\n');
    let hashMatch;
    for (let ref of refs) {
      if (!ref)
        continue;
      let hash = ref.substr(0, ref.indexOf('\t'));
      let refName = ref.substr(hash.length + 1);
      if (!gitRef && refName === 'HEAD') {
        // no explicit ref requested: resolve to the remote HEAD
        hashMatch = hash;
        break;
      }
      else if (refName.substr(0, 11) === 'refs/heads/') {
        if (gitRef === refName.substr(11)) {
          hashMatch = hash;
          break;
        }
      }
      else if (refName.substr(0, 10) === 'refs/tags/') {
        // a '^{}' suffix marks the dereferenced (peeled) entry for a tag
        if (refName.substr(refName.length - 3, 3) === '^{}') {
          if (gitRef === refName.substr(10, refName.length - 13)) {
            hashMatch = hash;
            break;
          }
        }
        else if (gitRef === refName.substr(10)) {
          hashMatch = hash;
          break;
        }
      }
    }
    if (!hashMatch)
      throw new JspmUserError(`Unable to resolve the ${highlight(gitRef || 'head')} git reference for ${source}.`);
    url += '#' + hashMatch;
    // restore the 'git+' prefix that was stripped above
    if (!source.startsWith('git:'))
      return 'git+' + url;
    else
      return url;
  }
  finally {
    logEnd();
  }
}
// A bare (partial or full) lowercase hex commit hash, at least 6 characters.
const gitRefRegEx = /^[a-f0-9]{6,}$/;

/**
 * Clones a git source into outDir at an exact commit hash, then strips the
 * `.git` directory so the result is a plain file tree.
 *
 * The source must carry a trailing `#<hash>` commit reference — branch/tag
 * names are rejected here so checkouts stay reproducible (gitResolve turns
 * symbolic refs into hashes beforehand).
 * Throws JspmUserError for malformed sources or missing repositories.
 */
async function gitCheckout (log: Logger, _fetch: FetchClass, source: string, outDir: string, timeout: number) {
  const execOpts = {
    cwd: outDir,
    timeout,
    killSignal: 'SIGKILL',
    maxBuffer: 100 * 1024 * 1024
  };
  // strip the 'git+' prefix ('git:' sources are used as-is)
  let gitSource = source.startsWith('git:') ? source : source.substr(4);
  let gitRefIndex = gitSource.lastIndexOf('#');
  if (gitRefIndex === -1)
    throw new JspmUserError(`Invalid source ${source}. Git sources must have an exact trailing # ref.`);
  let gitRef = gitSource.substr(gitRefIndex + 1);
  gitSource = gitSource.substr(0, gitRefIndex);
  if (!gitRef.match(gitRefRegEx))
    throw new JspmUserError(`Invalid source ${source}. Git source reference ${gitRef} must be a hash reference.`);
  // Fix: detect local clones on the stripped source — 'git+file:...' becomes
  // 'file:...' above, so the previous check against the unstripped `source`
  // could never match and the -l (hardlink) local-clone flag was dead code.
  const local = gitSource.startsWith('file:') ? '-l ' : '';
  // this will work for tags and branches, but we want to encourage commit references for uniqueness so dont want to reward this use case unfortunately
  // await execGit(`clone ${local}--depth=1 ${source.replace(/(['"()])/g, '\\\$1')} --branch ${ref.replace(/(['"()])/g, '\\\$1')} ${outDir}`, execOpts);
  // TODO: better sanitize against source injections here
  // do a full clone for the commit reference case
  // credentials used by git will be standard git credential manager which should be relied on
  const logEnd = log.taskStart('Cloning ' + highlight(source));
  try {
    // -n clones without a checkout; the exact hash is checked out just below
    await execGit(`clone ${gitRef ? '-n ' : ''}${local}${gitSource.replace(/(['"()])/g, '\\\$1')} ${outDir}`, execOpts);
    if (gitRef)
      await execGit(`checkout ${gitRef.replace(/(['"()])/g, '\\\$1')}`, execOpts);
  }
  catch (err) {
    if (err.toString().indexOf('Repository not found') !== -1)
      throw new JspmUserError(`Unable to find repo ${highlight(source)}. It may not exist, or authorization may be required.`);
    throw err;
  }
  finally {
    logEnd();
  }
  // once clone is successful, then we can remove the git directory
  await new Promise((resolve, reject) => rimraf(path.join(outDir, '.git'), err => err ? reject(err) : resolve()));
}
/**
 * Downloads a (possibly gzipped) tarball over HTTP(S) and extracts it into
 * outDir, verifying the integrity hash from the source's `#` fragment when
 * one is present (see readSource). Network errors that look transient are
 * flagged `retriable` before being rethrown.
 */
async function fetchRemoteTarball (log: Logger, fetch: FetchClass, source: string, outDir: string) {
  const { url, hashType, hash } = readSource(log, source);
  const fetchOptions = {
    headers: {
      accept: 'application/octet-stream'
    }
  };
  const href = url.href;
  const logEnd = log.taskStart('Fetching ' + highlight(href));
  try {
    // `var` on purpose: res must remain visible after the try block
    var res = await fetch.fetch(href, fetchOptions);
  }
  catch (err) {
    switch (err.code) {
      case 'ENOTFOUND':
        // only DNS (getaddrinfo) failures are retriable; otherwise leave the
        // error unflagged by breaking out of the switch
        if (err.toString().indexOf('getaddrinfo') === -1)
          break;
      // deliberate fallthrough for DNS failures
      case 'ECONNRESET':
      case 'ETIMEDOUT':
      case 'ESOCKETTIMEDOUT':
        err.retriable = true;
        err.hideStack = true;
    }
    throw err;
  }
  finally {
    logEnd();
  }
  if (res.status !== 200)
    throw new Error(`Bad download response code ${res.status} for ${source}`);
  let validationPromise;
  if (!hashType) {
    // no hash on the source: nothing to validate
    validationPromise = Promise.resolve()
  }
  else {
    const verifyHash = crypto.createHash(hashType);
    // Validate downloaded hash
    validationPromise = new Promise((resolve, reject) => {
      // paused so the hash's readable side buffers the digest until read below
      res.body.pipe(verifyHash)
        .pause()
        .on('finish', () => {
          let computedHash = <Buffer>verifyHash.read();
          if (!computedHash.equals(hash)) {
            let err = <JspmError>new Error(`Hash integrity compromised downloading ${href}.`);
            err.hideStack = true;
            reject(err);
          }
          resolve();
        })
        .on('error', reject);
    });
  }
  let stream: any = await new Promise((resolve, reject) => {
    // pipe through gunzip if a gzipped stream (0x1f 0x8b 0x08 = gzip magic + deflate)
    peek(res.body, 3, (err, bytes, stream) => {
      if (err)
        reject(err);
      else if (bytes[0] === 0x1f && bytes[1] === 0x8b && bytes[2] === 0x08)
        resolve(stream.pipe(zlib.createGunzip()).pause());
      else
        resolve(stream.pause());
    });
  });
  await Promise.all([
    validationPromise,
    // Unpack contents as a tar archive and save to targetDir
    new Promise((resolve, reject) => {
      stream.pipe(tar.extract(outDir, <any>{
        // directories r-x (0o555), files rw (0o666)
        dmode: 0o555,
        fmode: 0o666,
        strip: 1,
        // NOTE(review): looks intended to skip entries that are neither files
        // nor directories (symlinks etc.) — true appears to mean "skip" here.
        // tar-fs documents an `ignore` option with that semantic; confirm the
        // `filter` key (cast through <any>) is actually honored.
        filter: function(_, header) {
          return header.type !== 'file' && header.type !== 'directory'
        }
      }))
        .on('finish', resolve)
        .on('error', reject);
    })
  ]);
}
/**
 * Extracts a local (possibly gzipped) tarball referenced by a `file:` source
 * into outDir, verifying the integrity hash from the source's `#` fragment
 * when one is present (see readSource).
 */
async function extractLocalTarball (log: Logger, _fetch: FetchClass, source: string, outDir: string) {
  const { url, hashType, hash } = readSource(log, source);
  // graceful-fs accepts the file: URL object directly
  let stream = fs.createReadStream(url);
  let validationPromise;
  if (!hashType) {
    // no hash on the source: nothing to validate
    validationPromise = Promise.resolve()
  }
  else {
    const verifyHash = crypto.createHash(hashType);
    // Validate downloaded hash
    validationPromise = new Promise((resolve, reject) => {
      stream.pipe(verifyHash)
        .pause()
        .on('finish', () => {
          let computedHash = verifyHash.digest();
          if (!computedHash.equals(hash)) {
            let err = <JspmError>new Error(`Hash integrity compromised downloading ${url.href}.`);
            err.hideStack = true;
            reject(err);
          }
          resolve();
        })
        .on('error', reject);
    });
  }
  let extractStream = await new Promise<Stream.Readable>((resolve, reject) => {
    // pipe through gunzip if a gzipped stream (0x1f 0x8b 0x08 = gzip magic + deflate)
    peek(stream, 3, (err, bytes, stream) => {
      if (err)
        reject(err);
      else if (bytes[0] === 0x1f && bytes[1] === 0x8b && bytes[2] === 0x08)
        resolve(stream.pipe(zlib.createGunzip()).pause());
      else
        resolve(stream.pause());
    });
  });
  await Promise.all([
    validationPromise,
    // Unpack contents as a tar archive and save to targetDir
    new Promise((resolve, reject) => {
      extractStream.pipe(tar.extract(outDir, <any>{
        // directories r-x (0o555), files rw (0o666)
        dmode: 0o555,
        fmode: 0o666,
        strip: 1,
        // NOTE(review): looks intended to skip entries that are neither files
        // nor directories (symlinks etc.) — true appears to mean "skip" here.
        // tar-fs documents an `ignore` option with that semantic; confirm the
        // `filter` key (cast through <any>) is actually honored.
        filter: function(_, header) {
          return header.type !== 'file' && header.type !== 'directory'
        }
      }))
        .on('finish', resolve)
        .on('error', reject);
    })
  ]);
}
const base64RegEx = /[a-z0-9+/]*={0,2}$/i;
const hexRegEx = /[a-f0-9]*$/g;
const hashTypes = [
{
hashType: 'sha1',
len: 20
},
{
hashType: 'sha224',
len: 28
},
{
hashType: 'sha256',
len: 32
},
{
hashType: 'sha384',
len: 48
},
{
hashType: 'sha512',
len: 64
}
];
interface Source {
url: URL,
hash?: Buffer,
hashType?: string
};
function readSource (log: Logger, source: string): Source {
const hashIndex = source.lastIndexOf('#');
if (hashIndex === -1) {
const url = new URL(source);
return { url };
}
const url = new URL(source.substr(0, hashIndex));
const hashTypeIndex = source.indexOf('-', hashIndex + 1);
// direct hexadecimal hash for sha
if (hashTypeIndex === -1) {
const hashLen = source.length - hashIndex - 1;
hexRegEx.lastIndex = hashIndex + 1;
if (hexRegEx.exec(source)[0].length !== hashLen) {
log.warn(`Source ${source} does not have a valid hexadecimal hash so is being ignored.`);
return { url };
}
const halfHashLen = hashLen / 2;
let hashMatch = hashTypes.find(({ len }) => len === halfHashLen);
if (!hashMatch || hashLen % 2 !== 0) {
log.warn(`Source ${source} has a hexadecimal hash of invalid length for any sha hash so is being ignored.`);
return { url };
}
const hashType = hashMatch.hashType;
const hash = Buffer.from(source.substr(hashIndex + 1), 'hex');
return { url, hash, hashType };
}
// base64 integrity-style hash
else {
const hashType = source.substring(hashIndex + 1, hashTypeIndex);
const hashLen = source.length - hashTypeIndex - 1;
base64RegEx.lastIndex = hashTypeIndex + 1;
if (base64RegEx.exec(source)[0].length !== hashLen) {
log.warn(`Source ${source} does not have a valid base64 hash string so is being ignored.`);
return { url };
}
const hashMatch = hashTypes.find(({ hashType: type }) => hashType === type);
if (!hashMatch) {
log.warn(`Source ${source} is using an unsupported hash algorithm so is being ignored.`);
return { url };
}
const hash = Buffer.from(source.substr(hashTypeIndex + 1), 'base64');
if (hashMatch.len !== hash.length) {
log.warn(`Source ${source} does not have a valid length ${hashType} base64 hash so it is being ignored.`);;
return { url };
}
return { url, hash, hashType };
}
} | the_stack |
import { ethers, network } from 'hardhat';
import { BigNumber, Signer } from 'ethers';
import { expect, use } from 'chai';
import { solidity } from 'ethereum-waffle';
import {
TestERC20__factory,
TestERC20,
FakeSwapRouter__factory,
FakeSwapRouter,
BadERC20__factory,
BadERC20,
ERC20SwapToPay,
ERC20FeeProxy,
} from '../../src/types';
import { erc20FeeProxyArtifact, erc20SwapToPayArtifact } from '../../src/lib';
use(solidity);
// Integration tests for the ERC20SwapToPay contract: swapping a payment
// currency into the request currency and paying through ERC20FeeProxy.
// The FakeSwapRouter applies a fixed 2:1 rate (2 payment tokens per requested
// token), which all amount expectations below rely on.
describe('contract: SwapToPay', () => {
  let from: string;
  let to: string;
  let builder: string;
  let adminSigner: Signer;
  let signer: Signer;
  const exchangeRateOrigin = Math.floor(Date.now() / 1000);
  const referenceExample = '0xaaaa';
  let paymentNetworkErc20: TestERC20;
  let spentErc20: TestERC20;
  let erc20FeeProxy: ERC20FeeProxy;
  let fakeSwapRouter: FakeSwapRouter;
  let testSwapToPay: ERC20SwapToPay;
  let initialFromBalance: BigNumber;
  let defaultSwapRouterAddress: string;
  // 10^18: one whole token for an 18-decimal ERC20
  const erc20Decimal = BigNumber.from('1000000000000000000');
  const erc20Liquidity = erc20Decimal.mul(100);
  before(async () => {
    // first signer is the admin; the next three provide payer/payee/fee addresses
    [, from, to, builder] = (await ethers.getSigners()).map((s) => s.address);
    [adminSigner, signer] = await ethers.getSigners();
    erc20FeeProxy = erc20FeeProxyArtifact.connect(network.name, adminSigner);
    testSwapToPay = erc20SwapToPayArtifact.connect(network.name, adminSigner);
  });
  beforeEach(async () => {
    paymentNetworkErc20 = await new TestERC20__factory(adminSigner).deploy(erc20Decimal.mul(10000));
    spentErc20 = await new TestERC20__factory(adminSigner).deploy(erc20Decimal.mul(1000));
    // Deploy a fake router and feed it with 200 payment ERC20 + 100 requested ERC20
    // The fake router fakes 2 payment ERC20 = 1 requested ERC20
    fakeSwapRouter = await new FakeSwapRouter__factory(adminSigner).deploy();
    await spentErc20.transfer(fakeSwapRouter.address, erc20Liquidity.mul(2));
    await paymentNetworkErc20.transfer(fakeSwapRouter.address, erc20Liquidity);
    // remember the real router so afterEach can restore it
    defaultSwapRouterAddress = await testSwapToPay.swapRouter();
    await testSwapToPay.setRouter(fakeSwapRouter.address);
    await testSwapToPay.approveRouterToSpend(spentErc20.address);
    await testSwapToPay.approvePaymentProxyToSpend(paymentNetworkErc20.address);
    // the remaining calls act as the payer, not the admin
    testSwapToPay = await testSwapToPay.connect(signer);
    await spentErc20.transfer(from, erc20Decimal.mul(600));
    spentErc20 = TestERC20__factory.connect(spentErc20.address, signer);
    initialFromBalance = await spentErc20.balanceOf(from);
    await spentErc20.approve(testSwapToPay.address, initialFromBalance);
  });
  afterEach(async () => {
    testSwapToPay = testSwapToPay.connect(adminSigner);
    await testSwapToPay.setRouter(defaultSwapRouterAddress);
    // The contract should never keep any fund
    const contractPaymentCcyBalance = await spentErc20.balanceOf(testSwapToPay.address);
    const contractRequestCcyBalance = await paymentNetworkErc20.balanceOf(testSwapToPay.address);
    expect(contractPaymentCcyBalance.toNumber()).to.equals(0);
    expect(contractRequestCcyBalance.toNumber()).to.equals(0);
  });
  // asserts the payer got refunded everything when a swap reverted
  const expectFromBalanceUnchanged = async () => {
    const finalFromBalance = await spentErc20.balanceOf(from);
    expect(finalFromBalance.toString()).to.equals(initialFromBalance.toString());
  };
  it('swaps and pays the request', async function () {
    await expect(
      testSwapToPay.swapTransferWithReference(
        to,
        10,
        // Here we spend 26 max, for 22 used in theory, to test that 4 is given back
        26,
        [spentErc20.address, paymentNetworkErc20.address],
        referenceExample,
        1,
        builder,
        exchangeRateOrigin + 100,
      ),
    )
      .to.emit(erc20FeeProxy, 'TransferWithReferenceAndFee')
      .withArgs(
        ethers.utils.getAddress(paymentNetworkErc20.address),
        to,
        '10',
        ethers.utils.keccak256(referenceExample),
        '1',
        ethers.utils.getAddress(builder),
      );
    const finalBuilderBalance = await paymentNetworkErc20.balanceOf(builder);
    const finalIssuerBalance = await paymentNetworkErc20.balanceOf(to);
    expect(finalBuilderBalance.toNumber()).to.equals(1);
    expect(finalIssuerBalance.toNumber()).to.equals(10);
    // Test that the contract does not hold any fund after the transaction
    const finalContractPaymentBalance = await spentErc20.balanceOf(testSwapToPay.address);
    const finalContractRequestBalance = await paymentNetworkErc20.balanceOf(testSwapToPay.address);
    expect(finalContractPaymentBalance.toNumber()).to.equals(0);
    expect(finalContractRequestBalance.toNumber()).to.equals(0);
  });
  it('does not pay anyone if I swap 0', async function () {
    await expect(
      testSwapToPay.swapTransferWithReference(
        to,
        0,
        0,
        [spentErc20.address, paymentNetworkErc20.address],
        referenceExample,
        0,
        builder,
        exchangeRateOrigin + 100,
      ),
    )
      .to.emit(erc20FeeProxy, 'TransferWithReferenceAndFee')
      .withArgs(
        paymentNetworkErc20.address,
        to,
        '0',
        ethers.utils.keccak256(referenceExample),
        '0',
        builder,
      );
    const finalBuilderBalance = await paymentNetworkErc20.balanceOf(builder);
    const finalIssuerBalance = await paymentNetworkErc20.balanceOf(to);
    expect(finalBuilderBalance.toNumber()).to.equals(0);
    expect(finalIssuerBalance.toNumber()).to.equals(0);
  });
  it('cannot swap if too few payment tokens', async function () {
    await expect(
      testSwapToPay.swapTransferWithReference(
        to,
        10,
        21, // Should be at least (10 + 1) * 2
        [spentErc20.address, paymentNetworkErc20.address],
        referenceExample,
        1,
        builder,
        exchangeRateOrigin + 15,
      ),
    ).to.be.reverted;
    await expectFromBalanceUnchanged();
  });
  it('cannot swap with a past deadline', async function () {
    await expect(
      testSwapToPay.swapTransferWithReference(
        to,
        10,
        22,
        [spentErc20.address, paymentNetworkErc20.address],
        referenceExample,
        1,
        builder,
        exchangeRateOrigin - 15, // Past deadline
      ),
    ).to.be.reverted;
    await expectFromBalanceUnchanged();
  });
  it('cannot swap more tokens than liquidity', async function () {
    const tooHighAmount = 100;
    // sanity checks: the payer has the balance, but the router lacks liquidity
    expect(erc20Liquidity.mul(2).lt(initialFromBalance), 'Test irrelevant with low balance').to.be
      .true;
    expect(
      erc20Liquidity.lt(erc20Decimal.mul(tooHighAmount).mul(2)),
      'Test irrelevant with low amount',
    ).to.be.true;
    await expect(
      testSwapToPay.swapTransferWithReference(
        to,
        erc20Decimal.mul(tooHighAmount),
        initialFromBalance,
        [spentErc20.address, paymentNetworkErc20.address],
        referenceExample,
        1000000,
        builder,
        exchangeRateOrigin + 15,
      ),
    ).to.be.reverted;
    await expectFromBalanceUnchanged();
  });
  it('cannot swap more tokens than balance', async function () {
    const highAmount = erc20Decimal.mul(900);
    await spentErc20.approve(testSwapToPay.address, highAmount);
    expect(highAmount.gt(initialFromBalance), 'Test irrelevant with high balance').to.be.true;
    await expect(
      testSwapToPay.swapTransferWithReference(
        to,
        100,
        highAmount,
        [spentErc20.address, paymentNetworkErc20.address],
        referenceExample,
        10,
        builder,
        exchangeRateOrigin + 15,
      ),
    ).to.be.reverted;
    await expectFromBalanceUnchanged();
  });
  // BadERC20 is a token whose approve/transfer do not return booleans,
  // exercising the safe-wrapper behavior of the contract
  describe('Bad ERC20 support', () => {
    let badERC20: BadERC20;
    beforeEach(async () => {
      badERC20 = await new BadERC20__factory(adminSigner).deploy(1000, 'BadERC20', 'BAD', 8);
    });
    it('can approve bad ERC20 to be spent by the proxy', async () => {
      await expect(testSwapToPay.approvePaymentProxyToSpend(badERC20.address))
        .to.emit(badERC20, 'Approval')
        .withArgs(
          testSwapToPay.address,
          erc20FeeProxy.address,
          BigNumber.from(2).pow(256).sub(1).toString(),
        );
      const approval = await badERC20.allowance(testSwapToPay.address, erc20FeeProxy.address);
      expect(approval.toString()).to.equals(BigNumber.from(2).pow(256).sub(1).toString());
    });
    it('can approve bad ERC20 to be swapped by the router', async () => {
      await expect(testSwapToPay.approveRouterToSpend(badERC20.address))
        .to.emit(badERC20, 'Approval')
        .withArgs(
          testSwapToPay.address,
          fakeSwapRouter.address,
          BigNumber.from(2).pow(256).sub(1).toString(),
        );
      const approval = await badERC20.allowance(testSwapToPay.address, fakeSwapRouter.address);
      expect(approval.toString()).to.equals(BigNumber.from(2).pow(256).sub(1).toString());
    });
    it('swaps badERC20 to another ERC20 for payment', async () => {
      await testSwapToPay.approveRouterToSpend(badERC20.address);
      await badERC20.transfer(from, '100');
      await badERC20.connect(signer).approve(testSwapToPay.address, initialFromBalance);
      await expect(
        testSwapToPay.swapTransferWithReference(
          to,
          10,
          26,
          [badERC20.address, paymentNetworkErc20.address],
          referenceExample,
          1,
          builder,
          exchangeRateOrigin + 100,
        ),
      )
        .to.emit(erc20FeeProxy, 'TransferWithReferenceAndFee')
        .withArgs(
          paymentNetworkErc20.address,
          to,
          '10',
          ethers.utils.keccak256(referenceExample),
          '1',
          builder,
        );
      // Test that issuer and builder (fee receiver) have been paid
      const finalBuilderBalance = await paymentNetworkErc20.balanceOf(builder);
      const finalIssuerBalance = await paymentNetworkErc20.balanceOf(to);
      expect(finalBuilderBalance.toNumber()).to.equals(1);
      expect(finalIssuerBalance.toNumber()).to.equals(10);
      // Test that the contract does not hold any fund after the transaction
      const finalContractPaymentBalance = await badERC20.balanceOf(testSwapToPay.address);
      const finalContractRequestBalance = await paymentNetworkErc20.balanceOf(
        testSwapToPay.address,
      );
      expect(finalContractPaymentBalance.toNumber()).to.equals(0);
      expect(finalContractRequestBalance.toNumber()).to.equals(0);
    });
  });
});
import * as React from "react";
import { useCallback, useEffect, useMemo, useRef, useState } from "react";
import { TableOperation } from "boxed-expression-component/src/api";
import { DmnValidator } from "./DmnValidator";
import { AutoRow } from "../core";
import { createPortal } from "react-dom";
import { context as UniformsContext } from "uniforms";
import { ErrorBoundary } from "../common/ErrorBoundary";
import { EmptyState, EmptyStateBody, EmptyStateIcon } from "@patternfly/react-core/dist/js/components/EmptyState";
import { ExclamationIcon } from "@patternfly/react-icons/dist/js/icons/exclamation-icon";
import { Text, TextContent } from "@patternfly/react-core/dist/js/components/Text";
import { DmnGrid } from "./DmnGrid";
import { DmnRunnerRule, DmnRunnerTabular, DmnRunnerTabularProps } from "../boxed";
import { NotificationSeverity } from "@kogito-tooling/notifications/dist/api";
import { dmnAutoTableDictionaries, DmnAutoTableI18nContext, dmnAutoTableI18nDefaults } from "../i18n";
import { I18nDictionariesProvider } from "@kie-tooling-core/i18n/dist/react-components";
import nextId from "react-id-generator";
import { BoxedExpressionProvider } from "boxed-expression-component/src/components";
import { DmnTableJsonSchemaBridge } from "./DmnTableJsonSchemaBridge";
import { ColumnInstance } from "react-table";
// Evaluation outcome reported by the DMN runner for each decision.
export enum EvaluationStatus {
  SUCCEEDED = "SUCCEEDED",
  SKIPPED = "SKIPPED",
  FAILED = "FAILED",
}
// A validation/evaluation message attached to a decision result.
export interface DecisionResultMessage {
  severity: NotificationSeverity;
  message: string;
  messageType: string;
  sourceId: string;
  level: string;
}
// Possible value shapes a DMN decision evaluation can produce.
export type Result = boolean | number | null | object | object[] | string;
// Result of evaluating one decision node.
export interface DecisionResult {
  decisionId: string;
  decisionName: string;
  result: Result;
  messages: DecisionResultMessage[];
  evaluationStatus: EvaluationStatus;
}
// Top-level payload returned by the DMN runner.
export interface DmnResult {
  // error details/stack, present when evaluation itself failed
  details?: string;
  stack?: string;
  decisionResults?: DecisionResult[];
  messages: DecisionResultMessage[];
}
// Props for the DmnAutoTable component below.
interface Props {
  schema: any;
  tableData?: any;
  setTableData?: React.Dispatch<React.SetStateAction<any>>;
  // one (possibly undefined) result set per table row
  results: Array<DecisionResult[] | undefined>;
  formError: boolean;
  setFormError: React.Dispatch<any>;
}
// DOM id of the container the per-row <form> elements are portaled into.
const FORMS_ID = "unitable-forms";
// NOTE(review): module-level mutable singleton shared by every DmnAutoTable
// instance (see the "grid is a singleton" comment in the component) —
// confirm only one table is ever mounted at a time.
let grid: DmnGrid | undefined;
/**
 * Renders the DMN Runner as a boxed-expression table: every input row is an
 * auto-generated uniforms form (portaled into a shared forms <div>), and the
 * output columns/rules are rebuilt from `props.results` whenever the schema,
 * results, or table data change.
 */
export function DmnAutoTable(props: Props) {
  const errorBoundaryRef = useRef<ErrorBoundary>(null);
  const [rowQuantity, setRowQuantity] = useState<number>(1);
  // True once the portal target <div id={FORMS_ID}> at the bottom has mounted.
  const [formsDivRendered, setFormsDivRendered] = useState<boolean>(false);
  // uniforms schema bridge derived from the DMN JSON schema (empty-schema fallback).
  const bridge = useMemo(() => {
    return new DmnValidator().getBridge(props.schema ?? {});
  }, [props.schema]);
  // grid is a singleton
  // NOTE(review): `grid` is a module-level variable, not component state — a
  // remount reuses the previously created instance; confirm that sharing is intended.
  grid = useMemo(() => {
    return bridge ? (grid ? grid : new DmnGrid(bridge)) : undefined;
  }, [bridge]);
  // Only render the table once the schema produced at least one input column.
  const shouldRender = useMemo(() => (grid?.getInput().length ?? 0) > 0, [grid]);
  // grid should be updated everytime the bridge is updated
  useEffect(() => {
    grid?.updateBridge(bridge);
  }, [bridge]);
  // columns are saved in the grid instance, so some values can be used to improve re-renders (e.g. cell width)
  const onColumnsUpdate = useCallback((columns: ColumnInstance[]) => {
    grid?.setPreviousColumns(columns);
  }, []);
  // Applies a row operation (insert above/below, delete, clear, duplicate) to
  // the table data through the setTableData updater, always returning a new array.
  const handleOperation = useCallback(
    (tableOperation: TableOperation, rowIndex: number) => {
      switch (tableOperation) {
        case TableOperation.RowInsertAbove:
          props.setTableData?.((previousTableData: any) => {
            return [...previousTableData.slice(0, rowIndex), {}, ...previousTableData.slice(rowIndex)];
          });
          break;
        case TableOperation.RowInsertBelow:
          props.setTableData?.((previousTableData: any) => {
            return [...previousTableData.slice(0, rowIndex + 1), {}, ...previousTableData.slice(rowIndex + 1)];
          });
          break;
        case TableOperation.RowDelete:
          props.setTableData?.((previousTableData: any) => {
            return [...previousTableData.slice(0, rowIndex), ...previousTableData.slice(rowIndex + 1)];
          });
          break;
        case TableOperation.RowClear:
          props.setTableData?.((previousTableData: any) => {
            const newTableData = [...previousTableData];
            newTableData[rowIndex] = {};
            return newTableData;
          });
          break;
        case TableOperation.RowDuplicate:
          // NOTE(review): the duplicated row object is inserted by reference
          // (no clone) — later edits may show up in both rows; confirm intended.
          props.setTableData?.((previousTableData: any) => {
            return [
              ...previousTableData.slice(0, rowIndex + 1),
              previousTableData[rowIndex],
              ...previousTableData.slice(rowIndex + 1),
            ];
          });
      }
    },
    [props.setTableData]
  );
  // Callback handed to the table: records the new row count and, when the
  // change came from a row operation, applies it to the table data too.
  const onRowNumberUpdated = useCallback(
    (rowQtt: number, operation?: TableOperation, rowIndex?: number) => {
      setRowQuantity(rowQtt);
      if (operation !== undefined && rowIndex !== undefined) {
        handleOperation(operation, rowIndex);
      }
    },
    [handleOperation]
  );
  // Stores the submitted form model into the table row at `index`.
  const onSubmit = useCallback(
    (model: any, index) => {
      props.setTableData?.((previousTableData: any) => {
        const newTableData = [...previousTableData];
        newTableData[index] = model;
        return newTableData;
      });
    },
    [props.setTableData]
  );
  // NOTE(review): identical body to onSubmit — the model is stored even when
  // validation reports an error (the `error` argument is ignored); confirm intended.
  const onValidate = useCallback(
    (model: any, error: any, index) => {
      props.setTableData?.((previousTableData: any) => {
        const newTableData = [...previousTableData];
        newTableData[index] = model;
        return newTableData;
      });
    },
    [props.setTableData]
  );
  // every input row is managed by an AutoRow. Each row is a form, and inside of it, cell are auto generated
  // using the uniforms library
  // Returns a component wrapping the row's cells in an AutoRow; the actual
  // <form> element is portaled into the shared FORMS_ID container.
  const getAutoRow = useCallback(
    (data, rowIndex: number, bridge: DmnTableJsonSchemaBridge) =>
      ({ children }: any) =>
        (
          <AutoRow
            schema={bridge}
            autosave={true}
            autosaveDelay={500}
            model={data}
            onSubmit={(model: any) => onSubmit(model, rowIndex)}
            onValidate={(model: any, error: any) => onValidate(model, error, rowIndex)}
            placeholder={true}
          >
            <UniformsContext.Consumer>
              {(ctx: any) => (
                <>
                  {createPortal(
                    <form id={`dmn-auto-form-${rowIndex}`} onSubmit={(data) => ctx?.onSubmit(data)} />,
                    document.getElementById(FORMS_ID)!
                  )}
                  {children}
                </>
              )}
            </UniformsContext.Consumer>
          </AutoRow>
        ),
    [onSubmit, onValidate]
  );
  const [selectedExpression, setExpression] = useState<Partial<DmnRunnerTabularProps>>();
  // Rebuilds the boxed expression (inputs, outputs, rules) from the latest
  // evaluation results and pushes it into `selectedExpression`.
  useEffect(() => {
    const filteredResults = props.results?.filter((result) => result !== undefined);
    if (grid && filteredResults) {
      const [outputSet, outputEntries] = grid.generateBoxedOutputs(props.schema ?? {}, filteredResults);
      // generate output
      const output: any[] = Array.from(outputSet.values());
      // generate rules
      const rules: any[] = [];
      // Total cell count per row: nested properties each count as one entry.
      const inputEntriesLength = grid
        .getInput()
        .reduce((acc, i) => (i.insideProperties ? acc + i.insideProperties.length : acc + 1), 0);
      const inputEntries = new Array(inputEntriesLength);
      for (let i = 0; i < rowQuantity; i++) {
        const rule: DmnRunnerRule = {
          inputEntries,
          outputEntries: (outputEntries?.[i] as string[]) ?? [],
        };
        // Row forms can only be portaled once the FORMS_ID div exists.
        if (formsDivRendered) {
          rule.rowDelegate = getAutoRow(props.tableData[i], i, bridge);
        }
        rules.push(rule);
      }
      // clone without references an array that maybe contains an object
      output.forEach((o, i) => {
        const filteredOutputEntries = rules[i]?.outputEntries.filter(
          (outputEntry: any[]) => typeof outputEntry === "object"
        );
        if (filteredOutputEntries?.length > 0) {
          o.insideProperties = filteredOutputEntries?.reduce((acc: any[], outputEntry: any[]) => {
            if (Array.isArray(outputEntry)) {
              acc.push([...outputEntry]);
              return acc;
            }
            if (typeof outputEntry === "object") {
              acc.push(Object.assign({}, outputEntry));
              return acc;
            }
            return [...acc, outputEntry];
          }, []);
        }
      });
      grid?.updateWidth(output, rules);
      // Keep the previous uid when one exists so the table is updated in place.
      setExpression({
        input: grid.getInput(),
        output,
        rules,
        uid: selectedExpression?.uid ?? nextId(),
      });
    }
  }, [
    bridge,
    formsDivRendered,
    getAutoRow,
    grid,
    props.results,
    props.schema,
    props.tableData,
    rowQuantity,
    selectedExpression?.uid,
  ]);
  // Static fallback shown by the ErrorBoundary when rendering throws.
  const formErrorMessage = useMemo(
    () => (
      <div>
        <EmptyState>
          <EmptyStateIcon icon={ExclamationIcon} />
          <TextContent>
            <Text component={"h2"}>Error</Text>
          </TextContent>
          <EmptyStateBody>
            <p>An error has happened</p>
          </EmptyStateBody>
        </EmptyState>
      </div>
    ),
    []
  );
  // Resets the ErrorBoundary everytime the FormSchema is updated
  useEffect(() => {
    errorBoundaryRef.current?.reset();
  }, [bridge]);
  return (
    <>
      {shouldRender && bridge && selectedExpression && (
        <ErrorBoundary ref={errorBoundaryRef} setHasError={props.setFormError} error={formErrorMessage}>
          <I18nDictionariesProvider
            defaults={dmnAutoTableI18nDefaults}
            dictionaries={dmnAutoTableDictionaries}
            initialLocale={navigator.language}
            ctx={DmnAutoTableI18nContext}
          >
            <BoxedExpressionProvider expressionDefinition={selectedExpression} isRunnerTable={true}>
              <DmnRunnerTabular
                name={"DMN Runner"}
                onRowNumberUpdated={onRowNumberUpdated}
                onColumnsUpdate={onColumnsUpdate}
                {...selectedExpression}
              />
            </BoxedExpressionProvider>
          </I18nDictionariesProvider>
        </ErrorBoundary>
      )}
      {/* Portal target for the per-row forms; the ref callback flips
          formsDivRendered on mount. */}
      <div ref={() => setFormsDivRendered(true)} id={FORMS_ID} />
    </>
  );
}
import {expect} from 'chai'
import nock from 'nock' // WARN: nock must be imported before NodeHttpTransport, since it modifies node's http
import {InfluxDB, ClientOptions, FluxTableMetaData} from '../../src'
import fs from 'fs'
import {CollectLinesObserver} from './util/CollectLinesObserver'
import {CollectTablesObserver} from './util/CollectTablesObserver'
import simpleResponseLines from '../fixture/query/simpleResponseLines.json'
import zlib from 'zlib'
// Fixed org name and the query endpoint derived from it.
const ORG = 'my-org'
const QUERY_PATH = '/api/v2/query?org=' + ORG
// Points at a non-existent host; nock intercepts every request in these tests.
const clientOptions: ClientOptions = {
  token: 'a',
  url: 'http://fake:8086',
}
// QueryApi test suite: every test mocks the InfluxDB HTTP endpoint with nock,
// so no real network traffic is generated.
describe('QueryApi', () => {
  beforeEach(() => {
    // Fail fast if any code tries to reach a real host.
    nock.disableNetConnect()
  })
  afterEach(() => {
    // Remove all interceptors and restore normal networking.
    nock.cleanAll()
    nock.enableNetConnect()
  })
  it('with function does not mutate this', () => {
    const first = new InfluxDB(clientOptions).getQueryApi(ORG)
    const second = first.with({gzip: true})
    // with() must return a new instance, leaving the original untouched.
    expect(first).is.not.equal(second)
  })
  it('receives lines', async () => {
    const subject = new InfluxDB(clientOptions).getQueryApi(ORG)
    nock(clientOptions.url)
      .post(QUERY_PATH)
      .reply((_uri, _requestBody) => {
        return [
          200,
          fs.createReadStream('test/fixture/query/simpleResponse.txt'),
          {'retry-after': '1'},
        ]
      })
      .persist()
    const target = new CollectLinesObserver()
    // queryLines is callback-based; wrap it in a Promise to await completion.
    await new Promise((resolve, reject) =>
      subject.queryLines(
        'from(bucket:"my-bucket") |> range(start: 0)',
        target.attach(resolve, reject)
      )
    )
    expect(target.completed).to.equals(1)
    expect(target.lines).to.deep.equal(simpleResponseLines)
  })
  // Table-driven tests over fixture/gzip combinations. The leading semicolon
  // guards against ASI joining this array literal to the previous statement.
  ;[
    ['response2', undefined],
    ['response2', true],
    ['response3', false],
  ].forEach(([name, gzip]) => {
    it(`receives tables from ${name} with gzip=${gzip}`, async () => {
      const subject = new InfluxDB(clientOptions)
        .getQueryApi(ORG)
        .with({gzip: gzip as boolean | undefined})
      nock(clientOptions.url)
        .post(QUERY_PATH)
        .reply((_uri, _requestBody) => {
          let stream: any = fs.createReadStream(
            `test/fixture/query/${name}.txt`
          )
          // Optionally gzip the response body to exercise decompression.
          if (gzip) stream = stream.pipe(zlib.createGzip())
          return [200, stream, {'content-encoding': gzip ? 'gzip' : 'identity'}]
        })
        .persist()
      const target = new CollectTablesObserver()
      await new Promise((resolve, reject) =>
        subject.queryRows(
          'from(bucket:"my-bucket") |> range(start: 0)',
          target.attach(resolve, reject)
        )
      )
      // The expected parse result is stored next to each fixture.
      const response = JSON.parse(
        fs.readFileSync(`test/fixture/query/${name}.parsed.json`, 'utf8')
      )
      expect(target.completed).to.equals(1)
      // console.log(JSON.stringify({tables: target.tables, rows: target.rows}))
      expect(target.tables).to.deep.equal(response.tables)
      expect(target.rows).to.deep.equal(response.rows)
    })
  })
  it('receives properly indexed table data', async () => {
    const subject = new InfluxDB(clientOptions).getQueryApi(ORG).with({})
    nock(clientOptions.url)
      .post(QUERY_PATH)
      .reply((_uri, _requestBody) => {
        // Inline annotated-CSV body; no fixture file needed here.
        return [
          200,
          `,result,table,_start,_stop,_time,_value,_field,_measurement,location
,,0,1970-01-01T00:00:00Z,2019-12-12T09:05:37.96237406Z,1970-01-01T00:26:15.995033574Z,55,value,temperature,west
,,0,1970-01-01T00:00:00Z,2019-12-12T09:05:37.96237406Z,1970-01-01T00:26:16.063594313Z,55,value,temperature,west
,,0,1970-01-01T00:00:00Z,2019-12-12T09:05:37.96237406Z,1970-01-01T00:26:16.069518557Z,55,value,temperature,west`,
          {'retry-after': '1'},
        ]
      })
      .persist()
    const values: Array<string> = []
    await new Promise((resolve, reject) =>
      subject.queryRows('from(bucket:"my-bucket") |> range(start: 0)', {
        next(row: string[], meta: FluxTableMetaData): void {
          // meta.column() resolves the column index by name for each row.
          values.push(row[meta.column('_value').index])
        },
        error(error: Error): void {
          reject(error)
        },
        complete(): void {
          resolve(undefined)
        },
      })
    )
    expect(values).to.deep.equal(['55', '55', '55'])
  })
  it('processes quoted lines properly influxdata/influxdb-client-js#179', async () => {
    const subject = new InfluxDB(clientOptions).getQueryApi(ORG).with({})
    nock(clientOptions.url)
      .post(QUERY_PATH)
      .reply((_uri, _requestBody) => {
        // CSV with a quoted field containing a comma (regression for #179).
        return [
          200,
          `#group,false,false,true,false,false,true
#datatype,string,long,string,double,string,string
#default,_result,,,,,
,result,table,id,st_length,st_linestring,trip_id
,,0,GO506_20_6431,25.463641400535032,"-73.68691 40.820317, -73.690054 40.815413",GO506_20_6431
,,1,GO506_20_6431,25.463641400535032,"-73.68691 40.820317, -73.690054 40.815413",GO506_20_6431`,
          {'retry-after': '1'},
        ]
      })
      .persist()
    const values: Array<any> = []
    await new Promise((resolve, reject) =>
      subject.queryRows('from(bucket:"my-bucket") |> range(start: 0)', {
        next(row: string[], meta: FluxTableMetaData): void {
          values.push(meta.toObject(row))
        },
        error(error: Error): void {
          reject(error)
        },
        complete(): void {
          resolve(undefined)
        },
      })
    )
    expect(values).to.deep.equal([
      {
        result: '_result',
        table: 0,
        id: 'GO506_20_6431',
        st_length: 25.463641400535032,
        st_linestring: '-73.68691 40.820317, -73.690054 40.815413',
        trip_id: 'GO506_20_6431',
      },
      {
        result: '_result',
        table: 1,
        id: 'GO506_20_6431',
        st_length: 25.463641400535032,
        st_linestring: '-73.68691 40.820317, -73.690054 40.815413',
        trip_id: 'GO506_20_6431',
      },
    ])
  })
  it('sends custom now, type, or header', async () => {
    let body: any
    let authorization: any
    nock(clientOptions.url)
      .post(QUERY_PATH)
      // function expression (not arrow) so nock can bind `this` to the request.
      .reply(function(_uri, requestBody) {
        body = requestBody
        authorization = this.req.headers.authorization
        return [200, '', {}]
      })
      .persist()
    const query = 'from(bucket:"my-bucket") |> range(start: 0)'
    const tests: Record<string, any>[] = [
      {
        now: undefined,
        type: undefined,
      },
      {
        now: undefined,
        type: undefined,
        headers: {authorization: 'Token customToken'},
      },
      {
        now: '2020-10-05T14:48:00.000Z',
        type: 'whatever',
      },
    ]
    for (const tc of tests) {
      let subject = new InfluxDB(clientOptions).getQueryApi({
        org: ORG,
        headers: {...tc.headers},
      })
      if (tc.now) {
        subject = subject.with({now: () => tc.now as string})
      }
      if (tc.type) {
        subject = subject.with({type: tc.type as any})
      }
      await new Promise((resolve, reject) =>
        subject.queryRows(query, {
          next(_row: string[], _meta: FluxTableMetaData): void {},
          error(error: Error): void {
            reject(error)
          },
          complete(): void {
            resolve(undefined)
          },
        })
      )
      // Verify the captured request body and authorization header.
      expect(body?.type).equals(tc.type ?? 'flux')
      expect(body?.query).deep.equals(query)
      expect(body?.now).equals(tc.now)
      expect(authorization).equals(
        tc.headers?.authorization || `Token ${clientOptions.token}`
      )
    }
  })
  it('collectLines collects lines', async () => {
    const subject = new InfluxDB(clientOptions).getQueryApi(ORG).with({})
    nock(clientOptions.url)
      .post(QUERY_PATH)
      .reply((_uri, _requestBody) => {
        return [
          200,
          fs.createReadStream('test/fixture/query/simpleResponse.txt'),
          {'retry-after': '1'},
        ]
      })
      .persist()
    const data = await subject.collectLines(
      'from(bucket:"my-bucket") |> range(start: 0)'
    )
    expect(data).to.deep.equal(simpleResponseLines)
  })
  it('collectLines fails on server error', async () => {
    const subject = new InfluxDB(clientOptions).getQueryApi(ORG).with({})
    nock(clientOptions.url)
      .post(QUERY_PATH)
      .reply((_uri, _requestBody) => {
        return [
          500,
          fs.createReadStream('test/fixture/query/simpleResponse.txt'),
          {'retry-after': '1'},
        ]
      })
      .persist()
    await subject
      .collectLines('from(bucket:"my-bucket") |> range(start: 0)')
      .then(
        () => expect.fail('client error expected on server error'),
        () => true // failure is expected
      )
  })
  it('collectRows collects rows', async () => {
    const subject = new InfluxDB(clientOptions).getQueryApi(ORG).with({})
    nock(clientOptions.url)
      .post(QUERY_PATH)
      .reply((_uri, _requestBody) => {
        return [
          200,
          fs.createReadStream('test/fixture/query/simpleResponse.txt'),
          {'retry-after': '1'},
        ]
      })
      .persist()
    const data = await subject.collectRows(
      'from(bucket:"my-bucket") |> range(start: 0)'
    )
    expect(data.length).equals(5)
    expect(data).to.be.an('array')
    expect(data[1]).to.be.an('object')
  })
  it('collectRows can collect every second row as string', async () => {
    const subject = new InfluxDB(clientOptions).getQueryApi(ORG).with({})
    nock(clientOptions.url)
      .post(QUERY_PATH)
      .reply((_uri, _requestBody) => {
        return [
          200,
          fs.createReadStream('test/fixture/query/simpleResponse.txt'),
          {'retry-after': '1'},
        ]
      })
      .persist()
    let i = 0
    // The row mapper may return undefined to skip a row entirely.
    const data = await subject.collectRows(
      'from(bucket:"my-bucket") |> range(start: 0)',
      () => (i++ % 2 === 1 ? undefined : String(i))
    )
    expect(data.length).equals(3)
    expect(data).to.be.an('array')
    expect(data[2]).equals('5')
  })
  it('collectRows fails on server error', async () => {
    const subject = new InfluxDB(clientOptions).getQueryApi(ORG).with({})
    nock(clientOptions.url)
      .post(QUERY_PATH)
      .reply((_uri, _requestBody) => {
        return [
          500,
          fs.createReadStream('test/fixture/query/simpleResponse.txt'),
          {'retry-after': '1'},
        ]
      })
      .persist()
    await subject
      .collectRows('from(bucket:"my-bucket") |> range(start: 0)')
      .then(
        () => expect.fail('client error expected on server error'),
        () => true // error is expected
      )
  })
  it('queryRaw returns the whole response text', async () => {
    const subject = new InfluxDB(clientOptions).getQueryApi(ORG).with({})
    const expected = fs
      .readFileSync('test/fixture/query/simpleResponse.txt')
      .toString()
    nock(clientOptions.url)
      .post(QUERY_PATH)
      .reply((_uri, _requestBody) => {
        return [200, expected, {'retry-after': '1', 'content-type': 'text/csv'}]
      })
      .persist()
    const data = await subject.queryRaw(
      'from(bucket:"my-bucket") |> range(start: 0)'
    )
    expect(data).equals(expected)
  })
  it('queryRaw returns the whole response even if response content type is not text', async () => {
    const subject = new InfluxDB(clientOptions).getQueryApi(ORG).with({})
    const expected = fs
      .readFileSync('test/fixture/query/simpleResponse.txt')
      .toString()
    nock(clientOptions.url)
      .post(QUERY_PATH)
      .reply((_uri, _requestBody) => {
        // Note: no content-type header at all in this variant.
        return [200, expected, {'retry-after': '1'}]
      })
      .persist()
    const data = await subject.queryRaw(
      'from(bucket:"my-bucket") |> range(start: 0)'
    )
    expect(data).equals(expected)
  })
  it('queryRaw returns the plain response text even it is gzip encoded', async () => {
    const subject = new InfluxDB(clientOptions)
      .getQueryApi(ORG)
      .with({gzip: true})
    nock(clientOptions.url)
      .post(QUERY_PATH)
      .reply((_uri, _requestBody) => {
        return [
          200,
          fs
            .createReadStream('test/fixture/query/simpleResponse.txt')
            .pipe(zlib.createGzip()),
          {'content-encoding': 'gzip', 'content-type': 'text/csv'},
        ]
      })
      .persist()
    const data = await subject.queryRaw(
      'from(bucket:"my-bucket") |> range(start: 0)'
    )
    // The client must transparently gunzip back to the original fixture text.
    const expected = fs
      .readFileSync('test/fixture/query/simpleResponse.txt')
      .toString()
    expect(data).equals(expected)
  })
  it('queryRaw fails on server error', async () => {
    const subject = new InfluxDB(clientOptions).getQueryApi(ORG).with({})
    nock(clientOptions.url)
      .post(QUERY_PATH)
      .reply((_uri, _requestBody) => {
        return [
          500,
          fs.createReadStream('test/fixture/query/simpleResponse.txt'),
          {'retry-after': '1'},
        ]
      })
      .persist()
    await subject.queryRaw('from(bucket:"my-bucket") |> range(start: 0)').then(
      () => expect.fail('client error expected on server error'),
      () => true // error is expected
    )
  })
})
import * as Path from "path";
import * as Fs from "fs";
import { JsxRenderer, IndexPage, createElement } from "../../src";
import Template from "../jsx-templates/test1";
import Template2 from "../jsx-templates/test2";
import Template3 from "../jsx-templates/test3";
import { expect } from "chai";
import Template4 from "../jsx-templates/test4";
import Template5 from "../jsx-templates/test5";
import Template6 from "../jsx-templates/test6";
import Template7 from "../jsx-templates/test7";
import Template8 from "../jsx-templates/test8";
import Template9 from "../jsx-templates/test9";
import Template91 from "../jsx-templates/test91";
import TemplateRegisterTokenIds from "../jsx-templates/test-register-token-ids";
import * as xstdout from "xstdout";
import { asyncVerify, runFinally } from "run-verify";
describe("IndexPage", function () {
  // IndexPage.memoize renders the doctype preamble; the default is "html"
  // and a custom DOCTYPE prop replaces it verbatim.
  it("should have static memoize", () => {
    const cases: Array<[object, string]> = [
      [{}, `<!DOCTYPE html>`],
      [{ DOCTYPE: "blah" }, `<!DOCTYPE blah>`]
    ];
    for (const [props, expected] of cases) {
      expect(IndexPage.memoize(props)).equal(expected);
    }
  });
});
// Jsx Renderer test suite. Each test builds a JsxRenderer around one of the
// fixture templates in ../jsx-templates and checks the rendered output (or
// the propagated error) in context.output._result / context.result.
describe("Jsx Renderer", function () {
  it("getTokenInst should return undefined if no token instance", () => {
    const renderer = new JsxRenderer({
      insertTokenIds: true,
      templateFullPath: Path.dirname(require.resolve("../jsx-templates/test1")),
      template: Template,
      tokenHandlers: "../fixtures/token-handler"
    });
    // Unknown token id -> no instance is looked up.
    expect(renderer.getTokenInst({ props: { _id: "blah" } })).to.equal(undefined);
  });
  it("should have re-entrant initializeRenderer", () => {
    const renderer = new JsxRenderer({
      insertTokenIds: true,
      templateFullPath: Path.dirname(require.resolve("../jsx-templates/test1")),
      template: Template,
      tokenHandlers: "../fixtures/token-handler"
    });
    // Calling multiple times (with or without the force flag) must not throw.
    renderer.initializeRenderer();
    renderer.initializeRenderer(true);
    renderer.initializeRenderer();
  });
  it("should give top level Components depth 0", () => {
    // Component reads its nesting depth from the render scope.
    const TestDepth = (props, context, scope) => {
      return `TestDepth: ${scope.depth}`;
    };
    const renderer = new JsxRenderer({
      insertTokenIds: true,
      templateFullPath: Path.dirname(require.resolve("../jsx-templates/test1")),
      // IndexPage doesn't nest its children so depth doesn't increase from it
      template: (
        <IndexPage>
          <html>
            <head />
            <body>
              <TestDepth />
            </body>
          </html>
        </IndexPage>
      ),
      tokenHandlers: "../fixtures/token-handler"
    });
    renderer.initializeRenderer();
    return renderer.render({}).then(context => {
      expect(context.output._result).contains("TestDepth: 0");
    });
  });
  it("should recreate nest Components and give them depth > 0", () => {
    const TestDepth2 = (props, context, scope) => {
      return `TestDepth2: ${scope.depth} elementId: ${scope.element.id}`;
    };
    const TestDepth1 = (props, context, scope) => {
      return (
        <div>
          {`TestDepth1: ${scope.depth} elementId: ${scope.element.id}`}
          <TestDepth2
            // A directly nested Component is created again in every rendering pass
          />
          {props.children}
        </div>
      );
    };
    const TestDepth0 = (props, context, scope) => {
      return (
        <div>
          {`TestDepth0: ${scope.depth} elementId: ${scope.element.id}`}
          {props.children}
        </div>
      );
    };
    const renderer = new JsxRenderer({
      insertTokenIds: true,
      templateFullPath: Path.dirname(require.resolve("../jsx-templates/test1")),
      // IndexPage doesn't nest its children so depth doesn't increase from it
      template: (
        <IndexPage>
          <html>
            <head />
            <body>
              <TestDepth0>
                <TestDepth1 />
              </TestDepth0>
            </body>
          </html>
        </IndexPage>
      ),
      tokenHandlers: "../fixtures/token-handler"
    });
    renderer.initializeRenderer();
    const testRender = () => renderer.render({}).then(context => context.output._result);
    const results = [];
    // Render twice so element ids from the second pass can be compared.
    return testRender()
      .then(r => results.push(r))
      .then(testRender)
      .then(r => results.push(r))
      .then(() => {
        results.forEach(r => {
          expect(r).contains("TestDepth0: 0");
          expect(r).contains("TestDepth1: 1");
          expect(r).contains("TestDepth2: 2");
        });
        // Element ids increase monotonically across renders, proving the
        // nested component was recreated in the second pass.
        const regex = /TestDepth2: 2 elementId: ([0-9]+)\n/;
        const a = parseInt(results[0].match(regex)[1]);
        const b = parseInt(results[1].match(regex)[1]);
        expect(a).to.be.above(0);
        expect(b).to.be.above(0);
        expect(b).to.be.above(a);
      });
  });
  // Golden-file output for the test1 template.
  const test1ExpectedOutput = Fs.readFileSync(
    Path.join(__dirname, "test1-output.txt"),
    "utf8"
  ).trim();
  it("should render index page in JSX", () => {
    const renderer = new JsxRenderer({
      insertTokenIds: true,
      templateFullPath: Path.dirname(require.resolve("../jsx-templates/test1")),
      template: Template,
      tokenHandlers: "../fixtures/token-handler"
    });
    const verify = context => {
      // Normalize trailing whitespace before comparing with the golden file.
      const r = context.output._result
        .trim()
        .split("\n")
        .map(x => x.trimRight())
        .join("\n");
      expect(r).contains(test1ExpectedOutput);
    };
    renderer.initializeRenderer();
    // Silence renderer console output during the assertions.
    const intercept = xstdout.intercept(true);
    return asyncVerify(
      () => renderer.render({}),
      verify,
      () => renderer.render({}),
      verify,
      runFinally(() => {
        intercept.restore();
      })
    );
  });
  it("should handle failure in nesting async components", async () => {
    const renderer = new JsxRenderer({
      insertTokenIds: true,
      templateFullPath: null, // test passing no templateFullPath so CWD would be used
      template: Template2,
      tokenHandlers: "./test/fixtures/token-handler"
    });
    renderer.initializeRenderer();
    const context = await renderer.render({});
    // The async component's rejection surfaces as context.result.
    expect(context.result.message).equal("test async component fail");
  });
  it("should have unique Token instances for multiple tokens with same _id", () => {
    const renderer = new JsxRenderer({
      templateFullPath: Path.dirname(require.resolve("../jsx-templates/test4")),
      template: Template4,
      tokenHandlers: "../fixtures/token-handler"
    });
    renderer.initializeRenderer();
    const promise = renderer.render({});
    return promise.then(context => {
      const r = context.output._result.split("\n").join("_");
      // Each token occurrence rendered independently (require1/2/3).
      expect(r).contains("require1_require2_require3");
    });
  });
  const test3ExpectedOutput = Fs.readFileSync(
    Path.join(__dirname, "test3-output.txt"),
    "utf8"
  ).trim();
  it("should handle component nesting children", () => {
    const renderer = new JsxRenderer({
      insertTokenIds: true,
      templateFullPath: Path.dirname(require.resolve("../jsx-templates/test3")),
      template: Template3,
      tokenHandlers: "../fixtures/token-handler"
    });
    renderer.initializeRenderer();
    const promise = renderer.render({});
    return promise.then(context => {
      const r = context.output._result
        .trim()
        .split("\n")
        .map(x => x.trimRight())
        .join("\n");
      expect(r).to.equal(test3ExpectedOutput);
    });
  });
  it("should handle element memoize in tokens", () => {
    const renderer = new JsxRenderer({
      insertTokenIds: false,
      templateFullPath: Path.dirname(require.resolve("../jsx-templates/test5")),
      template: Template5,
      tokenHandlers: "../fixtures/token-handler"
    });
    const verify = context => {
      const r = context.output._result
        .trim()
        .split("\n")
        .map(x => x.trimRight())
        .join("\n");
      expect(r).to.not.contain("<!-- BEGIN"); // no insert token IDs
      // "1" must not increase: memoized content is rendered exactly once.
      expect(r).contains(`Token content should be memoized 1`);
    };
    renderer.initializeRenderer();
    const promise = renderer.render({});
    // Render twice to prove the memoized value is reused.
    return promise.then(context => {
      verify(context);
      return renderer.render({}).then(verify);
    });
  });
  it("should handle token with unknown ID", () => {
    const renderer = new JsxRenderer({
      insertTokenIds: false,
      templateFullPath: Path.dirname(require.resolve("../jsx-templates/test6")),
      template: Template6,
      tokenHandlers: "../fixtures/token-handler"
    });
    const verify = context => {
      expect(context.output._result).contains(
        `Hello
World`
      );
    };
    renderer.initializeRenderer();
    const promise = renderer.render({});
    return promise.then(context => {
      verify(context);
      return renderer.render({}).then(verify);
    });
  });
  it("should handle token throwing sync error", () => {
    const renderer = new JsxRenderer({
      insertTokenIds: true,
      templateFullPath: Path.dirname(require.resolve("../jsx-templates/test7")),
      template: Template7,
      tokenHandlers: "../fixtures/token-handler"
    });
    const verify = context => {
      // Errors are captured into context.result, not thrown.
      expect(context.result).to.be.an("Error");
      expect(context.result.message).contains("test token sync throwing");
    };
    renderer.initializeRenderer();
    const promise = renderer.render({});
    return promise.then(context => {
      verify(context);
      return renderer.render({}).then(verify);
    });
  });
  it("should handle token throwing async error", () => {
    const renderer = new JsxRenderer({
      insertTokenIds: true,
      templateFullPath: Path.dirname(require.resolve("../jsx-templates/test8")),
      template: Template8,
      tokenHandlers: "../fixtures/token-handler"
    });
    const verify = context => {
      expect(context.result).to.be.an("Error");
      expect(context.result.message).contains("test token async throw");
    };
    renderer.initializeRenderer();
    const promise = renderer.render({});
    return promise.then(context => {
      verify(context);
      return renderer.render({}).then(verify);
    });
  });
  it("should handle JSX tag throw a sync error", () => {
    const renderer = new JsxRenderer({
      insertTokenIds: true,
      templateFullPath: Path.dirname(require.resolve("../jsx-templates/test9")),
      template: Template9,
      tokenHandlers: "../fixtures/token-handler"
    });
    const verify = context => {
      expect(context.result).to.be.an("Error");
      expect(context.result.message).contains("test JSX tag sync error");
    };
    renderer.initializeRenderer();
    const promise = renderer.render({});
    return promise.then(context => {
      verify(context);
      return renderer.render({}).then(verify);
    });
  });
  it("should handle JSX tag throw an async error", () => {
    const renderer = new JsxRenderer({
      insertTokenIds: true,
      templateFullPath: Path.dirname(require.resolve("../jsx-templates/test91")),
      template: Template91,
      // tokenHandlers also accepts inline handler objects alongside paths.
      tokenHandlers: [
        "../fixtures/token-handler",
        {
          name: "test1",
          beforeRender: () => {
            //
          },
          afterRender: () => {
            //
          },
          tokens: {}
        }
      ]
    });
    const verify = context => {
      expect(context.result).to.be.an("Error");
      expect(context.result.message).contains("test JSX tag async error");
    };
    renderer.initializeRenderer();
    const promise = renderer.render({});
    return promise.then(context => {
      verify(context);
    });
  });
  it("should handle LoadTokenHandler from template", async () => {
    // No tokenHandlers option: the template registers its own token ids.
    const renderer = new JsxRenderer({
      insertTokenIds: true,
      templateFullPath: Path.dirname(require.resolve("../jsx-templates/test91")),
      template: TemplateRegisterTokenIds
    });
    renderer.initializeRenderer();
    const context = await renderer.render({});
    const result = await context.result;
    expect(result).contains("this is a test<!-- FOO END -->");
    expect(result).contains("<div>user-token-1</div>");
  });
});
import { core, SfdxCommand, flags } from '@salesforce/command';
import * as fs from 'fs';
import * as path from 'path';
// Initialize Messages with the current plugin directory
core.Messages.importMessagesDirectory(__dirname);
// Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core,
// or any library that is using the messages framework can also be loaded this way.
// The bundle name below must match the messages file shipped with this plugin.
const messages = core.Messages.loadMessages('texei-sfdx-plugin', 'org-shape-extract');
// File name of the generated scratch org definition.
const definitionFileName = 'project-scratch-def.json';
// TODO: Add bypassed values in the correct array, and after investigation either fix or update org-shape-extract.md doc
// Settings that only apply to production orgs and must never be emitted into a scratch org definition.
const settingValuesProdOnly = ['Packaging2','ExpandedSourceTrackingPref','ScratchOrgManagementPref','ShapeExportPref',
  'PRMAccRelPref'];
// Settings skipped because of known platform bugs.
const settingValuesBugsRelated = ['enableOmniAutoLoginPrompt','enableOmniSecondaryRoutingPriority',
  'VoiceCallListEnabled','VoiceCallRecordingEnabled','VoiceCoachingEnabled','VoiceConferencingEnabled',
  'VoiceEnabled','VoiceLocalPresenceEnabled','VoiceMailDropEnabled','VoiceMailEnabled','CallDispositionEnabled'];
// Settings skipped pending investigation (deploy errors observed; see the notes inside run()).
const settingValuesBugsToInvestigate = ['enableEngagementHistoryDashboards','EventLogWaveIntegEnabled','SendThroughGmailPref',
  'PardotAppV1Enabled','PardotEmbeddedAnalyticsPref','PardotEnabled',
  'allowUsersToRelateMultipleContactsToTasksAndEvents','socialCustomerServiceSettings',
  'opportunityFilterSettings','enableAccountOwnerReport','defaultCaseOwner','PortalUserShareOnCase',
  'keepRecordTypeOnAssignmentRule','webToCase','routingAddresses'];
// TODO: manage dependencies correctly: for instance, setting "enableCommunityWorkspaces" requires "features":["Communities"]
// Maps a setting name to the scratch-org feature it requires.
// Fix: use the primitive 'string' instead of the boxed wrapper type 'String' as the Map value type.
const featureDependencies = new Map<string, string>([['enableCommunityWorkspaces','Communities']]);
export default class Extract extends SfdxCommand {
public static description = messages.getMessage('commandDescription');
public static examples = [
`$ sfdx texei:org:shape:extract -u bulma@capsulecorp.com"`
];
protected static flagsConfig = {
outputdir: flags.string({ char: 'd', description: messages.getMessage('directoryFlagDescription'), default: 'config' }),
scope: flags.string({ char: 's', description: messages.getMessage('scopeFlagDescription'), options: ['basic', 'full'], default: 'basic' })
};
// Comment this out if your command does not require an org username
protected static requiresUsername = true;
// Comment this out if your command does not support a hub org username
protected static requiresDevhubUsername = false;
// Set this to true if your command requires a project workspace; 'requiresProject' is false by default
protected static requiresProject = false;
public async run(): Promise<any> {
this.ux.warn('This command is in beta, only extracting some settings. Read more at https://github.com/texei/texei-sfdx-plugin/blob/master/org-shape-command.md');
this.ux.startSpinner('Extracting Org Shape', null, { stdout: true });
// Query org for org infos
const query = 'Select Name, Country, LanguageLocaleKey, OrganizationType from Organization';
const conn = this.org.getConnection();
const orgInfos = await conn.query(query) as any;
let featureList: any = [];
let definitionValues: any = {};
let definitionValuesTemp: any = {};
definitionValuesTemp.settings = {};
const settingValuesToIgnore = (this.flags.scope === 'full') ?
[] :
settingValuesProdOnly.concat(settingValuesBugsRelated).concat(settingValuesBugsToInvestigate);
// Getting API Version
// TODO: put this in a helper ? Is there a Core library method to get this OOTB ?
let apiVersion = this.flags.apiversion;
// if there is an api version set via the apiversion flag, use it
// Otherwise use the latest api version available on the org
if (!apiVersion) {
apiVersion = await this.org.retrieveMaxApiVersion();
}
// Querying Settings
const settingPromises = [];
var types = [{type: 'Settings', folder: null}];
await conn.metadata.list(types, apiVersion, function(err, metadata) {
if (err) { return console.error('err', err); }
for (let meta of metadata) {
const settingType = meta.fullName+meta.type;
// Querying settings details - Is there a way to do only 1 query with jsforce ?
const settingPromise = conn.metadata.read(settingType, settingType);
settingPromises.push(settingPromise);
}
});
// Waiting for all promises to resolve
await Promise.all(settingPromises).then((settingValues) => {
// TODO: Write these in the file. - Is everything part of the scratch definition file ? For instance Business Hours ?
// Upper camel case --> lower camel case ; ex: OmniChannelSettings --> omniChannelSettings
for (const setting of settingValues) {
// TODO: manage dependencies on features
// For whatever reason, this setting has not the same format as others
if (setting.fullName == 'OrgPreferenceSettings') {
const settingsName = this.toLowerCamelCase(setting.fullName);
let settingValues: any = {};
for (const subsetting of setting.preferences) {
if (!settingValuesToIgnore.includes(subsetting.settingName)) {
const settingName = this.toLowerCamelCase(subsetting.settingName);
settingValues[settingName] = subsetting.settingValue;
// Checking if there is a feature dependency
if (featureDependencies.has(settingName)) {
featureList.push(featureDependencies.get(settingName));
}
}
}
definitionValuesTemp.settings[settingsName] = settingValues;
}
// FIXME: Lots of settings have errors (for instance linked to metadata)
// TODO: Add to org-shape-command.md
// ForecastingSettings
// Error shape/settings/Forecasting.settings Forecasting Cannot resolve Forecasting Type from name or attributes
// searchSettings (Includes custom objects not there yet)
// Error shape/settings/Search.settings Search Entity is null or entity element's name is null
// Territory2Settings
// Error shape/settings/Territory2.settings Territory2 Not available for deploy for this organization
// Error shape/settings/Account.settings Account You cannot set a value for enableAccountOwnerReport unless your organization-wide sharing access level for Accounts is set to Private.
// Error shape/settings/Case.settings Case CaseSettings: There are no record types defined for Case.
// Error shape/settings/Case.settings Case CaseSettings: Specify the default case user.
// Error shape/settings/Case.settings Case In field: caseOwner - no Queue named myQueue found
// Error shape/settings/Case.settings Case WebToCaseSettings: Invalid caseOrigin Formulaire
// Error shape/settings/OrgPreference.settings OrgPreference You do not have sufficient rights to access the organization setting: PortalUserShareOnCase
// TODO: Test all settings and add them to org-shape-command.md if it doesn't work
const settingsToTest = ['AccountSettings',
'ActivitiesSettings',
'AddressSettings',
'BusinessHoursSettings',
'CaseSettings',
'CommunitiesSettings',
'CompanySettings',
'ContractSettings',
'EntitlementSettings',
'FileUploadAndDownloadSecuritySettings',
'IdeasSettings',
'MacroSettings',
'MobileSettings',
'NameSettings',
'OmniChannelSettings',
'OpportunitySettings',
'OrderSettings',
'PathAssistantSettings',
'ProductSettings',
'QuoteSettings',
'SecuritySettings',
'SocialCustomerServiceSettings'];
if (setting.fullName !== undefined && (settingsToTest.includes(setting.fullName) || this.flags.scope === 'full')) {
const settingName = this.toLowerCamelCase(setting.fullName);
if (!settingValuesToIgnore.includes(settingName)) {
const formattedSetting = this.formatSetting(setting);
// All this code to ignore values should be refactored in a better way, todo
for (const property in setting) {
// Checking if there is a feature dependency
if (featureDependencies.has(property)) {
featureList.push(featureDependencies.get(property));
}
if (setting.hasOwnProperty(property) && settingValuesToIgnore.includes(property)) {
delete setting[property];
}
// TODO: Handle recursivity correctly
for (const prop in setting[property]) {
if (setting.hasOwnProperty(property) && setting[property].hasOwnProperty(prop) && settingValuesToIgnore.includes(prop)) {
delete setting[property][prop];
}
}
}
definitionValuesTemp.settings[settingName] = formattedSetting;
}
}
}
// Construct the object with all values
definitionValues.orgName = orgInfos.records[0].Name;
definitionValues.edition = this.mapOrganizationTypeToScratchOrgEdition(orgInfos.records[0].OrganizationType);
definitionValues.language = orgInfos.records[0].LanguageLocaleKey;
// Adding features if needed
if (featureList.length > 0) {
definitionValues.features = featureList;
}
definitionValues.settings = definitionValuesTemp.settings;
});
// If a path was specified, add it
let filePath = definitionFileName;
if (this.flags.outputdir) {
filePath = path.join(
this.flags.outputdir,
definitionFileName
);
}
// Write project-scratch-def.json file
const saveToPath = path.join(
process.cwd(),
filePath
);
await fs.writeFile(saveToPath, this.removeQuotes(JSON.stringify(definitionValues, null, 2)), 'utf8', function (err) {
if (err) {
throw new core.SfdxError(`Unable to write definition file at path ${process.cwd()}: ${err}`);
}
});
this.ux.stopSpinner('Done.');
// Everything went fine, return an object that will be used for --json
return { org: this.org.getOrgId(), message: definitionValues };
}
private toLowerCamelCase(label) {
return label.charAt(0).toLowerCase() + label.slice(1);
}
// Is there a better way to do this ?
private removeQuotes(myJson) {
myJson = myJson.replace(new RegExp('"true"', 'g'), true);
myJson = myJson.replace(new RegExp('"false"', 'g'), false);
return myJson;
}
private formatSetting(myJson) {
this.toLowerCamelCase(myJson.fullName);
delete myJson.fullName;
return myJson;
}
/**
* This maps organization types to one of the 4 available scratch org editions with the fallback of "Developer".
* Sources:
* [Way to identify Salesforce edition using API?](https://salesforce.stackexchange.com/questions/216/way-to-identify-salesforce-edition-using-api)
* [Salesforce Editions That Are No Longer Sold](https://help.salesforce.com/articleView?id=overview_other_editions.htm&type=5)
* [Scratch Org Definition Configuration Values](https://developer.salesforce.com/docs/atlas.en-us.sfdx_dev.meta/sfdx_dev/sfdx_dev_scratch_orgs_def_file_config_values.htm)
* @param organizationType
*/
private mapOrganizationTypeToScratchOrgEdition(organizationType) {
// possible organization types as of v47.0:
// ["Team Edition","Professional Edition","Enterprise Edition","Developer Edition","Personal Edition","Unlimited Edition","Contact Manager Edition","Base Edition"]
// Base Edition: https://twitter.com/EvilN8/status/430810563044601856
if (["Team Edition", "Personal Edition", "Base Edition"].includes(organizationType)) {
return "Group";
}
if (["Contact Manager Edition"].includes(organizationType)) {
return "Professional";
}
if (["Unlimited Edition"].includes(organizationType)) {
return "Enterprise";
}
const sanitizedOrganizationType = organizationType.replace(" Edition", "");
if (
["Group", "Professional", "Enterprise", "Developer"].includes(sanitizedOrganizationType)
) {
return sanitizedOrganizationType;
}
return "Developer";
}
}