import * as pulumi from "@pulumi/pulumi";
import { input as inputs, output as outputs } from "../types";
import * as utilities from "../utilities";
/**
* Persistent disks are durable storage devices that function similarly to
* the physical disks in a desktop or a server. Compute Engine manages the
* hardware behind these devices to ensure data redundancy and optimize
* performance for you. Persistent disks are available as either standard
* hard disk drives (HDD) or solid-state drives (SSD).
*
* Persistent disks are located independently from your virtual machine
* instances, so you can detach or move persistent disks to keep your data
* even after you delete your instances. Persistent disk performance scales
* automatically with size, so you can resize your existing persistent disks
* or add more persistent disks to an instance to meet your performance and
* storage space requirements.
*
* Add a persistent disk to your instance when you need reliable and
* affordable storage with consistent performance characteristics.
*
* To get more information about Disk, see:
*
* * [API documentation](https://cloud.google.com/compute/docs/reference/v1/disks)
* * How-to Guides
* * [Adding a persistent disk](https://cloud.google.com/compute/docs/disks/add-persistent-disk)
*
* > **Warning:** All arguments including `disk_encryption_key.raw_key` will be stored in the raw
* state as plain-text. [Read more about secrets in state](https://www.pulumi.com/docs/intro/concepts/programming-model/#secrets).
*
* ## Example Usage
* ### Disk Basic
*
* ```typescript
* import * as pulumi from "@pulumi/pulumi";
* import * as gcp from "@pulumi/gcp";
*
* const defaultDisk = new gcp.compute.Disk("default", {
* image: "debian-8-jessie-v20170523",
* labels: {
* environment: "dev",
* },
* physicalBlockSizeBytes: 4096,
* type: "pd-ssd",
* zone: "us-central1-a",
* });
* ```
*
* ## Import
*
* Disk can be imported using any of these accepted formats
*
* ```sh
* $ pulumi import gcp:compute/disk:Disk default projects/{{project}}/zones/{{zone}}/disks/{{name}}
* ```
*
* ```sh
* $ pulumi import gcp:compute/disk:Disk default {{project}}/{{zone}}/{{name}}
* ```
*
* ```sh
* $ pulumi import gcp:compute/disk:Disk default {{zone}}/{{name}}
* ```
*
* ```sh
* $ pulumi import gcp:compute/disk:Disk default {{name}}
* ```
*/
export class Disk extends pulumi.CustomResource {
    /**
     * Get an existing Disk resource's state with the given name, ID, and optional extra
     * properties used to qualify the lookup.
     *
     * @param name The _unique_ name of the resulting resource.
     * @param id The _unique_ provider ID of the resource to lookup.
     * @param state Any extra arguments used during the lookup.
     * @param opts Optional settings to control the behavior of the CustomResource.
     */
    public static get(name: string, id: pulumi.Input<pulumi.ID>, state?: DiskState, opts?: pulumi.CustomResourceOptions): Disk {
        // Supplying `id` in the options makes the constructor take the lookup path.
        return new Disk(name, <any>state, { ...opts, id });
    }

    /** @internal */
    public static readonly __pulumiType = 'gcp:compute/disk:Disk';

    /**
     * Returns true if the given object is an instance of Disk. This is designed to work even
     * when multiple copies of the Pulumi SDK have been loaded into the same process.
     */
    public static isInstance(obj: any): obj is Disk {
        // Compare via the type token rather than `instanceof` so the check survives
        // multiple copies of this SDK loaded side by side. `!= null` covers both
        // null and undefined.
        return obj != null && obj['__pulumiType'] === Disk.__pulumiType;
    }

    /**
     * RFC3339 timestamp recording when the disk was created. Reported by the API.
     */
    public /*out*/ readonly creationTimestamp!: pulumi.Output<string>;
    /**
     * Optional, user-provided description of this resource; supply it when
     * creating the resource.
     */
    public readonly description!: pulumi.Output<string | undefined>;
    /**
     * Customer-supplied encryption key for the disk. Once a disk is encrypted
     * with such a key, the same key must be provided for any later use of the
     * disk (e.g. creating a snapshot or image, or attaching the disk to a VM).
     * Customer-supplied keys do not protect access to the disk's metadata.
     * When omitted, the disk is encrypted with an automatically generated key
     * and no key is needed for later use.
     * Structure is documented below.
     */
    public readonly diskEncryptionKey!: pulumi.Output<outputs.compute.DiskDiskEncryptionKey | undefined>;
    /**
     * The image from which to initialize this disk. Accepts the image's
     * `selfLink`, `projects/{project}/global/images/{image}`,
     * `projects/{project}/global/images/family/{family}`, `global/images/{image}`,
     * `global/images/family/{family}`, `family/{family}`, `{project}/{family}`,
     * `{project}/{image}`, `{family}`, or `{image}`. When referring by family,
     * image names must include the family name (e.g. `centos-6-v20180104`
     * includes its family `centos-6`); if they don't, use the
     * [gcp.compute.Image data source](https://www.terraform.io/docs/providers/google/d/compute_image.html).
     */
    public readonly image!: pulumi.Output<string | undefined>;
    /**
     * Specifies the disk interface to use for attaching this disk, which is either SCSI or NVME. The default is SCSI.
     *
     * @deprecated This field is no longer in use, disk interfaces will be automatically determined on attachment. To resolve this issue, remove this field from your config.
     */
    public readonly interface!: pulumi.Output<string | undefined>;
    /**
     * Server-computed fingerprint of the labels, used internally for optimistic
     * locking during updates.
     */
    public /*out*/ readonly labelFingerprint!: pulumi.Output<string>;
    /**
     * Key->value labels to apply to this disk.
     */
    public readonly labels!: pulumi.Output<{[key: string]: string} | undefined>;
    /**
     * RFC3339 timestamp of the most recent attach.
     */
    public /*out*/ readonly lastAttachTimestamp!: pulumi.Output<string>;
    /**
     * RFC3339 timestamp of the most recent detach.
     */
    public /*out*/ readonly lastDetachTimestamp!: pulumi.Output<string>;
    /**
     * Whether the disk can be attached read/write to more than one instance.
     */
    public readonly multiWriter!: pulumi.Output<boolean | undefined>;
    /**
     * Name of the resource, provided by the client on creation. Must be 1-63
     * characters long and comply with RFC1035 (regex `a-z?`): the first
     * character a lowercase letter, the rest dashes, lowercase letters, or
     * digits, and the last character must not be a dash.
     */
    public readonly name!: pulumi.Output<string>;
    /**
     * Physical block size of the persistent disk, in bytes. A server-side
     * default applies when absent. Currently supported sizes are 4096 and
     * 16384 (more may be added later); requesting an unsupported value returns
     * an error listing the sizes allowed for the caller's project.
     */
    public readonly physicalBlockSizeBytes!: pulumi.Output<number>;
    /**
     * The ID of the project in which the resource belongs; falls back to the
     * provider project when unset.
     */
    public readonly project!: pulumi.Output<string>;
    /**
     * How many IOPS must be provisioned for the disk.
     */
    public readonly provisionedIops!: pulumi.Output<number | undefined>;
    /**
     * Resource policies applied to this disk for automatic snapshot creations.
     * ~>**NOTE** This value does not support updating the resource policy
     * (policies cannot be updated more than one at a time); use
     * `gcp.compute.DiskResourcePolicyAttachment` to allow updating the policy
     * attached to the disk.
     */
    public readonly resourcePolicies!: pulumi.Output<string[]>;
    /**
     * The URI of the created resource.
     */
    public /*out*/ readonly selfLink!: pulumi.Output<string>;
    /**
     * Size of the persistent disk, in GB. May be set together with `image` or
     * `snapshot` (then it must not be smaller than the source) or alone to
     * create an empty disk.
     * ~>**NOTE** Changing the size updates the disk in place when upsizing but
     * recreates it when downsizing; add `lifecycle.prevent_destroy` to guard
     * against destroy-and-recreate.
     */
    public readonly size!: pulumi.Output<number>;
    /**
     * The source snapshot used to create this disk, as a partial or full URL.
     * A full URL is required when the snapshot lives in another project. Valid
     * examples:
     * * `https://www.googleapis.com/compute/v1/projects/project/global/snapshots/snapshot`
     * * `projects/project/global/snapshots/snapshot`
     * * `global/snapshots/snapshot`
     * * `snapshot`
     */
    public readonly snapshot!: pulumi.Output<string | undefined>;
    /**
     * The customer-supplied encryption key of the source image. Required if
     * the source image is protected by a customer-supplied encryption key.
     * Structure is documented below.
     */
    public readonly sourceImageEncryptionKey!: pulumi.Output<outputs.compute.DiskSourceImageEncryptionKey | undefined>;
    /**
     * The ID value of the image used to create this disk, pinning the exact
     * image version even if the image is later deleted and recreated under the
     * same name.
     */
    public /*out*/ readonly sourceImageId!: pulumi.Output<string>;
    /**
     * The customer-supplied encryption key of the source snapshot. Required if
     * the source snapshot is protected by a customer-supplied encryption key.
     * Structure is documented below.
     */
    public readonly sourceSnapshotEncryptionKey!: pulumi.Output<outputs.compute.DiskSourceSnapshotEncryptionKey | undefined>;
    /**
     * The unique ID of the snapshot used to create this disk, pinning the
     * exact snapshot version even if it is later deleted and recreated under
     * the same name.
     */
    public /*out*/ readonly sourceSnapshotId!: pulumi.Output<string>;
    /**
     * URL of the disk type resource describing which disk type to use to
     * create the disk. Provide this when creating the disk.
     */
    public readonly type!: pulumi.Output<string | undefined>;
    /**
     * Links to the users of the disk (attached instances) in form:
     * project/zones/zone/instances/instance
     */
    public /*out*/ readonly users!: pulumi.Output<string[]>;
    /**
     * A reference to the zone where the disk resides.
     */
    public readonly zone!: pulumi.Output<string>;

    /**
     * Create a Disk resource with the given unique name, arguments, and options.
     *
     * @param name The _unique_ name of the resource.
     * @param args The arguments to use to populate this resource's properties.
     * @param opts A bag of options that control this resource's behavior.
     */
    constructor(name: string, args?: DiskArgs, opts?: pulumi.CustomResourceOptions)
    constructor(name: string, argsOrState?: DiskArgs | DiskState, opts?: pulumi.CustomResourceOptions) {
        const resourceInputs: pulumi.Inputs = {};
        opts = opts || {};
        if (opts.id) {
            // Lookup path: forward every known state field (undefined when no
            // state was supplied).
            const state = argsOrState as DiskState | undefined;
            const stateKeys: (keyof DiskState)[] = [
                "creationTimestamp", "description", "diskEncryptionKey", "image",
                "interface", "labelFingerprint", "labels", "lastAttachTimestamp",
                "lastDetachTimestamp", "multiWriter", "name", "physicalBlockSizeBytes",
                "project", "provisionedIops", "resourcePolicies", "selfLink",
                "size", "snapshot", "sourceImageEncryptionKey", "sourceImageId",
                "sourceSnapshotEncryptionKey", "sourceSnapshotId", "type", "users",
                "zone",
            ];
            for (const key of stateKeys) {
                resourceInputs[key] = state ? state[key] : undefined;
            }
        } else {
            // Creation path: forward the user-supplied arguments...
            const args = argsOrState as DiskArgs | undefined;
            const argKeys: (keyof DiskArgs)[] = [
                "description", "diskEncryptionKey", "image", "interface", "labels",
                "multiWriter", "name", "physicalBlockSizeBytes", "project",
                "provisionedIops", "resourcePolicies", "size", "snapshot",
                "sourceImageEncryptionKey", "sourceSnapshotEncryptionKey", "type",
                "zone",
            ];
            for (const key of argKeys) {
                resourceInputs[key] = args ? args[key] : undefined;
            }
            // ...and reserve slots for the output-only properties the engine fills in.
            const outputKeys = [
                "creationTimestamp", "labelFingerprint", "lastAttachTimestamp",
                "lastDetachTimestamp", "selfLink", "sourceImageId",
                "sourceSnapshotId", "users",
            ];
            for (const key of outputKeys) {
                resourceInputs[key] = undefined /*out*/;
            }
        }
        if (!opts.version) {
            opts = pulumi.mergeOptions(opts, { version: utilities.getVersion() });
        }
        super(Disk.__pulumiType, name, resourceInputs, opts);
    }
}
/**
* Input properties used for looking up and filtering Disk resources.
*/
export interface DiskState {
    /**
     * Creation timestamp in RFC3339 text format. Output-only on the resource.
     */
    creationTimestamp?: pulumi.Input<string>;
    /**
     * An optional description of this resource. Provide this property when
     * you create the resource.
     */
    description?: pulumi.Input<string>;
    /**
     * Customer-supplied encryption key for the disk. Once a disk is encrypted
     * with such a key, the same key must be provided for any later use of the
     * disk (e.g. creating a snapshot or image, or attaching the disk to a VM).
     * Customer-supplied keys do not protect access to the disk's metadata.
     * When omitted, the disk is encrypted with an automatically generated key
     * and no key is needed for later use.
     * Structure is documented below.
     */
    diskEncryptionKey?: pulumi.Input<inputs.compute.DiskDiskEncryptionKey>;
    /**
     * The image from which to initialize this disk. Accepts the image's
     * `selfLink`, `projects/{project}/global/images/{image}`,
     * `projects/{project}/global/images/family/{family}`, `global/images/{image}`,
     * `global/images/family/{family}`, `family/{family}`, `{project}/{family}`,
     * `{project}/{image}`, `{family}`, or `{image}`. When referring by family,
     * image names must include the family name (e.g. `centos-6-v20180104`
     * includes its family `centos-6`); if they don't, use the
     * [gcp.compute.Image data source](https://www.terraform.io/docs/providers/google/d/compute_image.html).
     */
    image?: pulumi.Input<string>;
    /**
     * Specifies the disk interface to use for attaching this disk, which is either SCSI or NVME. The default is SCSI.
     *
     * @deprecated This field is no longer in use, disk interfaces will be automatically determined on attachment. To resolve this issue, remove this field from your config.
     */
    interface?: pulumi.Input<string>;
    /**
     * Server-computed fingerprint used for optimistic locking of this resource.
     * Used internally during updates. Output-only on the resource.
     */
    labelFingerprint?: pulumi.Input<string>;
    /**
     * Labels to apply to this disk. A list of key->value pairs.
     */
    labels?: pulumi.Input<{[key: string]: pulumi.Input<string>}>;
    /**
     * Last attach timestamp in RFC3339 text format. Output-only on the resource.
     */
    lastAttachTimestamp?: pulumi.Input<string>;
    /**
     * Last detach timestamp in RFC3339 text format. Output-only on the resource.
     */
    lastDetachTimestamp?: pulumi.Input<string>;
    /**
     * Whether the disk can be attached read/write to more than one instance.
     */
    multiWriter?: pulumi.Input<boolean>;
    /**
     * Name of the resource, provided by the client on creation. Must be 1-63
     * characters long and comply with RFC1035 (regex `a-z?`): the first
     * character a lowercase letter, the rest dashes, lowercase letters, or
     * digits, and the last character must not be a dash.
     */
    name?: pulumi.Input<string>;
    /**
     * Physical block size of the persistent disk, in bytes. A server-side
     * default applies when absent. Currently supported sizes are 4096 and
     * 16384 (more may be added later); requesting an unsupported value returns
     * an error listing the sizes allowed for the caller's project.
     */
    physicalBlockSizeBytes?: pulumi.Input<number>;
    /**
     * The ID of the project in which the resource belongs.
     * If it is not provided, the provider project is used.
     */
    project?: pulumi.Input<string>;
    /**
     * How many IOPS must be provisioned for the disk.
     */
    provisionedIops?: pulumi.Input<number>;
    /**
     * Resource policies applied to this disk for automatic snapshot creations.
     * ~>**NOTE** This value does not support updating the resource policy
     * (policies cannot be updated more than one at a time); use
     * `gcp.compute.DiskResourcePolicyAttachment` to allow updating the policy
     * attached to the disk.
     */
    resourcePolicies?: pulumi.Input<pulumi.Input<string>[]>;
    /**
     * The URI of the created resource. Output-only on the resource.
     */
    selfLink?: pulumi.Input<string>;
    /**
     * Size of the persistent disk, in GB. May be set together with `image` or
     * `snapshot` (then it must not be smaller than the source) or alone to
     * create an empty disk.
     * ~>**NOTE** Changing the size updates the disk in place when upsizing but
     * recreates it when downsizing; add `lifecycle.prevent_destroy` to guard
     * against destroy-and-recreate.
     */
    size?: pulumi.Input<number>;
    /**
     * The source snapshot used to create this disk, as a partial or full URL.
     * A full URL is required when the snapshot lives in another project. Valid
     * examples:
     * * `https://www.googleapis.com/compute/v1/projects/project/global/snapshots/snapshot`
     * * `projects/project/global/snapshots/snapshot`
     * * `global/snapshots/snapshot`
     * * `snapshot`
     */
    snapshot?: pulumi.Input<string>;
    /**
     * The customer-supplied encryption key of the source image. Required if
     * the source image is protected by a customer-supplied encryption key.
     * Structure is documented below.
     */
    sourceImageEncryptionKey?: pulumi.Input<inputs.compute.DiskSourceImageEncryptionKey>;
    /**
     * The ID value of the image used to create this disk, pinning the exact
     * image version even if the image is later deleted and recreated under the
     * same name. Output-only on the resource.
     */
    sourceImageId?: pulumi.Input<string>;
    /**
     * The customer-supplied encryption key of the source snapshot. Required if
     * the source snapshot is protected by a customer-supplied encryption key.
     * Structure is documented below.
     */
    sourceSnapshotEncryptionKey?: pulumi.Input<inputs.compute.DiskSourceSnapshotEncryptionKey>;
    /**
     * The unique ID of the snapshot used to create this disk, pinning the
     * exact snapshot version even if it is later deleted and recreated under
     * the same name. Output-only on the resource.
     */
    sourceSnapshotId?: pulumi.Input<string>;
    /**
     * URL of the disk type resource describing which disk type to use to
     * create the disk. Provide this when creating the disk.
     */
    type?: pulumi.Input<string>;
    /**
     * Links to the users of the disk (attached instances) in form:
     * project/zones/zone/instances/instance. Output-only on the resource.
     */
    users?: pulumi.Input<pulumi.Input<string>[]>;
    /**
     * A reference to the zone where the disk resides.
     */
    zone?: pulumi.Input<string>;
}
/**
* The set of arguments for constructing a Disk resource.
*/
export interface DiskArgs {
/**
* An optional description of this resource. Provide this property when
* you create the resource.
*/
description?: pulumi.Input<string>;
/**
* Encrypts the disk using a customer-supplied encryption key.
* After you encrypt a disk with a customer-supplied key, you must
* provide the same key if you use the disk later (e.g. to create a disk
* snapshot or an image, or to attach the disk to a virtual machine).
* Customer-supplied encryption keys do not protect access to metadata of
* the disk.
* If you do not provide an encryption key when creating the disk, then
* the disk will be encrypted using an automatically generated key and
* you do not need to provide a key to use the disk later.
* Structure is documented below.
*/
diskEncryptionKey?: pulumi.Input<inputs.compute.DiskDiskEncryptionKey>;
/**
* The image from which to initialize this disk. This can be
* one of: the image's `selfLink`, `projects/{project}/global/images/{image}`,
* `projects/{project}/global/images/family/{family}`, `global/images/{image}`,
* `global/images/family/{family}`, `family/{family}`, `{project}/{family}`,
* `{project}/{image}`, `{family}`, or `{image}`. If referred by family, the
* images names must include the family name. If they don't, use the
* [gcp.compute.Image data source](https://www.terraform.io/docs/providers/google/d/compute_image.html).
* For instance, the image `centos-6-v20180104` includes its family name `centos-6`.
* These images can be referred by family name here.
*/
image?: pulumi.Input<string>;
/**
* Specifies the disk interface to use for attaching this disk, which is either SCSI or NVME. The default is SCSI.
*
* @deprecated This field is no longer in use, disk interfaces will be automatically determined on attachment. To resolve this issue, remove this field from your config.
*/
interface?: pulumi.Input<string>;
/**
* Labels to apply to this disk. A list of key->value pairs.
*/
labels?: pulumi.Input<{[key: string]: pulumi.Input<string>}>;
/**
* Indicates whether or not the disk can be read/write attached to more than one instance.
*/
multiWriter?: pulumi.Input<boolean>;
/**
* Name of the resource. Provided by the client when the resource is
* created. The name must be 1-63 characters long, and comply with
* RFC1035. Specifically, the name must be 1-63 characters long and match
* the regular expression `a-z?` which means the
* first character must be a lowercase letter, and all following
* characters must be a dash, lowercase letter, or digit, except the last
* character, which cannot be a dash.
*/
name?: pulumi.Input<string>;
/**
* Physical block size of the persistent disk, in bytes. If not present
* in a request, a default value is used. Currently supported sizes
* are 4096 and 16384, other sizes may be added in the future.
* If an unsupported value is requested, the error message will list
* the supported values for the caller's project.
*/
physicalBlockSizeBytes?: pulumi.Input<number>;
/**
* The ID of the project in which the resource belongs.
* If it is not provided, the provider project is used.
*/
project?: pulumi.Input<string>;
/**
* Indicates how many IOPS must be provisioned for the disk.
*/
provisionedIops?: pulumi.Input<number>;
/**
* Resource policies applied to this disk for automatic snapshot creations.
* ~>**NOTE** This value does not support updating the
* resource policy, as resource policies can not be updated more than
* one at a time. Use
* `gcp.compute.DiskResourcePolicyAttachment`
* to allow for updating the resource policy attached to the disk.
*/
resourcePolicies?: pulumi.Input<pulumi.Input<string>[]>;
/**
* Size of the persistent disk, specified in GB. You can specify this
* field when creating a persistent disk using the `image` or
* `snapshot` parameter, or specify it alone to create an empty
* persistent disk.
* If you specify this field along with `image` or `snapshot`,
* the value must not be less than the size of the image
* or the size of the snapshot.
* ~>**NOTE** If you change the size, the provider updates the disk size
* if upsizing is detected but recreates the disk if downsizing is requested.
* You can add `lifecycle.prevent_destroy` in the config to prevent destroying
* and recreating.
*/
size?: pulumi.Input<number>;
/**
* The source snapshot used to create this disk. You can provide this as
* a partial or full URL to the resource. If the snapshot is in another
* project than this disk, you must supply a full URL. For example, the
* following are valid values:
* * `https://www.googleapis.com/compute/v1/projects/project/global/snapshots/snapshot`
* * `projects/project/global/snapshots/snapshot`
* * `global/snapshots/snapshot`
* * `snapshot`
*/
snapshot?: pulumi.Input<string>;
/**
* The customer-supplied encryption key of the source image. Required if
* the source image is protected by a customer-supplied encryption key.
* Structure is documented below.
*/
sourceImageEncryptionKey?: pulumi.Input<inputs.compute.DiskSourceImageEncryptionKey>;
/**
* The customer-supplied encryption key of the source snapshot. Required
* if the source snapshot is protected by a customer-supplied encryption
* key.
* Structure is documented below.
*/
sourceSnapshotEncryptionKey?: pulumi.Input<inputs.compute.DiskSourceSnapshotEncryptionKey>;
/**
* URL of the disk type resource describing which disk type to use to
* create the disk. Provide this when creating the disk.
*/
type?: pulumi.Input<string>;
/**
* A reference to the zone where the disk resides.
*/
zone?: pulumi.Input<string>;
} | the_stack |
/**
 * @group integration/attestation
 */
import type { ICredential, IClaim, KeyringPair } from '@kiltprotocol/types'
import { BlockchainUtils, ExtrinsicErrors } from '@kiltprotocol/chain-helpers'
import {
createOnChainDidFromSeed,
DemoKeystore,
FullDidDetails,
} from '@kiltprotocol/did'
import { BN } from '@polkadot/util'
import { Crypto } from '@kiltprotocol/utils'
import { randomAsHex } from '@polkadot/util-crypto'
import { Attestation } from '../attestation/Attestation'
import { revoke, remove } from '../attestation/Attestation.chain'
import { Credential } from '../credential/Credential'
import { disconnect, init } from '../kilt'
import { Claim } from '../claim/Claim'
import { CType } from '../ctype/CType'
import { RequestForAttestation } from '../requestforattestation/RequestForAttestation'
import {
CtypeOnChain,
DriversLicense,
IsOfficialLicenseAuthority,
devFaucet,
WS_ADDRESS,
keypairFromRandom,
} from './utils'
import '../../../../testingTools/jestErrorCodeMatcher'
// Shared fixtures for the integration suite; all are initialized in `beforeAll`.
let tokenHolder: KeyringPair // dev-faucet account that signs and pays for every submitted tx
let signer: DemoKeystore // in-memory keystore holding the keys of the DIDs below
let attester: FullDidDetails // DID used to authorize attestation extrinsics
let anotherAttester: FullDidDetails // second attester DID; not exercised in the tests visible here
let claimer: FullDidDetails // DID on whose behalf claims are created and signed
beforeAll(async () => {
  // Connect to the test chain and prepare the shared signing fixtures.
  await init({ address: WS_ADDRESS })
  tokenHolder = devFaucet
  signer = new DemoKeystore()

  // Create the three on-chain DIDs in parallel, each from a fresh random seed,
  // all funded by the faucet account.
  const newDid = () =>
    createOnChainDidFromSeed(tokenHolder, signer, randomAsHex())
  ;[attester, anotherAttester, claimer] = await Promise.all([
    newDid(),
    newDid(),
    newDid(),
  ])
}, 30_000)
beforeEach(async () => {
  // Re-sync each DID's transaction counter with the chain before every test.
  const allDids = [attester, anotherAttester, claimer]
  await Promise.all(allDids.map((did) => did.refreshTxIndex()))
})
it('fetches the correct deposit amount', async () => {
const depositAmount = await Attestation.queryDepositAmount()
expect(depositAmount.toString()).toStrictEqual(
new BN(1000000000000000).toString()
)
})
describe('handling attestations that do not exist', () => {
  const claimHash = Crypto.hashStr('abcde')

  it('Attestation.query', async () => {
    // Querying an unknown claim hash resolves to null rather than throwing.
    return expect(Attestation.query(claimHash)).resolves.toBeNull()
  }, 30_000)

  it('Attestation.revoke', async () => {
    // Build, authorize, and submit a revocation for a non-existent attestation;
    // a rejection anywhere in the pipeline satisfies the expectation.
    const submitRevocation = async (): Promise<void> => {
      const draft = await Attestation.revoke(claimHash, 0)
      const authorized = await attester.authorizeExtrinsic(
        draft,
        signer,
        tokenHolder.address
      )
      await BlockchainUtils.signAndSubmitTx(authorized, tokenHolder, {
        resolveOn: BlockchainUtils.IS_IN_BLOCK,
        reSign: true,
      })
    }
    return expect(submitRevocation()).rejects.toThrow()
  }, 30_000)

  it('Attestation.remove', async () => {
    // Same as above, but for removal: the chain must reject it.
    const submitRemoval = async (): Promise<void> => {
      const draft = await Attestation.remove(claimHash, 0)
      const authorized = await attester.authorizeExtrinsic(
        draft,
        signer,
        tokenHolder.address
      )
      await BlockchainUtils.signAndSubmitTx(authorized, tokenHolder, {
        resolveOn: BlockchainUtils.IS_IN_BLOCK,
        reSign: true,
      })
    }
    return expect(submitRemoval()).rejects.toThrow()
  }, 30_000)
})
describe('When there is an attester, claimer and ctype drivers license', () => {
beforeAll(async () => {
  // Register the DriversLicense CType once; later runs reuse the on-chain copy.
  if (!(await CtypeOnChain(DriversLicense))) {
    const storeTx = await DriversLicense.store()
    const authorized = await attester.authorizeExtrinsic(
      storeTx,
      signer,
      tokenHolder.address
    )
    await BlockchainUtils.signAndSubmitTx(authorized, tokenHolder, {
      resolveOn: BlockchainUtils.IS_IN_BLOCK,
      reSign: true,
    })
  }
}, 60_000)
it('should be possible to make a claim', async () => {
  const contents: IClaim['contents'] = { name: 'Ralph', age: 12 }
  const claim = Claim.fromCTypeAndClaimContents(
    DriversLicense,
    contents,
    claimer.did
  )
  const request = RequestForAttestation.fromClaim(claim)
  await request.signWithDid(signer, claimer)
  // Data hashes, the claimer's signature, and the round-tripped contents must
  // all check out.
  expect(request.verifyData()).toBe(true)
  await expect(request.verifySignature()).resolves.toBe(true)
  expect(request.claim.contents).toMatchObject(contents)
})
it('should be possible to attest a claim and then claim the attestation deposit back', async () => {
  const contents: IClaim['contents'] = { name: 'Ralph', age: 12 }
  const claim = Claim.fromCTypeAndClaimContents(
    DriversLicense,
    contents,
    claimer.did
  )
  const request = RequestForAttestation.fromClaim(claim)
  expect(request.verifyData()).toBe(true)
  await request.signWithDid(signer, claimer)
  await expect(request.verifySignature()).resolves.toBe(true)

  // Write the attestation on chain; the faucet account pays fees and deposit.
  const attestation = Attestation.fromRequestAndDid(request, attester.did)
  const storeCall = await attestation.store()
  const authorized = await attester.authorizeExtrinsic(
    storeCall,
    signer,
    tokenHolder.address
  )
  await BlockchainUtils.signAndSubmitTx(authorized, tokenHolder, {
    resolveOn: BlockchainUtils.IS_IN_BLOCK,
    reSign: true,
  })

  const credential = Credential.fromRequestAndAttestation(request, attestation)
  expect(credential.verifyData()).toBe(true)
  await expect(credential.verify()).resolves.toBe(true)

  // Claim the deposit back by submitting the reclaimDeposit extrinsic with the
  // deposit payer's account.
  const reclaimTx = await attestation.reclaimDeposit()
  await BlockchainUtils.signAndSubmitTx(reclaimTx, tokenHolder, {
    resolveOn: BlockchainUtils.IS_IN_BLOCK,
    reSign: true,
  })

  // Reclaiming the deposit removes the attestation from chain state.
  await expect(Attestation.query(attestation.claimHash)).resolves.toBeNull()
  await expect(attestation.checkValidity()).resolves.toBeFalsy()
}, 60_000)
it('should not be possible to attest a claim without enough tokens', async () => {
const content: IClaim['contents'] = { name: 'Ralph', age: 12 }
const claim = Claim.fromCTypeAndClaimContents(
DriversLicense,
content,
claimer.did
)
const request = RequestForAttestation.fromClaim(claim)
expect(request.verifyData()).toBe(true)
await request.signWithDid(signer, claimer)
await expect(request.verifySignature()).resolves.toBe(true)
const attestation = Attestation.fromRequestAndDid(request, attester.did)
const bobbyBroke = keypairFromRandom()
await expect(
attestation
.store()
.then((call) =>
attester.authorizeExtrinsic(call, signer, bobbyBroke.address)
)
.then((tx) =>
BlockchainUtils.signAndSubmitTx(tx, bobbyBroke, {
resolveOn: BlockchainUtils.IS_IN_BLOCK,
reSign: true,
})
)
).rejects.toThrow()
const credential = Credential.fromRequestAndAttestation(
request,
attestation
)
await expect(credential.verify()).resolves.toBeFalsy()
}, 60_000)
it('should not be possible to attest a claim on a Ctype that is not on chain', async () => {
const badCtype = CType.fromSchema({
$id: 'kilt:ctype:0x1',
$schema: 'http://kilt-protocol.org/draft-01/ctype#',
title: 'badDriversLicense',
properties: {
name: {
type: 'string',
},
weight: {
type: 'integer',
},
},
type: 'object',
})
const content: IClaim['contents'] = { name: 'Ralph', weight: 120 }
const claim = Claim.fromCTypeAndClaimContents(
badCtype,
content,
claimer.did
)
const request = RequestForAttestation.fromClaim(claim)
const attestation = Attestation.fromRequestAndDid(request, attester.did)
await expect(
attestation
.store()
.then((call) =>
attester.authorizeExtrinsic(call, signer, tokenHolder.address)
)
.then((tx) =>
BlockchainUtils.signAndSubmitTx(tx, tokenHolder, {
resolveOn: BlockchainUtils.IS_IN_BLOCK,
reSign: true,
})
)
).rejects.toThrowErrorWithCode(
ExtrinsicErrors.CType.ERROR_CTYPE_NOT_FOUND.code
)
}, 60_000)
describe('when there is a credential on-chain', () => {
let credential: Credential
beforeAll(async () => {
const content: IClaim['contents'] = { name: 'Rolfi', age: 18 }
const claim = Claim.fromCTypeAndClaimContents(
DriversLicense,
content,
claimer.did
)
const request = RequestForAttestation.fromClaim(claim)
await request.signWithDid(signer, claimer)
const attestation = Attestation.fromRequestAndDid(request, attester.did)
await attestation
.store()
.then((call) =>
attester.authorizeExtrinsic(call, signer, tokenHolder.address)
)
.then((tx) =>
BlockchainUtils.signAndSubmitTx(tx, tokenHolder, {
resolveOn: BlockchainUtils.IS_IN_BLOCK,
reSign: true,
})
)
credential = Credential.fromRequestAndAttestation(request, attestation)
await expect(credential.verify()).resolves.toBe(true)
}, 60_000)
it('should not be possible to attest the same claim twice', async () => {
await expect(
credential.attestation
.store()
.then((call) =>
attester.authorizeExtrinsic(call, signer, tokenHolder.address)
)
.then((tx) =>
BlockchainUtils.signAndSubmitTx(tx, tokenHolder, {
resolveOn: BlockchainUtils.IS_IN_BLOCK,
reSign: true,
})
)
).rejects.toThrowErrorWithCode(
ExtrinsicErrors.Attestation.ERROR_ALREADY_ATTESTED.code
)
}, 15_000)
it('should not be possible to use attestation for different claim', async () => {
const content = { name: 'Rolfi', age: 19 }
const claim = Claim.fromCTypeAndClaimContents(
DriversLicense,
content,
claimer.did
)
const request = RequestForAttestation.fromClaim(claim)
await request.signWithDid(signer, claimer)
const fakecredential: ICredential = {
request,
attestation: credential.attestation,
}
await expect(Credential.verify(fakecredential)).resolves.toBeFalsy()
}, 15_000)
it('should not be possible for the claimer to revoke an attestation', async () => {
await expect(
revoke(credential.getHash(), 0)
.then((call) =>
claimer.authorizeExtrinsic(call, signer, tokenHolder.address)
)
.then((tx) =>
BlockchainUtils.signAndSubmitTx(tx, tokenHolder, {
resolveOn: BlockchainUtils.IS_IN_BLOCK,
reSign: true,
})
)
).rejects.toThrowError('not permitted')
await expect(credential.verify()).resolves.toBe(true)
}, 45_000)
it('should be possible for the attester to revoke an attestation', async () => {
await expect(credential.verify()).resolves.toBe(true)
await revoke(credential.getHash(), 0)
.then((call) =>
attester.authorizeExtrinsic(call, signer, tokenHolder.address)
)
.then((tx) =>
BlockchainUtils.signAndSubmitTx(tx, tokenHolder, {
resolveOn: BlockchainUtils.IS_IN_BLOCK,
reSign: true,
})
)
await expect(credential.verify()).resolves.toBeFalsy()
}, 40_000)
it('should be possible for the deposit payer to remove an attestation', async () => {
await remove(credential.getHash(), 0)
.then((call) =>
attester.authorizeExtrinsic(call, signer, tokenHolder.address)
)
.then((tx) =>
BlockchainUtils.signAndSubmitTx(tx, tokenHolder, {
resolveOn: BlockchainUtils.IS_IN_BLOCK,
reSign: true,
})
)
}, 40_000)
})
describe('when there is another Ctype that works as a legitimation', () => {
beforeAll(async () => {
if (!(await CtypeOnChain(IsOfficialLicenseAuthority))) {
await IsOfficialLicenseAuthority.store()
.then((call) =>
attester.authorizeExtrinsic(call, signer, tokenHolder.address)
)
.then((tx) =>
BlockchainUtils.signAndSubmitTx(tx, tokenHolder, {
resolveOn: BlockchainUtils.IS_IN_BLOCK,
reSign: true,
})
)
}
await expect(CtypeOnChain(IsOfficialLicenseAuthority)).resolves.toBe(true)
}, 45_000)
it('can be included in a claim as a legitimation', async () => {
// make credential to be used as legitimation
const licenseAuthorization = Claim.fromCTypeAndClaimContents(
IsOfficialLicenseAuthority,
{
LicenseType: "Driver's License",
LicenseSubtypes: 'sportscars, tanks',
},
attester.did
)
const request1 = RequestForAttestation.fromClaim(licenseAuthorization)
await request1.signWithDid(signer, claimer)
const licenseAuthorizationGranted = Attestation.fromRequestAndDid(
request1,
anotherAttester.did
)
await licenseAuthorizationGranted
.store()
.then((call) =>
anotherAttester.authorizeExtrinsic(call, signer, tokenHolder.address)
)
.then((tx) =>
BlockchainUtils.signAndSubmitTx(tx, tokenHolder, {
resolveOn: BlockchainUtils.IS_IN_BLOCK,
reSign: true,
})
)
// make request including legitimation
const iBelieveICanDrive = Claim.fromCTypeAndClaimContents(
DriversLicense,
{ name: 'Dominic Toretto', age: 52 },
claimer.did
)
const request2 = RequestForAttestation.fromClaim(iBelieveICanDrive, {
legitimations: [
Credential.fromRequestAndAttestation(
request1,
licenseAuthorizationGranted
),
],
})
await request2.signWithDid(signer, claimer)
const LicenseGranted = Attestation.fromRequestAndDid(
request2,
attester.did
)
await LicenseGranted.store()
.then((call) =>
attester.authorizeExtrinsic(call, signer, tokenHolder.address)
)
.then((tx) =>
BlockchainUtils.signAndSubmitTx(tx, tokenHolder, {
resolveOn: BlockchainUtils.IS_IN_BLOCK,
reSign: true,
})
)
const license = Credential.fromRequestAndAttestation(
request2,
LicenseGranted
)
await Promise.all([
expect(license.verify()).resolves.toBe(true),
expect(licenseAuthorizationGranted.checkValidity()).resolves.toBe(true),
])
}, 70_000)
})
})
afterAll(() => {
disconnect()
}) | the_stack |
import { DebugProtocol } from 'vscode-debugprotocol';
import { BreakpointEvent, logger } from 'vscode-debugadapter';
import { ISetBreakpointsArgs, ISetBreakpointsResponseBody, ISetBreakpointResult } from '../debugAdapterInterfaces';
import * as ChromeUtils from './chromeUtils';
import { Protocol as Crdp } from 'devtools-protocol';
import { ReasonType } from './stoppedEvent';
import { InternalSourceBreakpoint } from './internalSourceBreakpoint';
import { ScriptContainer } from './scripts';
import { ChromeDebugAdapter } from '..';
import { IPendingBreakpoint, BreakpointSetResult } from './chromeDebugAdapter';
import * as utils from '../utils';
import * as path from 'path';
import * as nls from 'vscode-nls';
import { ChromeConnection } from './chromeConnection';
let localize = nls.loadMessageBundle();
/**
 * State tracked for a breakpoint with a hit-count condition: how many times it
 * has been hit, and a predicate deciding whether a given hit count should pause.
 */
export interface IHitConditionBreakpoint {
    // Number of times the breakpoint has been hit so far
    numHits: number;
    // Returns true when execution should actually pause for the given hit count
    shouldPause: (numHits: number) => boolean;
}
/**
* Encapsulates all the logic surrounding breakpoints (e.g. set, unset, hit count breakpoints, etc.)
*/
export class Breakpoints {
private static SET_BREAKPOINTS_TIMEOUT = 5000;
private static HITCONDITION_MATCHER = /^(>|>=|=|<|<=|%)?\s*([0-9]+)$/;
private _breakpointIdHandles: utils.ReverseHandles<Crdp.Debugger.BreakpointId>;
private _nextUnboundBreakpointId = 0;
private _pendingBreakpointsByUrl: Map<string, IPendingBreakpoint>;
private _hitConditionBreakpointsById: Map<Crdp.Debugger.BreakpointId, IHitConditionBreakpoint>;
// when working with _committedBreakpointsByUrl, we want to keep the url keys canonicalized for consistency
// use methods getValueFromCommittedBreakpointsByUrl and setValueForCommittedBreakpointsByUrl
private _committedBreakpointsByUrl = new Map<string, ISetBreakpointResult[]>();
private _setBreakpointsRequestQ: Promise<any> = Promise.resolve();
public get breakpointsQueueDrained(): Promise<void> {
return this._setBreakpointsRequestQ;
}
public get committedBreakpointsByUrl(): Map<string, ISetBreakpointResult[]> {
return this._committedBreakpointsByUrl;
}
private getValueFromCommittedBreakpointsByUrl(url: string): ISetBreakpointResult[] {
let canonicalizedUrl = utils.canonicalizeUrl(url);
return this._committedBreakpointsByUrl.get(canonicalizedUrl);
}
private setValueForCommittedBreakpointsByUrl(url: string, value: ISetBreakpointResult[]): void {
let canonicalizedUrl = utils.canonicalizeUrl(url);
this._committedBreakpointsByUrl.set(canonicalizedUrl, value);
}
private get chrome() { return this._chromeConnection.api; }
constructor(
private readonly adapter: ChromeDebugAdapter,
private readonly _chromeConnection: ChromeConnection,
) {
this._breakpointIdHandles = new utils.ReverseHandles<Crdp.Debugger.BreakpointId>();
this._pendingBreakpointsByUrl = new Map<string, IPendingBreakpoint>();
this._hitConditionBreakpointsById = new Map<Crdp.Debugger.BreakpointId, IHitConditionBreakpoint>();
}
reset() {
this._committedBreakpointsByUrl = new Map<string, ISetBreakpointResult[]>();
this._setBreakpointsRequestQ = Promise.resolve();
}
/**
* Using the request object from the DAP, set all breakpoints on the target
* @param args The setBreakpointRequest arguments from the DAP client
* @param scripts The script container associated with this instance of the adapter
* @param requestSeq The request sequence number from the DAP
* @param ids IDs passed in for previously unverified breakpoints
*/
public setBreakpoints(args: ISetBreakpointsArgs, scripts: ScriptContainer, requestSeq: number, ids?: number[]): Promise<ISetBreakpointsResponseBody> {
if (args.source.path) {
args.source.path = this.adapter.displayPathToRealPath(args.source.path);
args.source.path = utils.canonicalizeUrl(args.source.path);
}
return this.validateBreakpointsPath(args)
.then(() => {
// Deep copy the args that we are going to modify, and keep the original values in originalArgs
const originalArgs = args;
args = JSON.parse(JSON.stringify(args));
args = this.adapter.lineColTransformer.setBreakpoints(args);
const sourceMapTransformerResponse = this.adapter.sourceMapTransformer.setBreakpoints(args, requestSeq, ids);
if (sourceMapTransformerResponse && sourceMapTransformerResponse.args) {
args = sourceMapTransformerResponse.args;
}
if (sourceMapTransformerResponse && sourceMapTransformerResponse.ids) {
ids = sourceMapTransformerResponse.ids;
}
args.source = this.adapter.pathTransformer.setBreakpoints(args.source);
// Get the target url of the script
let targetScriptUrl: string;
if (args.source.sourceReference) {
const handle = scripts.getSource(args.source.sourceReference);
if ((!handle || !handle.scriptId) && args.source.path) {
// A sourcemapped script with inline sources won't have a scriptId here, but the
// source.path has been fixed.
targetScriptUrl = args.source.path;
} else {
const targetScript = scripts.getScriptById(handle.scriptId);
if (targetScript) {
targetScriptUrl = targetScript.url;
}
}
} else if (args.source.path) {
targetScriptUrl = args.source.path;
}
if (targetScriptUrl) {
// DebugProtocol sends all current breakpoints for the script. Clear all breakpoints for the script then add all of them
const internalBPs = args.breakpoints.map(bp => new InternalSourceBreakpoint(bp));
const setBreakpointsPFailOnError = this._setBreakpointsRequestQ
.then(() => this.clearAllBreakpoints(targetScriptUrl))
.then(() => this.addBreakpoints(targetScriptUrl, internalBPs, scripts))
.then(responses => ({ breakpoints: this.targetBreakpointResponsesToBreakpointSetResults(targetScriptUrl, responses, internalBPs, ids) }));
const setBreakpointsPTimeout = utils.promiseTimeout(setBreakpointsPFailOnError, Breakpoints.SET_BREAKPOINTS_TIMEOUT, localize('setBPTimedOut', 'Set breakpoints request timed out'));
// Do just one setBreakpointsRequest at a time to avoid interleaving breakpoint removed/breakpoint added requests to Crdp, which causes issues.
// Swallow errors in the promise queue chain so it doesn't get blocked, but return the failing promise for error handling.
this._setBreakpointsRequestQ = setBreakpointsPTimeout.catch(e => {
// Log the timeout, but any other error will be logged elsewhere
if (e.message && e.message.indexOf('timed out') >= 0) {
logger.error(e.stack);
}
});
// Return the setBP request, no matter how long it takes. It may take awhile in Node 7.5 - 7.7, see https://github.com/nodejs/node/issues/11589
return setBreakpointsPFailOnError.then(setBpResultBody => {
const body = { breakpoints: setBpResultBody.breakpoints.map(setBpResult => setBpResult.breakpoint) };
if (body.breakpoints.every(bp => !bp.verified)) {
// We need to send the original args to avoid adjusting the line and column numbers twice here
return this.unverifiedBpResponseForBreakpoints(originalArgs, requestSeq, targetScriptUrl, body.breakpoints, localize('bp.fail.unbound', 'Breakpoint set but not yet bound'));
}
body.breakpoints = this.adapter.sourceMapTransformer.setBreakpointsResponse(body.breakpoints, true, requestSeq) || body.breakpoints;
this.adapter.lineColTransformer.setBreakpointsResponse(body);
return body;
});
} else {
return Promise.resolve(this.unverifiedBpResponse(args, requestSeq, undefined, localize('bp.fail.noscript', "Can't find script for breakpoint request")));
}
},
e => this.unverifiedBpResponse(args, requestSeq, undefined, e.message));
}
protected validateBreakpointsPath(args: ISetBreakpointsArgs): Promise<void> {
if (!args.source.path || args.source.sourceReference) return Promise.resolve();
// When break on load is active, we don't need to validate the path, so return
if (this.adapter.breakOnLoadActive) {
return Promise.resolve();
}
return this.adapter.sourceMapTransformer.getGeneratedPathFromAuthoredPath(args.source.path).then<void>(mappedPath => {
if (!mappedPath) {
return utils.errP(localize('validateBP.sourcemapFail', 'Breakpoint ignored because generated code not found (source map problem?).'));
}
const targetPath = this.adapter.pathTransformer.getTargetPathFromClientPath(mappedPath);
if (!targetPath) {
return utils.errP(localize('validateBP.notFound', 'Breakpoint ignored because target path not found'));
}
return undefined;
});
}
/**
* Makes the actual call to either Debugger.setBreakpoint or Debugger.setBreakpointByUrl, and returns the response.
* Responses from setBreakpointByUrl are transformed to look like the response from setBreakpoint, so they can be
* handled the same.
*/
protected async addBreakpoints(url: string, breakpoints: InternalSourceBreakpoint[], scripts: ScriptContainer) {
let responsePs: Promise<ISetBreakpointResult>[];
if (ChromeUtils.isEvalScript(url)) {
// eval script with no real url - use debugger_setBreakpoint
const scriptId: Crdp.Runtime.ScriptId = utils.lstrip(url, ChromeUtils.EVAL_NAME_PREFIX);
responsePs = breakpoints.map(({ line, column = 0, condition }) => this.chrome.Debugger.setBreakpoint({ location: { scriptId, lineNumber: line, columnNumber: column }, condition }));
} else {
// script that has a url - use debugger_setBreakpointByUrl so that Chrome will rebind the breakpoint immediately
// after refreshing the page. This is the only way to allow hitting breakpoints in code that runs immediately when
// the page loads.
const script = scripts.getScriptByUrl(url);
// If script has been parsed, script object won't be undefined and we would have the mapping file on the disk and we can directly set breakpoint using that
if (!this.adapter.breakOnLoadActive || script) {
const urlRegex = utils.pathToRegex(url);
responsePs = breakpoints.map(({ line, column = 0, condition }) => {
return this.addOneBreakpointByUrl(script && script.scriptId, urlRegex, line, column, condition);
});
} else { // Else if script hasn't been parsed and break on load is active, we need to do extra processing
if (this.adapter.breakOnLoadActive) {
return await this.adapter.breakOnLoadHelper.handleAddBreakpoints(url, breakpoints);
}
}
}
// Join all setBreakpoint requests to a single promise
return Promise.all(responsePs);
}
/**
* Adds a single breakpoint in the target using the url for the script
* @param scriptId the chrome-devtools script id for the script on which we want to add a breakpoint
* @param urlRegex The regular expression string which will be used to find the correct url on which to set the breakpoint
* @param lineNumber Line number of the breakpoint
* @param columnNumber Column number of the breakpoint
* @param condition The (optional) breakpoint condition
*/
async addOneBreakpointByUrl(scriptId: Crdp.Runtime.ScriptId | undefined, urlRegex: string, lineNumber: number, columnNumber: number, condition: string): Promise<ISetBreakpointResult> {
let bpLocation = { lineNumber, columnNumber };
if (this.adapter.columnBreakpointsEnabled && scriptId) { // scriptId undefined when script not yet loaded, can't fix up column BP :(
try {
const possibleBpResponse = await this.chrome.Debugger.getPossibleBreakpoints({
start: { scriptId, lineNumber, columnNumber: 0 },
end: { scriptId, lineNumber: lineNumber + 1, columnNumber: 0 },
restrictToFunction: false });
if (possibleBpResponse.locations.length) {
const selectedLocation = ChromeUtils.selectBreakpointLocation(lineNumber, columnNumber, possibleBpResponse.locations);
bpLocation = { lineNumber: selectedLocation.lineNumber, columnNumber: selectedLocation.columnNumber || 0 };
}
} catch (e) {
// getPossibleBPs not supported
}
}
let result;
try {
result = await this.chrome.Debugger.setBreakpointByUrl({ urlRegex, lineNumber: bpLocation.lineNumber, columnNumber: bpLocation.columnNumber, condition });
} catch (e) {
if (e.message === 'Breakpoint at specified location already exists.') {
return {
actualLocation: { lineNumber: bpLocation.lineNumber, columnNumber: bpLocation.columnNumber, scriptId }
};
} else {
throw e;
}
}
// Now convert the response to a SetBreakpointResponse so both response types can be handled the same
const locations = result.locations;
return <Crdp.Debugger.SetBreakpointResponse>{
breakpointId: result.breakpointId,
actualLocation: locations[0] && {
lineNumber: locations[0].lineNumber,
columnNumber: locations[0].columnNumber,
scriptId
}
};
}
/**
* Using the request object from the DAP, set all breakpoints on the target
* @param args The setBreakpointRequest arguments from the DAP client
* @param scripts The script container associated with this instance of the adapter
* @param requestSeq The request sequence number from the DAP
* @param ids IDs passed in for previously unverified breakpoints
*/
public async getBreakpointsLocations(args: DebugProtocol.BreakpointLocationsArguments, scripts: ScriptContainer, requestSeq: number): Promise<DebugProtocol.BreakpointLocationsResponse['body']> {
if (args.source.path) {
args.source.path = this.adapter.displayPathToRealPath(args.source.path);
args.source.path = utils.canonicalizeUrl(args.source.path);
}
try {
await this.validateBreakpointsPath(args);
} catch (e) {
logger.log('breakpointsLocations failed: ' + e.message);
return { breakpoints: [] };
}
// Deep copy the args that we are going to modify, and keep the original values in originalArgs
args = JSON.parse(JSON.stringify(args));
args.endLine = this.adapter.lineColTransformer.convertClientLineToDebugger(typeof args.endLine === 'number' ? args.endLine : args.line + 1);
args.endColumn = this.adapter.lineColTransformer.convertClientLineToDebugger(args.endColumn || 1);
args.line = this.adapter.lineColTransformer.convertClientLineToDebugger(args.line);
args.column = this.adapter.lineColTransformer.convertClientColumnToDebugger(args.column || 1);
if (args.source.path) {
const source1 = JSON.parse(JSON.stringify(args.source));
const startArgs = this.adapter.sourceMapTransformer.setBreakpoints({ breakpoints: [{ line: args.line, column: args.column }], source: source1 }, requestSeq);
args.line = startArgs.args.breakpoints[0].line;
args.column = startArgs.args.breakpoints[0].column;
const endArgs = this.adapter.sourceMapTransformer.setBreakpoints({ breakpoints: [{ line: args.endLine, column: args.endColumn }], source: args.source }, requestSeq);
args.endLine = endArgs.args.breakpoints[0].line;
args.endColumn = endArgs.args.breakpoints[0].column;
}
args.source = this.adapter.pathTransformer.setBreakpoints(args.source);
// Get the target url of the script
let targetScriptUrl: string;
if (args.source.sourceReference) {
const handle = scripts.getSource(args.source.sourceReference);
if ((!handle || !handle.scriptId) && args.source.path) {
// A sourcemapped script with inline sources won't have a scriptId here, but the
// source.path has been fixed.
targetScriptUrl = args.source.path;
} else {
const targetScript = scripts.getScriptById(handle.scriptId);
if (targetScript) {
targetScriptUrl = targetScript.url;
}
}
} else if (args.source.path) {
targetScriptUrl = args.source.path;
}
if (targetScriptUrl) {
const script = scripts.getScriptByUrl(targetScriptUrl);
if (script) {
const end = typeof args.endLine === 'number' ?
{ scriptId: script.scriptId, lineNumber: args.endLine, columnNumber: args.endColumn || 0 } :
{ scriptId: script.scriptId, lineNumber: args.line + 1, columnNumber: 0 };
const possibleBpResponse = await this.chrome.Debugger.getPossibleBreakpoints({
start: { scriptId: script.scriptId, lineNumber: args.line, columnNumber: args.column || 0 },
end,
restrictToFunction: false
});
if (possibleBpResponse.locations) {
let breakpoints = possibleBpResponse.locations.map(loc => {
return <DebugProtocol.Breakpoint>{
line: loc.lineNumber,
column: loc.columnNumber
};
});
breakpoints = this.adapter.sourceMapTransformer.setBreakpointsResponse(breakpoints, false, requestSeq);
const response = { breakpoints };
this.adapter.lineColTransformer.setBreakpointsResponse(response);
return response as DebugProtocol.BreakpointLocationsResponse['body'];
} else {
return { breakpoints: [] };
}
}
}
return null;
}
/**
* Transform breakpoint responses from the chrome-devtools target to the DAP response
* @param url The URL of the script for which we are translating breakpoint responses
* @param responses The setBreakpoint responses from the chrome-devtools target
* @param requestBps The list of requested breakpoints pending a response
* @param ids IDs passed in for previously unverified BPs
*/
private targetBreakpointResponsesToBreakpointSetResults(url: string, responses: ISetBreakpointResult[], requestBps: InternalSourceBreakpoint[], ids?: number[]): BreakpointSetResult[] {
// Don't cache errored responses
const committedBps = responses
.filter(response => response && response.breakpointId);
// Cache successfully set breakpoint ids from chrome in committedBreakpoints set
this.setValueForCommittedBreakpointsByUrl(url, committedBps);
// Map committed breakpoints to DebugProtocol response breakpoints
return responses
.map((response, i) => {
// The output list needs to be the same length as the input list, so map errors to
// unverified breakpoints.
if (!response) {
return {
isSet: false,
breakpoint: <DebugProtocol.Breakpoint>{
verified: false
}
};
}
// response.breakpointId is undefined when no target BP is backing this BP, e.g. it's at the same location
// as another BP
const responseBpId = response.breakpointId || this.generateNextUnboundBreakpointId();
let bpId: number;
if (ids && ids[i]) {
// IDs passed in for previously unverified BPs
bpId = ids[i];
this._breakpointIdHandles.set(bpId, responseBpId);
} else {
bpId = this._breakpointIdHandles.lookup(responseBpId) ||
this._breakpointIdHandles.create(responseBpId);
}
if (!response.actualLocation) {
// If we don't have an actualLocation nor a breakpointId this is a pseudo-breakpoint because we are using break-on-load
// so we mark the breakpoint as not set, so i'll be set after we load the actual script that has the breakpoint
return {
isSet: response.breakpointId !== undefined,
breakpoint: <DebugProtocol.Breakpoint>{
id: bpId,
verified: false
}
};
}
const thisBpRequest = requestBps[i];
if (thisBpRequest.hitCondition) {
if (!this.addHitConditionBreakpoint(thisBpRequest, response)) {
return {
isSet: true,
breakpoint: <DebugProtocol.Breakpoint>{
id: bpId,
message: localize('invalidHitCondition', 'Invalid hit condition: {0}', thisBpRequest.hitCondition),
verified: false
}
};
}
}
return {
isSet: true,
breakpoint: <DebugProtocol.Breakpoint>{
id: bpId,
verified: true,
line: response.actualLocation.lineNumber,
column: response.actualLocation.columnNumber
}
};
});
}
private addHitConditionBreakpoint(requestBp: InternalSourceBreakpoint, response: ISetBreakpointResult): boolean {
const result = Breakpoints.HITCONDITION_MATCHER.exec(requestBp.hitCondition.trim());
if (result && result.length >= 3) {
let op = result[1] || '>=';
if (op === '=') op = '==';
const value = result[2];
const expr = op === '%'
? `return (numHits % ${value}) === 0;`
: `return numHits ${op} ${value};`;
// eval safe because of the regex, and this is only a string that the current user will type in
/* tslint:disable:no-function-constructor-with-string-args */
const shouldPause: (numHits: number) => boolean = <any>new Function('numHits', expr);
/* tslint:enable:no-function-constructor-with-string-args */
this._hitConditionBreakpointsById.set(response.breakpointId, { numHits: 0, shouldPause });
return true;
} else {
return false;
}
}
private clearAllBreakpoints(url: string): Promise<void> {
// We want to canonicalize this url because this._committedBreakpointsByUrl keeps url keys in canonicalized form
url = utils.canonicalizeUrl(url);
if (!this._committedBreakpointsByUrl.has(url)) {
return Promise.resolve();
}
// Remove breakpoints one at a time. Seems like it would be ok to send the removes all at once,
// but there is a chrome bug where when removing 5+ or so breakpoints at once, it gets into a weird
// state where later adds on the same line will fail with 'breakpoint already exists' even though it
// does not break there.
return this._committedBreakpointsByUrl.get(url).reduce((p, bp) => {
return p.then(() => this.chrome.Debugger.removeBreakpoint({ breakpointId: bp.breakpointId })).then(() => { });
}, Promise.resolve()).then(() => {
this._committedBreakpointsByUrl.delete(url);
});
}
public onBreakpointResolved(params: Crdp.Debugger.BreakpointResolvedEvent, scripts: ScriptContainer): void {
const script = scripts.getScriptById(params.location.scriptId);
const breakpointId = this._breakpointIdHandles.lookup(params.breakpointId);
if (!script || !breakpointId) {
// Breakpoint resolved for a script we don't know about or a breakpoint we don't know about
return;
}
// If the breakpoint resolved is a stopOnEntry breakpoint, we just return since we don't need to send it to client
if (this.adapter.breakOnLoadActive && this.adapter.breakOnLoadHelper.stopOnEntryBreakpointIdToRequestedFileName.has(params.breakpointId)) {
return;
}
// committed breakpoints (this._committedBreakpointsByUrl) should always have url keys in canonicalized form
const committedBps = this.getValueFromCommittedBreakpointsByUrl(script.url) || [];
if (!committedBps.find(committedBp => committedBp.breakpointId === params.breakpointId)) {
committedBps.push({breakpointId: params.breakpointId, actualLocation: params.location});
}
this.setValueForCommittedBreakpointsByUrl(script.url, committedBps);
const bp = <DebugProtocol.Breakpoint>{
id: breakpointId,
verified: true,
line: params.location.lineNumber,
column: params.location.columnNumber
};
// need to canonicalize this path because the following maps use paths canonicalized
const scriptPath = utils.canonicalizeUrl(this.adapter.pathTransformer.breakpointResolved(bp, script.url));
if (this._pendingBreakpointsByUrl.has(scriptPath)) {
// If we set these BPs before the script was loaded, remove from the pending list
this._pendingBreakpointsByUrl.delete(scriptPath);
}
this.adapter.sourceMapTransformer.breakpointResolved(bp, scriptPath);
this.adapter.lineColTransformer.breakpointResolved(bp);
this.adapter.session.sendEvent(new BreakpointEvent('changed', bp));
}
private generateNextUnboundBreakpointId(): string {
const unboundBreakpointUniquePrefix = '__::[vscode_chrome_debug_adapter_unbound_breakpoint]::';
return `${unboundBreakpointUniquePrefix}${this._nextUnboundBreakpointId++}`;
}
private unverifiedBpResponse(args: ISetBreakpointsArgs, requestSeq: number, targetScriptUrl: string, message?: string): ISetBreakpointsResponseBody {
const breakpoints = args.breakpoints.map(bp => {
return <DebugProtocol.Breakpoint>{
verified: false,
line: bp.line,
column: bp.column,
message,
id: this._breakpointIdHandles.create(this.generateNextUnboundBreakpointId())
};
});
return this.unverifiedBpResponseForBreakpoints(args, requestSeq, targetScriptUrl, breakpoints, message);
}
private unverifiedBpResponseForBreakpoints(args: ISetBreakpointsArgs, requestSeq: number, targetScriptUrl: string, breakpoints: DebugProtocol.Breakpoint[], defaultMessage?: string): ISetBreakpointsResponseBody {
breakpoints.forEach(bp => {
if (!bp.message) {
bp.message = defaultMessage;
}
});
if (args.source.path) {
const ids = breakpoints.map(bp => bp.id);
// setWithPath: record whether we attempted to set the breakpoint, and if so, with which path.
// We can use this to tell when the script is loaded whether we guessed correctly, and predict whether the BP will bind.
this._pendingBreakpointsByUrl.set(
utils.canonicalizeUrl(args.source.path),
{ args, ids, requestSeq, setWithPath: this.adapter.breakOnLoadActive ? '' : targetScriptUrl }); // Breakpoints need to be re-set when break-on-load is enabled
}
return { breakpoints };
}
public async handleScriptParsed(script: Crdp.Debugger.ScriptParsedEvent, scripts: ScriptContainer, mappedUrl: string, sources: string[]) {
if (sources) {
const filteredSources = sources.filter(source => source !== mappedUrl); // Tools like babel-register will produce sources with the same path as the generated script
for (const filteredSource of filteredSources) {
await this.resolvePendingBPs(filteredSource, scripts);
}
}
if (utils.canonicalizeUrl(script.url) === mappedUrl && this._pendingBreakpointsByUrl.has(mappedUrl) && utils.canonicalizeUrl(this._pendingBreakpointsByUrl.get(mappedUrl).setWithPath) === utils.canonicalizeUrl(mappedUrl)) {
// If the pathTransformer had no effect, and we attempted to set the BPs with that path earlier, then assume that they are about
// to be resolved in this loaded script, and remove the pendingBP.
this._pendingBreakpointsByUrl.delete(mappedUrl);
} else {
await this.resolvePendingBPs(mappedUrl, scripts);
}
}
public async resolvePendingBPs (source: string, scripts: ScriptContainer) {
source = source && utils.canonicalizeUrl(source);
const pendingBP = this._pendingBreakpointsByUrl.get(source);
if (pendingBP && (!pendingBP.setWithPath || utils.canonicalizeUrl(pendingBP.setWithPath) === source)) {
logger.log(`OnScriptParsed.resolvePendingBPs: Resolving pending breakpoints: ${JSON.stringify(pendingBP)}`);
await this.resolvePendingBreakpoint(pendingBP, scripts);
this._pendingBreakpointsByUrl.delete(source);
} else if (source) {
const sourceFileName = path.basename(source).toLowerCase();
if (Array.from(this._pendingBreakpointsByUrl.keys()).find(key => key.toLowerCase().indexOf(sourceFileName) > -1)) {
logger.log(`OnScriptParsed.resolvePendingBPs: The following pending breakpoints won't be resolved: ${JSON.stringify(pendingBP)} pendingBreakpointsByUrl = ${JSON.stringify([...this._pendingBreakpointsByUrl])} source = ${source}`);
}
}
}
public resolvePendingBreakpoint(pendingBP: IPendingBreakpoint, scripts: ScriptContainer): Promise<void> {
return this.setBreakpoints(pendingBP.args, scripts, pendingBP.requestSeq, pendingBP.ids).then(response => {
response.breakpoints.forEach((bp, i) => {
bp.id = pendingBP.ids[i];
this.adapter.session.sendEvent(new BreakpointEvent('changed', bp));
});
});
}
public handleHitCountBreakpoints(expectingStopReason: ReasonType, hitBreakpoints) {
// Did we hit a hit condition breakpoint?
for (let hitBp of hitBreakpoints) {
if (this._hitConditionBreakpointsById.has(hitBp)) {
// Increment the hit count and check whether to pause
const hitConditionBp = this._hitConditionBreakpointsById.get(hitBp);
hitConditionBp.numHits++;
// Only resume if we didn't break for some user action (step, pause button)
if (!expectingStopReason && !hitConditionBp.shouldPause(hitConditionBp.numHits)) {
this.chrome.Debugger.resume()
.catch(() => { /* ignore failures */ });
return { didPause: false };
}
}
}
return null;
}
}
import { BaseResource, CloudError, AzureServiceClientOptions } from "@azure/ms-rest-azure-js";
import * as msRest from "@azure/ms-rest-js";
export { BaseResource, CloudError };
/**
* The shipping address of the customer.
*/
export interface Address {
/**
* The address line1.
*/
addressLine1: string;
/**
* The address line2.
*/
addressLine2?: string;
/**
* The address line3.
*/
addressLine3?: string;
/**
* The postal code.
*/
postalCode: string;
/**
* The city name.
*/
city: string;
/**
* The state name.
*/
state: string;
/**
* The country name.
*/
country: string;
}
/**
* Error details for the alert.
*/
export interface AlertErrorDetails {
/**
* Error code.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly errorCode?: string;
/**
* Error Message.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly errorMessage?: string;
/**
* Number of occurrences.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly occurrences?: number;
}
/**
* Represents the base class for all object models.
*/
export interface ARMBaseModel extends BaseResource {
/**
* The path ID that uniquely identifies the object.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly id?: string;
/**
* The object name.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly name?: string;
/**
* The hierarchical type of the object.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly type?: string;
}
/**
* Alert on the data box edge/gateway device.
*/
export interface Alert extends ARMBaseModel {
/**
* Alert title.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly title?: string;
/**
* Alert type.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly alertType?: string;
/**
* UTC time when the alert appeared.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly appearedAtDateTime?: Date;
/**
* Alert recommendation.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly recommendation?: string;
/**
* Severity of the alert. Possible values include: 'Informational', 'Warning', 'Critical'
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly severity?: AlertSeverity;
/**
* Error details of the alert.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly errorDetails?: AlertErrorDetails;
/**
* Alert details.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly detailedInformation?: { [propertyName: string]: string };
}
/**
 * Represent the secrets intended for encryption with asymmetric key pair.
 */
export interface AsymmetricEncryptedSecret {
  /**
   * The value of the secret.
   */
  value: string;
  /**
   * Thumbprint certificate used to encrypt "Value". If the value is unencrypted, it will be
   * null.
   */
  encryptionCertThumbprint?: string;
  /**
   * The algorithm used to encrypt "Value". Possible values include: 'None', 'AES256',
   * 'RSAES_PKCS1_v_1_5'
   */
  encryptionAlgorithm: EncryptionAlgorithm;
}
/**
* Symmetric key for authentication.
*/
export interface SymmetricKey {
/**
* Connection string based on the symmetric key.
*/
connectionString?: AsymmetricEncryptedSecret;
}
/**
* Authentication mechanism for IoT devices.
*/
export interface Authentication {
/**
* Symmetric key for authentication.
*/
symmetricKey?: SymmetricKey;
}
/**
* Azure container mapping of the endpoint.
*/
export interface AzureContainerInfo {
/**
* ID of the storage account credential used to access storage.
*/
storageAccountCredentialId: string;
/**
* Container name (Based on the data format specified, this represents the name of Azure
* Files/Page blob/Block blob).
*/
containerName: string;
/**
* Storage format used for the file represented by the share. Possible values include:
* 'BlockBlob', 'PageBlob', 'AzureFile'
*/
dataFormat: AzureContainerDataFormat;
}
/**
* The bandwidth schedule details.
*/
export interface BandwidthSchedule extends ARMBaseModel {
/**
* The start time of the schedule in UTC.
*/
start: string;
/**
* The stop time of the schedule in UTC.
*/
stop: string;
/**
* The bandwidth rate in Mbps.
*/
rateInMbps: number;
/**
* The days of the week when this schedule is applicable.
*/
days: DayOfWeek[];
}
/**
* The mapping between a particular client IP and the type of access client has on the NFS share.
*/
export interface ClientAccessRight {
/**
* IP of the client.
*/
client: string;
/**
* Type of access to be allowed for the client. Possible values include: 'NoAccess', 'ReadOnly',
* 'ReadWrite'
*/
accessPermission: ClientPermissionType;
}
/**
* Contains all the contact details of the customer.
*/
export interface ContactDetails {
/**
* The contact person name.
*/
contactPerson: string;
/**
* The name of the company.
*/
companyName: string;
/**
* The phone number.
*/
phone: string;
/**
* The email list.
*/
emailList: string[];
}
/**
* The SKU type.
*/
export interface Sku {
/**
* SKU name. Possible values include: 'Gateway', 'Edge'
*/
name?: SkuName;
/**
* The SKU tier. This is based on the SKU name. Possible values include: 'Standard'
*/
tier?: SkuTier;
}
/**
* The Data Box Edge/Gateway device.
*/
export interface DataBoxEdgeDevice extends ARMBaseModel {
/**
* The location of the device. This is a supported and registered Azure geographical region (for
* example, West US, East US, or Southeast Asia). The geographical region of a device cannot be
* changed once it is created, but if an identical geographical region is specified on update,
* the request will succeed.
*/
location: string;
/**
* The list of tags that describe the device. These tags can be used to view and group this
* device (across resource groups).
*/
tags?: { [propertyName: string]: string };
/**
* The SKU type.
*/
sku?: Sku;
/**
* The etag for the devices.
*/
etag?: string;
/**
* The status of the Data Box Edge/Gateway device. Possible values include: 'ReadyToSetup',
* 'Online', 'Offline', 'NeedsAttention', 'Disconnected', 'PartiallyDisconnected'
*/
dataBoxEdgeDeviceStatus?: DataBoxEdgeDeviceStatus;
/**
* The Serial Number of Data Box Edge/Gateway device.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly serialNumber?: string;
/**
* The Description of the Data Box Edge/Gateway device.
*/
description?: string;
/**
* The description of the Data Box Edge/Gateway device model.
*/
modelDescription?: string;
/**
* The type of the Data Box Edge/Gateway device. Possible values include: 'DataBoxEdgeDevice'
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly deviceType?: DeviceType;
/**
* The Data Box Edge/Gateway device name.
*/
friendlyName?: string;
/**
* The Data Box Edge/Gateway device culture.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly culture?: string;
/**
* The Data Box Edge/Gateway device model.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly deviceModel?: string;
/**
* The Data Box Edge/Gateway device software version.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly deviceSoftwareVersion?: string;
/**
* The Data Box Edge/Gateway device local capacity in MB.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly deviceLocalCapacity?: number;
/**
* The Data Box Edge/Gateway device timezone.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly timeZone?: string;
/**
* The device software version number of the device (eg: 1.2.18105.6).
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly deviceHcsVersion?: string;
/**
* Type of compute roles configured.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly configuredRoleTypes?: RoleTypes[];
}
/**
* The extended Info of the Data Box Edge/Gateway device.
*/
export interface DataBoxEdgeDeviceExtendedInfo extends ARMBaseModel {
/**
* The digital signature of encrypted certificate.
*/
encryptionKeyThumbprint?: string;
/**
* The public part of the encryption certificate. Client uses this to encrypt any secret.
*/
encryptionKey?: string;
/**
* The Resource ID of the Resource.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly resourceKey?: string;
}
/**
* The Data Box Edge/Gateway device patch.
*/
export interface DataBoxEdgeDevicePatch {
/**
* The tags attached to the Data Box Edge/Gateway resource.
*/
tags?: { [propertyName: string]: string };
}
/**
* File source details.
*/
export interface FileSourceInfo {
/**
* File share ID.
*/
shareId: string;
}
/**
* Compute role against which events will be raised.
*/
export interface RoleSinkInfo {
/**
* Compute role ID.
*/
roleId: string;
}
/**
* Contains the possible cases for Trigger.
*/
export type TriggerUnion = Trigger | FileEventTrigger | PeriodicTimerEventTrigger;
/**
* Trigger details.
*/
export interface Trigger {
/**
* Polymorphic Discriminator
*/
kind: "Trigger";
/**
* The path ID that uniquely identifies the object.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly id?: string;
/**
* The object name.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly name?: string;
/**
* The hierarchical type of the object.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly type?: string;
}
/**
* Trigger details.
*/
export interface FileEventTrigger {
/**
* Polymorphic Discriminator
*/
kind: "FileEvent";
/**
* The path ID that uniquely identifies the object.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly id?: string;
/**
* The object name.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly name?: string;
/**
* The hierarchical type of the object.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly type?: string;
/**
* File event source details.
*/
sourceInfo: FileSourceInfo;
/**
* Role sink info.
*/
sinkInfo: RoleSinkInfo;
/**
* A custom context tag typically used to correlate the trigger against its usage. For example,
* if a periodic timer trigger is intended for certain specific IoT modules in the device, the
* tag can be the name or the image URL of the module.
*/
customContextTag?: string;
}
/**
* Metadata of IoT device/IoT Edge device to be configured.
*/
export interface IoTDeviceInfo {
/**
* ID of the IoT device/edge device.
*/
deviceId: string;
/**
* Host name for the IoT hub associated to the device.
*/
ioTHostHub: string;
/**
* IoT device authentication info.
*/
authentication?: Authentication;
}
/**
* The share mount point.
*/
export interface MountPointMap {
/**
* ID of the share mounted to the role VM.
*/
shareId: string;
/**
* ID of the role to which share is mounted.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly roleId?: string;
/**
* Mount point for the share.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly mountPoint?: string;
/**
* Role type. Possible values include: 'IOT', 'ASA', 'Functions', 'Cognitive'
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly roleType?: RoleTypes;
}
/**
* Contains the possible cases for Role.
*/
export type RoleUnion = Role | IoTRole;
/**
* Compute role.
*/
export interface Role {
/**
* Polymorphic Discriminator
*/
kind: "Role";
/**
* The path ID that uniquely identifies the object.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly id?: string;
/**
* The object name.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly name?: string;
/**
* The hierarchical type of the object.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly type?: string;
}
/**
* Compute role.
*/
export interface IoTRole {
/**
* Polymorphic Discriminator
*/
kind: "IOT";
/**
* The path ID that uniquely identifies the object.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly id?: string;
/**
* The object name.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly name?: string;
/**
* The hierarchical type of the object.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly type?: string;
/**
* Host OS supported by the IoT role. Possible values include: 'Windows', 'Linux'
*/
hostPlatform: PlatformType;
/**
* IoT device metadata to which data box edge device needs to be connected.
*/
ioTDeviceDetails: IoTDeviceInfo;
/**
* IoT edge device to which the IoT role needs to be configured.
*/
ioTEdgeDeviceDetails: IoTDeviceInfo;
/**
* Mount points of shares in role(s).
*/
shareMappings?: MountPointMap[];
/**
* Role status. Possible values include: 'Enabled', 'Disabled'
*/
roleStatus: RoleStatus;
}
/**
* Details related to the IPv4 address configuration.
*/
export interface Ipv4Config {
/**
* The IPv4 address of the network adapter.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly ipAddress?: string;
/**
* The IPv4 subnet of the network adapter.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly subnet?: string;
/**
* The IPv4 gateway of the network adapter.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly gateway?: string;
}
/**
* Details related to the IPv6 address configuration.
*/
export interface Ipv6Config {
/**
* The IPv6 address of the network adapter.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly ipAddress?: string;
/**
* The IPv6 prefix of the network adapter.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly prefixLength?: number;
/**
* The IPv6 gateway of the network adapter.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly gateway?: string;
}
/**
* The job error items.
*/
export interface JobErrorItem {
/**
* The recommended actions.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly recommendations?: string[];
/**
* The code intended for programmatic access.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly code?: string;
/**
* The message that describes the error in detail.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly message?: string;
}
/**
* The job error information containing the list of job errors.
*/
export interface JobErrorDetails {
/**
* The error details.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly errorDetails?: JobErrorItem[];
/**
* The code intended for programmatic access.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly code?: string;
/**
* The message that describes the error in detail.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly message?: string;
}
/**
* Details about the download progress of update.
*/
export interface UpdateDownloadProgress {
/**
* The download phase. Possible values include: 'Unknown', 'Initializing', 'Downloading',
* 'Verifying'
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly downloadPhase?: DownloadPhase;
/**
* Percentage of completion.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly percentComplete?: number;
/**
* Total bytes to download.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly totalBytesToDownload?: number;
/**
* Total bytes downloaded.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly totalBytesDownloaded?: number;
/**
* Number of updates to download.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly numberOfUpdatesToDownload?: number;
/**
* Number of updates downloaded.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly numberOfUpdatesDownloaded?: number;
}
/**
* Progress details during installation of updates.
*/
export interface UpdateInstallProgress {
/**
* Percentage completed.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly percentComplete?: number;
/**
* Number of updates to install.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly numberOfUpdatesToInstall?: number;
/**
* Number of updates installed.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly numberOfUpdatesInstalled?: number;
}
/**
* A device job.
*/
export interface Job {
/**
* The path ID that uniquely identifies the object.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly id?: string;
/**
* The name of the object.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly name?: string;
/**
* The hierarchical type of the object.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly type?: string;
/**
* The current status of the job. Possible values include: 'Invalid', 'Running', 'Succeeded',
* 'Failed', 'Canceled', 'Paused', 'Scheduled'
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly status?: JobStatus;
/**
* The UTC date and time at which the job started.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly startTime?: Date;
/**
* The UTC date and time at which the job completed.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly endTime?: Date;
/**
* The percentage of the job that is complete.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly percentComplete?: number;
/**
* The error details.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly error?: JobErrorDetails;
/**
* The type of the job. Possible values include: 'Invalid', 'ScanForUpdates', 'DownloadUpdates',
* 'InstallUpdates', 'RefreshShare'
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly jobType?: JobType;
/**
* Current stage of the update operation. Possible values include: 'Unknown', 'Initial',
* 'ScanStarted', 'ScanComplete', 'ScanFailed', 'DownloadStarted', 'DownloadComplete',
* 'DownloadFailed', 'InstallStarted', 'InstallComplete', 'InstallFailed', 'RebootInitiated',
* 'Success', 'Failure', 'RescanStarted', 'RescanComplete', 'RescanFailed'
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly currentStage?: UpdateOperationStage;
/**
* The download progress.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly downloadProgress?: UpdateDownloadProgress;
/**
* The install progress.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly installProgress?: UpdateInstallProgress;
/**
* Total number of errors encountered during the refresh process.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly totalRefreshErrors?: number;
/**
* Local share/remote container relative path to the error manifest file of the refresh.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly errorManifestFile?: string;
/**
* ARM ID of the share that was refreshed.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly shareId?: string;
/**
* If only subfolders need to be refreshed, then the subfolder path inside the share. (The path
* is empty if there are no subfolders.)
*/
folder?: string;
}
/**
 * Metric Dimension v1.
 */
export interface MetricDimensionV1 {
  /**
   * Name of the metrics dimension.
   */
  name?: string;
  /**
   * Display name of the metrics dimension.
   */
  displayName?: string;
  /**
   * To be exported to shoebox.
   */
  toBeExportedForShoebox?: boolean;
}
/**
* Metric specification version 1.
*/
export interface MetricSpecificationV1 {
/**
* Name of the metric.
*/
name?: string;
/**
* Display name of the metric.
*/
displayName?: string;
/**
* Description of the metric to be displayed.
*/
displayDescription?: string;
/**
* Metric units. Possible values include: 'NotSpecified', 'Percent', 'Count', 'Seconds',
* 'Milliseconds', 'Bytes', 'BytesPerSecond', 'CountPerSecond'
*/
unit?: MetricUnit;
/**
* Metric aggregation type. Possible values include: 'NotSpecified', 'None', 'Average',
* 'Minimum', 'Maximum', 'Total', 'Count'
*/
aggregationType?: MetricAggregationType;
/**
* Metric dimensions, other than default dimension which is resource.
*/
dimensions?: MetricDimensionV1[];
/**
* Set true to fill the gaps with zero.
*/
fillGapWithZero?: boolean;
/**
* Metric category. Possible values include: 'Capacity', 'Transaction'
*/
category?: MetricCategory;
/**
* Resource name override.
*/
resourceIdDimensionNameOverride?: string;
/**
* Support granularity of metrics.
*/
supportedTimeGrainTypes?: TimeGrain[];
/**
* Support metric aggregation type.
*/
supportedAggregationTypes?: MetricAggregationType[];
}
/**
* The network adapter position.
*/
export interface NetworkAdapterPosition {
/**
* The network group. Possible values include: 'None', 'NonRDMA', 'RDMA'
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly networkGroup?: NetworkGroup;
/**
* The port.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly port?: number;
}
/**
* Represents the networkAdapter on a device.
*/
export interface NetworkAdapter {
/**
* Instance ID of network adapter.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly adapterId?: string;
/**
* Hardware position of network adapter.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly adapterPosition?: NetworkAdapterPosition;
/**
* Logical index of the adapter.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly index?: number;
/**
* Node ID of the network adapter.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly nodeId?: string;
/**
* Network adapter name.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly networkAdapterName?: string;
/**
* Hardware label for the adapter.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly label?: string;
/**
* MAC address.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly macAddress?: string;
/**
* Link speed.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly linkSpeed?: number;
/**
* Value indicating whether this adapter is valid. Possible values include: 'Inactive', 'Active'
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly status?: NetworkAdapterStatus;
/**
* Value indicating whether this adapter is RDMA capable. Possible values include: 'Incapable',
* 'Capable'
*/
rdmaStatus?: NetworkAdapterRDMAStatus;
/**
* Value indicating whether this adapter has DHCP enabled. Possible values include: 'Disabled',
* 'Enabled'
*/
dhcpStatus?: NetworkAdapterDHCPStatus;
/**
* The IPv4 configuration of the network adapter.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly ipv4Configuration?: Ipv4Config;
/**
* The IPv6 configuration of the network adapter.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly ipv6Configuration?: Ipv6Config;
/**
* The IPv6 local address.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly ipv6LinkLocalAddress?: string;
/**
* The list of DNS Servers of the device.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly dnsServers?: string[];
}
/**
* The network settings of a device.
*/
export interface NetworkSettings extends ARMBaseModel {
/**
* The network adapter list on the device.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly networkAdapters?: NetworkAdapter[];
}
/**
* Operation display properties.
*/
export interface OperationDisplay {
/**
* Provider name.
*/
provider?: string;
/**
* The type of resource in which the operation is performed.
*/
resource?: string;
/**
* Operation to be performed on the resource.
*/
operation?: string;
/**
* Description of the operation to be performed.
*/
description?: string;
}
/**
* Service specification.
*/
export interface ServiceSpecification {
/**
* Metric specification as defined by shoebox.
*/
metricSpecifications?: MetricSpecificationV1[];
}
/**
* Operations.
*/
export interface Operation {
/**
* Name of the operation.
*/
name?: string;
/**
* Properties displayed for the operation.
*/
display?: OperationDisplay;
/**
* Origin of the operation.
*/
origin?: string;
/**
* Service specification.
*/
serviceSpecification?: ServiceSpecification;
}
/**
* Represents a single status change.
*/
export interface OrderStatus {
/**
* Status of the order as per the allowed status types. Possible values include: 'Untracked',
* 'AwaitingFulfilment', 'AwaitingPreparation', 'AwaitingShipment', 'Shipped', 'Arriving',
* 'Delivered', 'ReplacementRequested', 'LostDevice', 'Declined', 'ReturnInitiated',
* 'AwaitingReturnShipment', 'ShippedBack', 'CollectedAtMicrosoft'
*/
status: OrderState;
/**
* Time of status update.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly updateDateTime?: Date;
/**
* Comments related to this status change.
*/
comments?: string;
}
/**
* Tracking courier information.
*/
export interface TrackingInfo {
/**
* Serial number of the device being tracked.
*/
serialNumber?: string;
/**
* Name of the carrier used in the delivery.
*/
carrierName?: string;
/**
* Tracking ID of the shipment.
*/
trackingId?: string;
/**
* Tracking URL of the shipment.
*/
trackingUrl?: string;
}
/**
* The order details.
*/
export interface Order extends ARMBaseModel {
/**
* The contact details.
*/
contactInformation: ContactDetails;
/**
* The shipping address.
*/
shippingAddress: Address;
/**
* Current status of the order.
*/
currentStatus?: OrderStatus;
/**
* List of status changes in the order.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly orderHistory?: OrderStatus[];
/**
* Serial number of the device.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly serialNumber?: string;
/**
* Tracking information for the package delivered to the customer whether it has an original or a
* replacement device.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly deliveryTrackingInfo?: TrackingInfo[];
/**
* Tracking information for the package returned from the customer whether it has an original or
* a replacement device.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly returnTrackingInfo?: TrackingInfo[];
}
/**
 * Periodic timer event source.
 */
export interface PeriodicTimerSourceInfo {
  /**
   * The time of the day that results in a valid trigger. Schedule is computed with reference to
   * the time specified up to seconds. If timezone is not specified, the time will be considered
   * to be in device timezone. The value will always be returned as UTC time.
   */
  startTime: Date;
  /**
   * Periodic frequency at which timer event needs to be raised. Supports daily, hourly, minutes,
   * and seconds.
   */
  schedule: string;
  /**
   * Topic where periodic events are published to IoT device.
   */
  topic?: string;
}
/**
* Trigger details.
*/
export interface PeriodicTimerEventTrigger {
/**
* Polymorphic Discriminator
*/
kind: "PeriodicTimerEvent";
/**
* The path ID that uniquely identifies the object.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly id?: string;
/**
* The object name.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly name?: string;
/**
* The hierarchical type of the object.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly type?: string;
/**
* Periodic timer details.
*/
sourceInfo: PeriodicTimerSourceInfo;
/**
* Role Sink information.
*/
sinkInfo: RoleSinkInfo;
/**
* A custom context tag typically used to correlate the trigger against its usage. For example,
* if a periodic timer trigger is intended for certain specific IoT modules in the device, the
* tag can be the name or the image URL of the module.
*/
customContextTag?: string;
}
/**
 * Fields for tracking refresh job on the share.
 */
export interface RefreshDetails {
  /**
   * If a refresh share job is currently in progress on this share, this field indicates the ARM
   * resource ID of that job. The field is empty if no job is in progress.
   */
  inProgressRefreshJobId?: string;
  /**
   * Indicates the completed time for the last refresh job on this particular share, if any. This
   * could be a failed job or a successful job.
   */
  lastCompletedRefreshJobTimeInUTC?: Date;
  /**
   * Indicates the relative path of the error xml for the last refresh job on this particular
   * share, if any. This could be a failed job or a successful job.
   */
  errorManifestFile?: string;
  /**
   * Indicates the id of the last refresh job on this particular share, if any. This could be a
   * failed job or a successful job.
   */
  lastJob?: string;
}
/**
 * The security settings of a device.
 */
export interface SecuritySettings extends ARMBaseModel {
/**
 * Device administrator password as an encrypted string (encrypted using RSA PKCS #1) used to
 * sign into the local web UI of the device. The actual password should have at least 8
 * characters that are a combination of uppercase, lowercase, numeric, and special characters.
 */
deviceAdminPassword: AsymmetricEncryptedSecret;
}
/**
* The mapping between a particular user and the access type on the SMB share.
*/
export interface UserAccessRight {
/**
* User ID (already existing in the device).
*/
userId: string;
/**
* Type of access to be allowed for the user. Possible values include: 'Change', 'Read', 'Custom'
*/
accessType: ShareAccessType;
}
/**
 * Represents a share on the Data Box Edge/Gateway device.
 */
export interface Share extends ARMBaseModel {
/**
 * Description for the share.
 */
description?: string;
/**
 * Current status of the share. Possible values include: 'Online', 'Offline'
 */
shareStatus: ShareStatus;
/**
 * Current monitoring status of the share. Possible values include: 'Enabled', 'Disabled'
 */
monitoringStatus: MonitoringStatus;
/**
 * Azure container mapping for the share.
 */
azureContainerInfo?: AzureContainerInfo;
/**
 * Access protocol to be used by the share. Possible values include: 'SMB', 'NFS'
 */
accessProtocol: ShareAccessProtocol;
/**
 * Mapping of users and corresponding access rights on the share (required for SMB protocol).
 */
userAccessRights?: UserAccessRight[];
/**
 * List of IP addresses and corresponding access rights on the share (required for NFS protocol).
 */
clientAccessRights?: ClientAccessRight[];
/**
 * Details of the refresh job on this share.
 */
refreshDetails?: RefreshDetails;
/**
 * Share mount point to the role.
 * **NOTE: This property will not be serialized. It can only be populated by the server.**
 */
readonly shareMappings?: MountPointMap[];
/**
 * Data policy of the share. Possible values include: 'Cloud', 'Local'
 */
dataPolicy?: DataPolicy;
}
/**
* Specifies the mapping between this particular user and the type of access he has on shares on
* this device.
*/
export interface ShareAccessRight {
/**
* The share ID.
*/
shareId: string;
/**
* Type of access to be allowed on the share for this user. Possible values include: 'Change',
* 'Read', 'Custom'
*/
accessType: ShareAccessType;
}
/**
* The storage account credential.
*/
export interface StorageAccountCredential extends ARMBaseModel {
/**
* Alias for the storage account.
*/
alias: string;
/**
* Username for the storage account.
*/
userName?: string;
/**
* Encrypted storage key.
*/
accountKey?: AsymmetricEncryptedSecret;
/**
* Connection string for the storage account. Use this string if username and account key are not
* specified.
*/
connectionString?: string;
/**
* Signifies whether SSL needs to be enabled or not. Possible values include: 'Enabled',
* 'Disabled'
*/
sslStatus: SSLStatus;
/**
* Blob end point for private clouds.
*/
blobDomainName?: string;
/**
* Type of storage accessed on the storage account. Possible values include:
* 'GeneralPurposeStorage', 'BlobStorage'
*/
accountType: AccountType;
}
/**
 * Details about ongoing updates and availability of updates on the device.
 */
export interface UpdateSummary extends ARMBaseModel {
/**
 * The current version of the device in the format: 1.2.17312.13
 */
deviceVersionNumber?: string;
/**
 * The current version of the device in text format.
 */
friendlyDeviceVersionName?: string;
/**
 * The last time when a scan was done on the device.
 */
deviceLastScannedDateTime?: Date;
/**
 * The time when the last scan job was completed (success/cancelled/failed) on the appliance.
 */
lastCompletedScanJobDateTime?: Date;
/**
 * The time when the last Download job was completed (success/cancelled/failed) on the appliance.
 * **NOTE: This property will not be serialized. It can only be populated by the server.**
 */
readonly lastCompletedDownloadJobDateTime?: Date;
/**
 * The time when the last Install job was completed (success/cancelled/failed) on the appliance.
 * **NOTE: This property will not be serialized. It can only be populated by the server.**
 */
readonly lastCompletedInstallJobDateTime?: Date;
/**
 * The number of updates available for the current device version as per the last device scan.
 * **NOTE: This property will not be serialized. It can only be populated by the server.**
 */
readonly totalNumberOfUpdatesAvailable?: number;
/**
 * The total number of items pending download.
 * **NOTE: This property will not be serialized. It can only be populated by the server.**
 */
readonly totalNumberOfUpdatesPendingDownload?: number;
/**
 * The total number of items pending install.
 * **NOTE: This property will not be serialized. It can only be populated by the server.**
 */
readonly totalNumberOfUpdatesPendingInstall?: number;
/**
 * Indicates if updates are available and at least one of the updates needs a reboot. Possible
 * values include: 'NeverReboots', 'RequiresReboot', 'RequestReboot'
 * **NOTE: This property will not be serialized. It can only be populated by the server.**
 */
readonly rebootBehavior?: InstallRebootBehavior;
/**
 * The current update operation. Possible values include: 'None', 'Scan', 'Download', 'Install'
 * **NOTE: This property will not be serialized. It can only be populated by the server.**
 */
readonly ongoingUpdateOperation?: UpdateOperation;
/**
 * The job ID of the download job in progress.
 * **NOTE: This property will not be serialized. It can only be populated by the server.**
 */
readonly inProgressDownloadJobId?: string;
/**
 * The job ID of the install job in progress.
 * **NOTE: This property will not be serialized. It can only be populated by the server.**
 */
readonly inProgressInstallJobId?: string;
/**
 * The time when the currently running download (if any) started.
 * **NOTE: This property will not be serialized. It can only be populated by the server.**
 */
readonly inProgressDownloadJobStartedDateTime?: Date;
/**
 * The time when the currently running install (if any) started.
 * **NOTE: This property will not be serialized. It can only be populated by the server.**
 */
readonly inProgressInstallJobStartedDateTime?: Date;
/**
 * The list of updates available for install.
 * **NOTE: This property will not be serialized. It can only be populated by the server.**
 */
readonly updateTitles?: string[];
/**
 * The total size of updates available for download in bytes.
 * **NOTE: This property will not be serialized. It can only be populated by the server.**
 */
readonly totalUpdateSizeInBytes?: number;
}
/**
* The upload certificate request.
*/
export interface UploadCertificateRequest {
/**
* The authentication type. Possible values include: 'Invalid', 'AzureActiveDirectory'
*/
authenticationType?: AuthenticationType;
/**
* The base64 encoded certificate raw data.
*/
certificate: string;
}
/**
* The upload registration certificate response.
*/
export interface UploadCertificateResponse {
/**
* Specifies authentication type. Possible values include: 'Invalid', 'AzureActiveDirectory'
*/
authType?: AuthenticationType;
/**
* The resource ID of the Data Box Edge/Gateway device.
*/
resourceId: string;
/**
* Azure Active Directory tenant authority.
*/
aadAuthority: string;
/**
* Azure Active Directory tenant ID.
*/
aadTenantId: string;
/**
* Azure Active Directory service principal client ID.
*/
servicePrincipalClientId: string;
/**
* Azure Active Directory service principal object ID.
*/
servicePrincipalObjectId: string;
/**
* The azure management endpoint audience.
*/
azureManagementEndpointAudience: string;
}
/**
* Represents a user who has access to one or more shares on the Data Box Edge/Gateway device.
*/
export interface User extends ARMBaseModel {
/**
* The password details.
*/
encryptedPassword?: AsymmetricEncryptedSecret;
/**
* List of shares that the user has rights on. This field should not be specified during user
* creation.
*/
shareAccessRights?: ShareAccessRight[];
}
/**
* Optional Parameters.
*/
export interface DevicesListBySubscriptionOptionalParams extends msRest.RequestOptionsBase {
/**
* Specify $expand=details to populate additional fields related to the resource or Specify
* $skipToken=<token> to populate the next page in the list.
*/
expand?: string;
}
/**
* Optional Parameters.
*/
export interface DevicesListByResourceGroupOptionalParams extends msRest.RequestOptionsBase {
/**
* Specify $expand=details to populate additional fields related to the resource or Specify
* $skipToken=<token> to populate the next page in the list.
*/
expand?: string;
}
/**
* Optional Parameters.
*/
export interface DevicesUpdateOptionalParams extends msRest.RequestOptionsBase {
/**
* The tags attached to the Data Box Edge/Gateway resource.
*/
tags?: { [propertyName: string]: string };
}
/**
* Optional Parameters.
*/
export interface DevicesUploadCertificateOptionalParams extends msRest.RequestOptionsBase {
/**
* The authentication type. Possible values include: 'Invalid', 'AzureActiveDirectory'
*/
authenticationType?: AuthenticationType;
}
/**
 * Optional Parameters.
 */
export interface TriggersListByDataBoxEdgeDeviceOptionalParams extends msRest.RequestOptionsBase {
/**
 * Specify $filter='CustomContextTag eq <tag>' to filter on custom context tag property.
 * NOTE(review): the property is named `expand` but the description documents a $filter
 * expression — presumably the generator mapped the service's filter query parameter onto
 * `expand`; confirm against the Triggers_ListByDataBoxEdgeDevice REST operation.
 */
expand?: string;
}
/**
* Optional Parameters.
*/
export interface UsersCreateOrUpdateOptionalParams extends msRest.RequestOptionsBase {
/**
* The password details.
*/
encryptedPassword?: AsymmetricEncryptedSecret;
/**
* List of shares that the user has rights on. This field should not be specified during user
* creation.
*/
shareAccessRights?: ShareAccessRight[];
}
/**
* Optional Parameters.
*/
export interface UsersBeginCreateOrUpdateOptionalParams extends msRest.RequestOptionsBase {
/**
* The password details.
*/
encryptedPassword?: AsymmetricEncryptedSecret;
/**
* List of shares that the user has rights on. This field should not be specified during user
* creation.
*/
shareAccessRights?: ShareAccessRight[];
}
/**
* An interface representing DataBoxEdgeManagementClientOptions.
*/
export interface DataBoxEdgeManagementClientOptions extends AzureServiceClientOptions {
baseUri?: string;
}
/**
* @interface
* The list of operations used for the discovery of available provider operations.
* @extends Array<Operation>
*/
export interface OperationsList extends Array<Operation> {
/**
* Link to the next set of results.
*/
nextLink?: string;
}
/**
* @interface
* The collection of Data Box Edge/Gateway devices.
* @extends Array<DataBoxEdgeDevice>
*/
export interface DataBoxEdgeDeviceList extends Array<DataBoxEdgeDevice> {
/**
* Link to the next set of results.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly nextLink?: string;
}
/**
* @interface
* Collection of alerts.
* @extends Array<Alert>
*/
export interface AlertList extends Array<Alert> {
/**
* Link to the next set of results.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly nextLink?: string;
}
/**
* @interface
* The collection of bandwidth schedules.
* @extends Array<BandwidthSchedule>
*/
export interface BandwidthSchedulesList extends Array<BandwidthSchedule> {
/**
* Link to the next set of results.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly nextLink?: string;
}
/**
* @interface
* List of order entities.
* @extends Array<Order>
*/
export interface OrderList extends Array<Order> {
/**
* Link to the next set of results.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly nextLink?: string;
}
/**
* @interface
* Collection of all the roles on the Data Box Edge device.
* @extends Array<RoleUnion>
*/
export interface RoleList extends Array<RoleUnion> {
/**
* Link to the next set of results.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly nextLink?: string;
}
/**
* @interface
* Collection of all the shares on the Data Box Edge/Gateway device.
* @extends Array<Share>
*/
export interface ShareList extends Array<Share> {
/**
* Link to the next set of results.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly nextLink?: string;
}
/**
* @interface
* The collection of storage account credentials.
* @extends Array<StorageAccountCredential>
*/
export interface StorageAccountCredentialList extends Array<StorageAccountCredential> {
/**
* Link to the next set of results.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly nextLink?: string;
}
/**
* @interface
* Collection of all trigger on the data box edge device.
* @extends Array<TriggerUnion>
*/
export interface TriggerList extends Array<TriggerUnion> {
/**
* Link to the next set of results.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly nextLink?: string;
}
/**
* @interface
* Collection of users.
* @extends Array<User>
*/
export interface UserList extends Array<User> {
/**
* Link to the next set of results.
* **NOTE: This property will not be serialized. It can only be populated by the server.**
*/
readonly nextLink?: string;
}
/**
* Defines values for AlertSeverity.
* Possible values include: 'Informational', 'Warning', 'Critical'
* @readonly
* @enum {string}
*/
export type AlertSeverity = 'Informational' | 'Warning' | 'Critical';
/**
* Defines values for EncryptionAlgorithm.
* Possible values include: 'None', 'AES256', 'RSAES_PKCS1_v_1_5'
* @readonly
* @enum {string}
*/
export type EncryptionAlgorithm = 'None' | 'AES256' | 'RSAES_PKCS1_v_1_5';
/**
* Defines values for AzureContainerDataFormat.
* Possible values include: 'BlockBlob', 'PageBlob', 'AzureFile'
* @readonly
* @enum {string}
*/
export type AzureContainerDataFormat = 'BlockBlob' | 'PageBlob' | 'AzureFile';
/**
* Defines values for DayOfWeek.
* Possible values include: 'Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday',
* 'Saturday'
* @readonly
* @enum {string}
*/
export type DayOfWeek = 'Sunday' | 'Monday' | 'Tuesday' | 'Wednesday' | 'Thursday' | 'Friday' | 'Saturday';
/**
* Defines values for ClientPermissionType.
* Possible values include: 'NoAccess', 'ReadOnly', 'ReadWrite'
* @readonly
* @enum {string}
*/
export type ClientPermissionType = 'NoAccess' | 'ReadOnly' | 'ReadWrite';
/**
* Defines values for SkuName.
* Possible values include: 'Gateway', 'Edge'
* @readonly
* @enum {string}
*/
export type SkuName = 'Gateway' | 'Edge';
/**
* Defines values for SkuTier.
* Possible values include: 'Standard'
* @readonly
* @enum {string}
*/
export type SkuTier = 'Standard';
/**
* Defines values for DataBoxEdgeDeviceStatus.
* Possible values include: 'ReadyToSetup', 'Online', 'Offline', 'NeedsAttention', 'Disconnected',
* 'PartiallyDisconnected'
* @readonly
* @enum {string}
*/
export type DataBoxEdgeDeviceStatus = 'ReadyToSetup' | 'Online' | 'Offline' | 'NeedsAttention' | 'Disconnected' | 'PartiallyDisconnected';
/**
* Defines values for DeviceType.
* Possible values include: 'DataBoxEdgeDevice'
* @readonly
* @enum {string}
*/
export type DeviceType = 'DataBoxEdgeDevice';
/**
* Defines values for RoleTypes.
* Possible values include: 'IOT', 'ASA', 'Functions', 'Cognitive'
* @readonly
* @enum {string}
*/
export type RoleTypes = 'IOT' | 'ASA' | 'Functions' | 'Cognitive';
/**
* Defines values for PlatformType.
* Possible values include: 'Windows', 'Linux'
* @readonly
* @enum {string}
*/
export type PlatformType = 'Windows' | 'Linux';
/**
* Defines values for RoleStatus.
* Possible values include: 'Enabled', 'Disabled'
* @readonly
* @enum {string}
*/
export type RoleStatus = 'Enabled' | 'Disabled';
/**
* Defines values for JobStatus.
* Possible values include: 'Invalid', 'Running', 'Succeeded', 'Failed', 'Canceled', 'Paused',
* 'Scheduled'
* @readonly
* @enum {string}
*/
export type JobStatus = 'Invalid' | 'Running' | 'Succeeded' | 'Failed' | 'Canceled' | 'Paused' | 'Scheduled';
/**
* Defines values for JobType.
* Possible values include: 'Invalid', 'ScanForUpdates', 'DownloadUpdates', 'InstallUpdates',
* 'RefreshShare'
* @readonly
* @enum {string}
*/
export type JobType = 'Invalid' | 'ScanForUpdates' | 'DownloadUpdates' | 'InstallUpdates' | 'RefreshShare';
/**
* Defines values for UpdateOperationStage.
* Possible values include: 'Unknown', 'Initial', 'ScanStarted', 'ScanComplete', 'ScanFailed',
* 'DownloadStarted', 'DownloadComplete', 'DownloadFailed', 'InstallStarted', 'InstallComplete',
* 'InstallFailed', 'RebootInitiated', 'Success', 'Failure', 'RescanStarted', 'RescanComplete',
* 'RescanFailed'
* @readonly
* @enum {string}
*/
export type UpdateOperationStage = 'Unknown' | 'Initial' | 'ScanStarted' | 'ScanComplete' | 'ScanFailed' | 'DownloadStarted' | 'DownloadComplete' | 'DownloadFailed' | 'InstallStarted' | 'InstallComplete' | 'InstallFailed' | 'RebootInitiated' | 'Success' | 'Failure' | 'RescanStarted' | 'RescanComplete' | 'RescanFailed';
/**
* Defines values for DownloadPhase.
* Possible values include: 'Unknown', 'Initializing', 'Downloading', 'Verifying'
* @readonly
* @enum {string}
*/
export type DownloadPhase = 'Unknown' | 'Initializing' | 'Downloading' | 'Verifying';
/**
* Defines values for MetricUnit.
* Possible values include: 'NotSpecified', 'Percent', 'Count', 'Seconds', 'Milliseconds', 'Bytes',
* 'BytesPerSecond', 'CountPerSecond'
* @readonly
* @enum {string}
*/
export type MetricUnit = 'NotSpecified' | 'Percent' | 'Count' | 'Seconds' | 'Milliseconds' | 'Bytes' | 'BytesPerSecond' | 'CountPerSecond';
/**
* Defines values for MetricAggregationType.
* Possible values include: 'NotSpecified', 'None', 'Average', 'Minimum', 'Maximum', 'Total',
* 'Count'
* @readonly
* @enum {string}
*/
export type MetricAggregationType = 'NotSpecified' | 'None' | 'Average' | 'Minimum' | 'Maximum' | 'Total' | 'Count';
/**
* Defines values for MetricCategory.
* Possible values include: 'Capacity', 'Transaction'
* @readonly
* @enum {string}
*/
export type MetricCategory = 'Capacity' | 'Transaction';
/**
* Defines values for TimeGrain.
* Possible values include: 'PT1M', 'PT5M', 'PT15M', 'PT30M', 'PT1H', 'PT6H', 'PT12H', 'PT1D'
* @readonly
* @enum {string}
*/
export type TimeGrain = 'PT1M' | 'PT5M' | 'PT15M' | 'PT30M' | 'PT1H' | 'PT6H' | 'PT12H' | 'PT1D';
/**
* Defines values for NetworkGroup.
* Possible values include: 'None', 'NonRDMA', 'RDMA'
* @readonly
* @enum {string}
*/
export type NetworkGroup = 'None' | 'NonRDMA' | 'RDMA';
/**
* Defines values for NetworkAdapterStatus.
* Possible values include: 'Inactive', 'Active'
* @readonly
* @enum {string}
*/
export type NetworkAdapterStatus = 'Inactive' | 'Active';
/**
* Defines values for NetworkAdapterRDMAStatus.
* Possible values include: 'Incapable', 'Capable'
* @readonly
* @enum {string}
*/
export type NetworkAdapterRDMAStatus = 'Incapable' | 'Capable';
/**
* Defines values for NetworkAdapterDHCPStatus.
* Possible values include: 'Disabled', 'Enabled'
* @readonly
* @enum {string}
*/
export type NetworkAdapterDHCPStatus = 'Disabled' | 'Enabled';
/**
* Defines values for OrderState.
* Possible values include: 'Untracked', 'AwaitingFulfilment', 'AwaitingPreparation',
* 'AwaitingShipment', 'Shipped', 'Arriving', 'Delivered', 'ReplacementRequested', 'LostDevice',
* 'Declined', 'ReturnInitiated', 'AwaitingReturnShipment', 'ShippedBack', 'CollectedAtMicrosoft'
* @readonly
* @enum {string}
*/
export type OrderState = 'Untracked' | 'AwaitingFulfilment' | 'AwaitingPreparation' | 'AwaitingShipment' | 'Shipped' | 'Arriving' | 'Delivered' | 'ReplacementRequested' | 'LostDevice' | 'Declined' | 'ReturnInitiated' | 'AwaitingReturnShipment' | 'ShippedBack' | 'CollectedAtMicrosoft';
/**
* Defines values for AuthenticationType.
* Possible values include: 'Invalid', 'AzureActiveDirectory'
* @readonly
* @enum {string}
*/
export type AuthenticationType = 'Invalid' | 'AzureActiveDirectory';
/**
* Defines values for ShareStatus.
* Possible values include: 'Online', 'Offline'
* @readonly
* @enum {string}
*/
export type ShareStatus = 'Online' | 'Offline';
/**
* Defines values for MonitoringStatus.
* Possible values include: 'Enabled', 'Disabled'
* @readonly
* @enum {string}
*/
export type MonitoringStatus = 'Enabled' | 'Disabled';
/**
* Defines values for ShareAccessProtocol.
* Possible values include: 'SMB', 'NFS'
* @readonly
* @enum {string}
*/
export type ShareAccessProtocol = 'SMB' | 'NFS';
/**
* Defines values for ShareAccessType.
* Possible values include: 'Change', 'Read', 'Custom'
* @readonly
* @enum {string}
*/
export type ShareAccessType = 'Change' | 'Read' | 'Custom';
/**
* Defines values for DataPolicy.
* Possible values include: 'Cloud', 'Local'
* @readonly
* @enum {string}
*/
export type DataPolicy = 'Cloud' | 'Local';
/**
* Defines values for SSLStatus.
* Possible values include: 'Enabled', 'Disabled'
* @readonly
* @enum {string}
*/
export type SSLStatus = 'Enabled' | 'Disabled';
/**
* Defines values for AccountType.
* Possible values include: 'GeneralPurposeStorage', 'BlobStorage'
* @readonly
* @enum {string}
*/
export type AccountType = 'GeneralPurposeStorage' | 'BlobStorage';
/**
* Defines values for InstallRebootBehavior.
* Possible values include: 'NeverReboots', 'RequiresReboot', 'RequestReboot'
* @readonly
* @enum {string}
*/
export type InstallRebootBehavior = 'NeverReboots' | 'RequiresReboot' | 'RequestReboot';
/**
* Defines values for UpdateOperation.
* Possible values include: 'None', 'Scan', 'Download', 'Install'
* @readonly
* @enum {string}
*/
export type UpdateOperation = 'None' | 'Scan' | 'Download' | 'Install';
/**
* Contains response data for the list operation.
*/
export type OperationsListResponse = OperationsList & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: OperationsList;
};
};
/**
* Contains response data for the listNext operation.
*/
export type OperationsListNextResponse = OperationsList & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: OperationsList;
};
};
/**
* Contains response data for the listBySubscription operation.
*/
export type DevicesListBySubscriptionResponse = DataBoxEdgeDeviceList & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: DataBoxEdgeDeviceList;
};
};
/**
* Contains response data for the listByResourceGroup operation.
*/
export type DevicesListByResourceGroupResponse = DataBoxEdgeDeviceList & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: DataBoxEdgeDeviceList;
};
};
/**
* Contains response data for the get operation.
*/
export type DevicesGetResponse = DataBoxEdgeDevice & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: DataBoxEdgeDevice;
};
};
/**
* Contains response data for the createOrUpdate operation.
*/
export type DevicesCreateOrUpdateResponse = DataBoxEdgeDevice & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: DataBoxEdgeDevice;
};
};
/**
* Contains response data for the update operation.
*/
export type DevicesUpdateResponse = DataBoxEdgeDevice & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: DataBoxEdgeDevice;
};
};
/**
* Contains response data for the getExtendedInformation operation.
*/
export type DevicesGetExtendedInformationResponse = DataBoxEdgeDeviceExtendedInfo & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: DataBoxEdgeDeviceExtendedInfo;
};
};
/**
* Contains response data for the getNetworkSettings operation.
*/
export type DevicesGetNetworkSettingsResponse = NetworkSettings & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: NetworkSettings;
};
};
/**
* Contains response data for the getUpdateSummary operation.
*/
export type DevicesGetUpdateSummaryResponse = UpdateSummary & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: UpdateSummary;
};
};
/**
* Contains response data for the uploadCertificate operation.
*/
export type DevicesUploadCertificateResponse = UploadCertificateResponse & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: UploadCertificateResponse;
};
};
/**
* Contains response data for the beginCreateOrUpdate operation.
*/
export type DevicesBeginCreateOrUpdateResponse = DataBoxEdgeDevice & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: DataBoxEdgeDevice;
};
};
/**
* Contains response data for the listBySubscriptionNext operation.
*/
export type DevicesListBySubscriptionNextResponse = DataBoxEdgeDeviceList & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: DataBoxEdgeDeviceList;
};
};
/**
* Contains response data for the listByResourceGroupNext operation.
*/
export type DevicesListByResourceGroupNextResponse = DataBoxEdgeDeviceList & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: DataBoxEdgeDeviceList;
};
};
/**
* Contains response data for the listByDataBoxEdgeDevice operation.
*/
export type AlertsListByDataBoxEdgeDeviceResponse = AlertList & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: AlertList;
};
};
/**
* Contains response data for the get operation.
*/
export type AlertsGetResponse = Alert & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: Alert;
};
};
/**
* Contains response data for the listByDataBoxEdgeDeviceNext operation.
*/
export type AlertsListByDataBoxEdgeDeviceNextResponse = AlertList & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: AlertList;
};
};
/**
* Contains response data for the listByDataBoxEdgeDevice operation.
*/
export type BandwidthSchedulesListByDataBoxEdgeDeviceResponse = BandwidthSchedulesList & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: BandwidthSchedulesList;
};
};
/**
* Contains response data for the get operation.
*/
export type BandwidthSchedulesGetResponse = BandwidthSchedule & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: BandwidthSchedule;
};
};
/**
* Contains response data for the createOrUpdate operation.
*/
export type BandwidthSchedulesCreateOrUpdateResponse = BandwidthSchedule & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: BandwidthSchedule;
};
};
/**
* Contains response data for the beginCreateOrUpdate operation.
*/
export type BandwidthSchedulesBeginCreateOrUpdateResponse = BandwidthSchedule & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: BandwidthSchedule;
};
};
/**
 * Contains response data for the BandwidthSchedules.listByDataBoxEdgeDeviceNext operation.
 */
export type BandwidthSchedulesListByDataBoxEdgeDeviceNextResponse = BandwidthSchedulesList & {
/**
 * The underlying HTTP response.
 */
_response: msRest.HttpResponse & {
/**
 * The response body as text (string format)
 */
bodyAsText: string;
/**
 * The response body as parsed JSON or XML
 */
parsedBody: BandwidthSchedulesList;
};
};
/**
 * Contains response data for the Jobs.get operation.
 */
export type JobsGetResponse = Job & {
/**
 * The underlying HTTP response.
 */
_response: msRest.HttpResponse & {
/**
 * The response body as text (string format)
 */
bodyAsText: string;
/**
 * The response body as parsed JSON or XML
 */
parsedBody: Job;
};
};
/**
 * Contains response data for the OperationsStatus.get operation.
 */
export type OperationsStatusGetResponse = Job & {
/**
 * The underlying HTTP response.
 */
_response: msRest.HttpResponse & {
/**
 * The response body as text (string format)
 */
bodyAsText: string;
/**
 * The response body as parsed JSON or XML
 */
parsedBody: Job;
};
};
/**
 * Contains response data for the Orders.listByDataBoxEdgeDevice operation.
 */
export type OrdersListByDataBoxEdgeDeviceResponse = OrderList & {
/**
 * The underlying HTTP response.
 */
_response: msRest.HttpResponse & {
/**
 * The response body as text (string format)
 */
bodyAsText: string;
/**
 * The response body as parsed JSON or XML
 */
parsedBody: OrderList;
};
};
/**
 * Contains response data for the Orders.get operation.
 */
export type OrdersGetResponse = Order & {
/**
 * The underlying HTTP response.
 */
_response: msRest.HttpResponse & {
/**
 * The response body as text (string format)
 */
bodyAsText: string;
/**
 * The response body as parsed JSON or XML
 */
parsedBody: Order;
};
};
/**
 * Contains response data for the Orders.createOrUpdate operation.
 */
export type OrdersCreateOrUpdateResponse = Order & {
/**
 * The underlying HTTP response.
 */
_response: msRest.HttpResponse & {
/**
 * The response body as text (string format)
 */
bodyAsText: string;
/**
 * The response body as parsed JSON or XML
 */
parsedBody: Order;
};
};
/**
 * Contains response data for the Orders.beginCreateOrUpdate operation.
 */
export type OrdersBeginCreateOrUpdateResponse = Order & {
/**
 * The underlying HTTP response.
 */
_response: msRest.HttpResponse & {
/**
 * The response body as text (string format)
 */
bodyAsText: string;
/**
 * The response body as parsed JSON or XML
 */
parsedBody: Order;
};
};
/**
 * Contains response data for the Orders.listByDataBoxEdgeDeviceNext operation.
 */
export type OrdersListByDataBoxEdgeDeviceNextResponse = OrderList & {
/**
 * The underlying HTTP response.
 */
_response: msRest.HttpResponse & {
/**
 * The response body as text (string format)
 */
bodyAsText: string;
/**
 * The response body as parsed JSON or XML
 */
parsedBody: OrderList;
};
};
/**
 * Contains response data for the Roles.listByDataBoxEdgeDevice operation.
 */
export type RolesListByDataBoxEdgeDeviceResponse = RoleList & {
/**
 * The underlying HTTP response.
 */
_response: msRest.HttpResponse & {
/**
 * The response body as text (string format)
 */
bodyAsText: string;
/**
 * The response body as parsed JSON or XML
 */
parsedBody: RoleList;
};
};
/**
 * Contains response data for the Roles.get operation.
 */
export type RolesGetResponse = RoleUnion & {
/**
 * The underlying HTTP response.
 */
_response: msRest.HttpResponse & {
/**
 * The response body as text (string format)
 */
bodyAsText: string;
/**
 * The response body as parsed JSON or XML
 */
parsedBody: RoleUnion;
};
};
/**
 * Contains response data for the Roles.createOrUpdate operation.
 */
export type RolesCreateOrUpdateResponse = RoleUnion & {
/**
 * The underlying HTTP response.
 */
_response: msRest.HttpResponse & {
/**
 * The response body as text (string format)
 */
bodyAsText: string;
/**
 * The response body as parsed JSON or XML
 */
parsedBody: RoleUnion;
};
};
/**
 * Contains response data for the Roles.beginCreateOrUpdate operation.
 */
export type RolesBeginCreateOrUpdateResponse = RoleUnion & {
/**
 * The underlying HTTP response.
 */
_response: msRest.HttpResponse & {
/**
 * The response body as text (string format)
 */
bodyAsText: string;
/**
 * The response body as parsed JSON or XML
 */
parsedBody: RoleUnion;
};
};
/**
 * Contains response data for the Roles.listByDataBoxEdgeDeviceNext operation.
 */
export type RolesListByDataBoxEdgeDeviceNextResponse = RoleList & {
/**
 * The underlying HTTP response.
 */
_response: msRest.HttpResponse & {
/**
 * The response body as text (string format)
 */
bodyAsText: string;
/**
 * The response body as parsed JSON or XML
 */
parsedBody: RoleList;
};
};
/**
 * Contains response data for the Shares.listByDataBoxEdgeDevice operation.
 */
export type SharesListByDataBoxEdgeDeviceResponse = ShareList & {
/**
 * The underlying HTTP response.
 */
_response: msRest.HttpResponse & {
/**
 * The response body as text (string format)
 */
bodyAsText: string;
/**
 * The response body as parsed JSON or XML
 */
parsedBody: ShareList;
};
};
/**
 * Contains response data for the Shares.get operation.
 */
export type SharesGetResponse = Share & {
/**
 * The underlying HTTP response.
 */
_response: msRest.HttpResponse & {
/**
 * The response body as text (string format)
 */
bodyAsText: string;
/**
 * The response body as parsed JSON or XML
 */
parsedBody: Share;
};
};
/**
 * Contains response data for the Shares.createOrUpdate operation.
 */
export type SharesCreateOrUpdateResponse = Share & {
/**
 * The underlying HTTP response.
 */
_response: msRest.HttpResponse & {
/**
 * The response body as text (string format)
 */
bodyAsText: string;
/**
 * The response body as parsed JSON or XML
 */
parsedBody: Share;
};
};
/**
 * Contains response data for the Shares.beginCreateOrUpdate operation.
 */
export type SharesBeginCreateOrUpdateResponse = Share & {
/**
 * The underlying HTTP response.
 */
_response: msRest.HttpResponse & {
/**
 * The response body as text (string format)
 */
bodyAsText: string;
/**
 * The response body as parsed JSON or XML
 */
parsedBody: Share;
};
};
/**
 * Contains response data for the Shares.listByDataBoxEdgeDeviceNext operation.
 */
export type SharesListByDataBoxEdgeDeviceNextResponse = ShareList & {
/**
 * The underlying HTTP response.
 */
_response: msRest.HttpResponse & {
/**
 * The response body as text (string format)
 */
bodyAsText: string;
/**
 * The response body as parsed JSON or XML
 */
parsedBody: ShareList;
};
};
/**
 * Contains response data for the StorageAccountCredentials.listByDataBoxEdgeDevice operation.
 */
export type StorageAccountCredentialsListByDataBoxEdgeDeviceResponse = StorageAccountCredentialList & {
/**
 * The underlying HTTP response.
 */
_response: msRest.HttpResponse & {
/**
 * The response body as text (string format)
 */
bodyAsText: string;
/**
 * The response body as parsed JSON or XML
 */
parsedBody: StorageAccountCredentialList;
};
};
/**
 * Contains response data for the StorageAccountCredentials.get operation.
 */
export type StorageAccountCredentialsGetResponse = StorageAccountCredential & {
/**
 * The underlying HTTP response.
 */
_response: msRest.HttpResponse & {
/**
 * The response body as text (string format)
 */
bodyAsText: string;
/**
 * The response body as parsed JSON or XML
 */
parsedBody: StorageAccountCredential;
};
};
/**
 * Contains response data for the StorageAccountCredentials.createOrUpdate operation.
 */
export type StorageAccountCredentialsCreateOrUpdateResponse = StorageAccountCredential & {
/**
 * The underlying HTTP response.
 */
_response: msRest.HttpResponse & {
/**
 * The response body as text (string format)
 */
bodyAsText: string;
/**
 * The response body as parsed JSON or XML
 */
parsedBody: StorageAccountCredential;
};
};
/**
 * Contains response data for the StorageAccountCredentials.beginCreateOrUpdate operation.
 */
export type StorageAccountCredentialsBeginCreateOrUpdateResponse = StorageAccountCredential & {
/**
 * The underlying HTTP response.
 */
_response: msRest.HttpResponse & {
/**
 * The response body as text (string format)
 */
bodyAsText: string;
/**
 * The response body as parsed JSON or XML
 */
parsedBody: StorageAccountCredential;
};
};
/**
 * Contains response data for the StorageAccountCredentials.listByDataBoxEdgeDeviceNext operation.
 */
export type StorageAccountCredentialsListByDataBoxEdgeDeviceNextResponse = StorageAccountCredentialList & {
/**
 * The underlying HTTP response.
 */
_response: msRest.HttpResponse & {
/**
 * The response body as text (string format)
 */
bodyAsText: string;
/**
 * The response body as parsed JSON or XML
 */
parsedBody: StorageAccountCredentialList;
};
};
/**
 * Contains response data for the Triggers.listByDataBoxEdgeDevice operation.
 */
export type TriggersListByDataBoxEdgeDeviceResponse = TriggerList & {
/**
 * The underlying HTTP response.
 */
_response: msRest.HttpResponse & {
/**
 * The response body as text (string format)
 */
bodyAsText: string;
/**
 * The response body as parsed JSON or XML
 */
parsedBody: TriggerList;
};
};
/**
 * Contains response data for the Triggers.get operation.
 */
export type TriggersGetResponse = TriggerUnion & {
/**
 * The underlying HTTP response.
 */
_response: msRest.HttpResponse & {
/**
 * The response body as text (string format)
 */
bodyAsText: string;
/**
 * The response body as parsed JSON or XML
 */
parsedBody: TriggerUnion;
};
};
/**
 * Contains response data for the Triggers.createOrUpdate operation.
 */
export type TriggersCreateOrUpdateResponse = TriggerUnion & {
/**
 * The underlying HTTP response.
 */
_response: msRest.HttpResponse & {
/**
 * The response body as text (string format)
 */
bodyAsText: string;
/**
 * The response body as parsed JSON or XML
 */
parsedBody: TriggerUnion;
};
};
/**
 * Contains response data for the Triggers.beginCreateOrUpdate operation.
 */
export type TriggersBeginCreateOrUpdateResponse = TriggerUnion & {
/**
 * The underlying HTTP response.
 */
_response: msRest.HttpResponse & {
/**
 * The response body as text (string format)
 */
bodyAsText: string;
/**
 * The response body as parsed JSON or XML
 */
parsedBody: TriggerUnion;
};
};
/**
 * Contains response data for the Triggers.listByDataBoxEdgeDeviceNext operation.
 */
export type TriggersListByDataBoxEdgeDeviceNextResponse = TriggerList & {
/**
 * The underlying HTTP response.
 */
_response: msRest.HttpResponse & {
/**
 * The response body as text (string format)
 */
bodyAsText: string;
/**
 * The response body as parsed JSON or XML
 */
parsedBody: TriggerList;
};
};
/**
 * Contains response data for the Users.listByDataBoxEdgeDevice operation.
 */
export type UsersListByDataBoxEdgeDeviceResponse = UserList & {
/**
 * The underlying HTTP response.
 */
_response: msRest.HttpResponse & {
/**
 * The response body as text (string format)
 */
bodyAsText: string;
/**
 * The response body as parsed JSON or XML
 */
parsedBody: UserList;
};
};
/**
 * Contains response data for the Users.get operation.
 */
export type UsersGetResponse = User & {
/**
 * The underlying HTTP response.
 */
_response: msRest.HttpResponse & {
/**
 * The response body as text (string format)
 */
bodyAsText: string;
/**
 * The response body as parsed JSON or XML
 */
parsedBody: User;
};
};
/**
 * Contains response data for the Users.createOrUpdate operation.
 */
export type UsersCreateOrUpdateResponse = User & {
/**
 * The underlying HTTP response.
 */
_response: msRest.HttpResponse & {
/**
 * The response body as text (string format)
 */
bodyAsText: string;
/**
 * The response body as parsed JSON or XML
 */
parsedBody: User;
};
};
/**
 * Contains response data for the Users.beginCreateOrUpdate operation.
 */
export type UsersBeginCreateOrUpdateResponse = User & {
/**
 * The underlying HTTP response.
 */
_response: msRest.HttpResponse & {
/**
 * The response body as text (string format)
 */
bodyAsText: string;
/**
 * The response body as parsed JSON or XML
 */
parsedBody: User;
};
};
/**
 * Contains response data for the Users.listByDataBoxEdgeDeviceNext operation.
 */
export type UsersListByDataBoxEdgeDeviceNextResponse = UserList & {
/**
 * The underlying HTTP response.
 */
_response: msRest.HttpResponse & {
/**
 * The response body as text (string format)
 */
bodyAsText: string;
/**
 * The response body as parsed JSON or XML
 */
parsedBody: UserList;
};
};
import {Dictionary, Config} from "../synth/SynthConfig";
import {Note, Pattern} from "../synth/synth";
import {SongDocument} from "./SongDocument";
import {ChangeGroup} from "./Change";
import {ChangeTrackSelection, ChangeChannelBar, ChangeDuplicateSelectedReusedPatterns, ChangeNoteAdded, ChangeNoteTruncate, ChangePatternNumbers, ChangePatternSelection, ChangeInsertBars, ChangeDeleteBars, ChangeEnsurePatternExists, ChangeNoteLength, ChangePaste, ChangePatternInstrument, ChangePatternsPerChannel, ChangePatternRhythm, ChangePatternScale, ChangeTranspose, comparePatternNotes, unionOfUsedNotes, generateScaleMap} from "./changes";
/**
 * Snapshot of one pattern captured by Selection.copy(): the instrument index
 * it used and its (possibly clipped) notes. Stored as plain data so it can be
 * round-tripped through JSON in localStorage.
 */
interface PatternCopy {
instrument: number;
notes: any[];
}
/**
 * Snapshot of one channel's selected bars: for each bar the pattern number it
 * held (0 = empty), plus the pattern contents keyed by pattern number.
 */
interface ChannelCopy {
// True when the channel is a noise/drum channel (see Song.getChannelIsNoise).
isNoise: boolean;
patterns: Dictionary<PatternCopy>;
bars: number[];
}
/**
 * Top-level clipboard payload persisted in localStorage under "selectionCopy".
 */
interface SelectionCopy {
// Length of the copied span measured in parts (see Config.partsPerBeat).
partDuration: number;
channels: ChannelCopy[];
}
/**
 * Tracks the user's current selection in the track editor — a rectangle of
 * bars (X axis) by channels (Y axis), plus an optional part-range selection
 * within patterns — and implements clipboard and transform operations on it.
 */
export class Selection {
// Corners of the box selection. The 0/1 pairs are the drag anchor and
// endpoint and may be in either order; the getters below normalize them.
public boxSelectionX0: number = 0;
public boxSelectionY0: number = 0;
public boxSelectionX1: number = 0;
public boxSelectionY1: number = 0;
// Digits typed so far while entering a pattern number (see nextDigit()).
public digits: string = "";
// Part-range selection inside the selected pattern(s), in parts.
// Active only when start < end (see patternSelectionActive).
public patternSelectionStart: number = 0;
public patternSelectionEnd: number = 0;
public patternSelectionActive: boolean = false;
// Most recent transpose / track-selection change groups, kept so a repeated
// invocation can replace the previous undo entry instead of stacking new ones.
private _changeTranspose: ChangeGroup | null = null;
private _changeTrack: ChangeGroup | null = null;
constructor(private _doc: SongDocument) {}
/**
 * Serializes the current selection state to a plain object for persistence.
 * Keys are quoted string literals so the shape survives property renaming.
 */
public toJSON(): {x0: number, x1: number, y0: number, y1: number, start: number, end: number} {
    const state: {x0: number, x1: number, y0: number, y1: number, start: number, end: number} = {
        "x0": this.boxSelectionX0,
        "x1": this.boxSelectionX1,
        "y0": this.boxSelectionY0,
        "y1": this.boxSelectionY1,
        "start": this.patternSelectionStart,
        "end": this.patternSelectionEnd,
    };
    return state;
}
/**
 * Restores selection state previously produced by toJSON(). A null/undefined
 * argument is ignored. Any pending pattern-number digit entry is cancelled,
 * and patternSelectionActive is rederived from the restored range.
 */
public fromJSON(json: {x0: number, x1: number, y0: number, y1: number, start: number, end: number}): void {
    if (json == null) return;
    // Unary plus coerces values that may have round-tripped as strings.
    this.boxSelectionX0 = +json["x0"];
    this.boxSelectionX1 = +json["x1"];
    this.boxSelectionY0 = +json["y0"];
    this.boxSelectionY1 = +json["y1"];
    this.patternSelectionStart = +json["start"];
    this.patternSelectionEnd = +json["end"];
    this.digits = "";
    this.patternSelectionActive = (this.patternSelectionEnd > this.patternSelectionStart);
}
/**
 * Notifies document listeners that the selection changed, and cancels any
 * in-progress pattern-number digit entry.
 */
public selectionUpdated(): void {
this._doc.notifier.changed();
this.digits = "";
}
/** Leftmost selected bar; the drag corners may be in either order. */
public get boxSelectionBar(): number {
    return (this.boxSelectionX0 < this.boxSelectionX1) ? this.boxSelectionX0 : this.boxSelectionX1;
}
/** Topmost selected channel; the drag corners may be in either order. */
public get boxSelectionChannel(): number {
    return (this.boxSelectionY0 < this.boxSelectionY1) ? this.boxSelectionY0 : this.boxSelectionY1;
}
/** Number of selected bars (inclusive of both corner bars). */
public get boxSelectionWidth(): number {
    return Math.max(this.boxSelectionX0, this.boxSelectionX1) - Math.min(this.boxSelectionX0, this.boxSelectionX1) + 1;
}
/** Number of selected channels (inclusive of both corner channels). */
public get boxSelectionHeight(): number {
    return Math.max(this.boxSelectionY0, this.boxSelectionY1) - Math.min(this.boxSelectionY0, this.boxSelectionY1) + 1;
}
/**
 * Clamps the horizontal track scroll so the selection endpoint bar
 * (boxSelectionX1) is within the visible window of bars.
 */
public scrollToSelection(): void {
    const endpointBar: number = this.boxSelectionX1;
    const firstVisibleBar: number = endpointBar - (this._doc.trackVisibleBars - 1);
    let scrollPos: number = this._doc.barScrollPos;
    if (scrollPos > endpointBar) scrollPos = endpointBar;
    if (scrollPos < firstVisibleBar) scrollPos = firstVisibleBar;
    this._doc.barScrollPos = scrollPos;
}
/**
 * Moves the cursor to the given channel and bar. Consecutive moves collapse
 * into one undo entry, and recording is skipped entirely while redo history
 * exists so merely highlighting a pattern doesn't erase it.
 */
public setChannelBar(channel: number, bar: number): void {
    const replacesLast: boolean = this._doc.lastChangeWas(this._changeTrack);
    const change: ChangeGroup = new ChangeGroup();
    this._changeTrack = change;
    change.append(new ChangeChannelBar(this._doc, channel, bar));
    // Don't erase existing redo history just to look at highlighted pattern.
    if (!this._doc.hasRedoHistory()) {
        this._doc.record(change, replacesLast);
    }
    this.selectionUpdated();
}
/** Assigns the given pattern number to every bar in the box selection. */
public setPattern(pattern: number): void {
    const change = new ChangePatternNumbers(this._doc, pattern, this.boxSelectionBar, this.boxSelectionChannel, this.boxSelectionWidth, this.boxSelectionHeight);
    this._doc.record(change);
}
public nextDigit(digit: string): void {
if (this.digits.length > 0 && this.digits != String(this._doc.song.channels[this.boxSelectionChannel].bars[this.boxSelectionBar])) {
this.digits = "";
}
this.digits += digit;
let parsed: number = parseInt(this.digits);
if (parsed <= this._doc.song.patternsPerChannel) {
this.setPattern(parsed);
return;
}
this.digits = digit;
parsed = parseInt(this.digits);
if (parsed <= this._doc.song.patternsPerChannel) {
this.setPattern(parsed);
return;
}
this.digits = "";
}
/**
 * Inserts as many new bars as are selected, immediately after the selection,
 * then shifts the selection right onto the inserted bars.
 */
public insertBars(): void {
    const width: number = this.boxSelectionWidth;
    this._doc.record(new ChangeInsertBars(this._doc, this.boxSelectionBar + width, width));
    this.boxSelectionX0 += width;
    this.boxSelectionX1 += width;
}
/**
 * Deletes the selection. With an active part-range selection, only the notes
 * inside that range are removed (patterns shared with bars outside the
 * selection are duplicated first); otherwise the selected bars themselves are
 * deleted and the box selection shifts left to follow.
 */
public deleteBars(): void {
    const group: ChangeGroup = new ChangeGroup();
    if (this._doc.selection.patternSelectionActive) {
        const spansMultiple: boolean = (this.boxSelectionWidth > 1 || this.boxSelectionHeight > 1);
        if (spansMultiple) {
            group.append(new ChangeDuplicateSelectedReusedPatterns(this._doc, this.boxSelectionBar, this.boxSelectionWidth, this.boxSelectionChannel, this.boxSelectionHeight));
        }
        for (const channel of this._eachSelectedChannel()) {
            for (const pattern of this._eachSelectedPattern(channel)) {
                group.append(new ChangeNoteTruncate(this._doc, pattern, this._doc.selection.patternSelectionStart, this._doc.selection.patternSelectionEnd));
            }
        }
        group.append(new ChangePatternSelection(this._doc, 0, 0));
    } else {
        const width: number = this.boxSelectionWidth;
        group.append(new ChangeDeleteBars(this._doc, this.boxSelectionBar, width));
        this.boxSelectionX0 = Math.max(0, this.boxSelectionX0 - width);
        this.boxSelectionX1 = Math.max(0, this.boxSelectionX1 - width);
    }
    this._doc.record(group);
}
private *_eachSelectedChannel(): IterableIterator<number> {
for (let channel: number = this.boxSelectionChannel; channel < this.boxSelectionChannel + this.boxSelectionHeight; channel++) {
yield channel;
}
}
private *_eachSelectedBar(): IterableIterator<number> {
for (let bar: number = this.boxSelectionBar; bar < this.boxSelectionBar + this.boxSelectionWidth; bar++) {
yield bar;
}
}
/**
 * Yields each distinct Pattern referenced by the selected bars of a channel.
 * Several bars may point at the same pattern number, so each pattern is
 * yielded at most once; bars holding pattern 0 (empty) are skipped.
 */
private *_eachSelectedPattern(channel: number): IterableIterator<Pattern> {
    const seen: Dictionary<boolean> = {};
    for (const bar of this._eachSelectedBar()) {
        const patternIndex: number = this._doc.song.channels[channel].bars[bar];
        if (patternIndex == 0) continue;
        const key: string = String(patternIndex);
        if (seen[key]) continue;
        seen[key] = true;
        const pattern: Pattern | null = this._doc.song.getPattern(channel, bar);
        if (pattern == null) throw new Error();
        yield pattern;
    }
}
/**
 * Returns true when no bar of the given channel currently references the
 * given pattern number (i.e. the pattern slot is free to overwrite/reuse).
 */
private _patternIndexIsUnused(channel: number, patternIndex: number): boolean {
    const bars: number[] = this._doc.song.channels[channel].bars;
    for (let i: number = 0; i < this._doc.song.barCount; i++) {
        if (bars[i] === patternIndex) return false;
    }
    return true;
}
/**
 * Copies the box selection (and, if active, only the part-range within each
 * pattern) to localStorage under "selectionCopy" as JSON. Notes that straddle
 * the part-range boundary are clipped to it, and all copied note times are
 * shifted so the range starts at 0.
 */
public copy(): void {
const channels: ChannelCopy[] = [];
for (const channel of this._eachSelectedChannel()) {
const patterns: Dictionary<PatternCopy> = {};
const bars: number[] = [];
for (const bar of this._eachSelectedBar()) {
const patternNumber: number = this._doc.song.channels[channel].bars[bar];
bars.push(patternNumber);
// Serialize each distinct pattern number only once.
if (patterns[String(patternNumber)] == undefined) {
const pattern: Pattern | null = this._doc.song.getPattern(channel, bar);
let instrument: number = 0;
let notes: Note[] = [];
if (pattern != null) {
instrument = pattern.instrument;
if (this.patternSelectionActive) {
// Work on cloned notes so clipping doesn't mutate the song.
for (const note of pattern.cloneNotes()) {
if (note.end <= this.patternSelectionStart) continue;
if (note.start >= this.patternSelectionEnd) continue;
if (note.start < this.patternSelectionStart || note.end > this.patternSelectionEnd) {
// NOTE(review): the constructed change is discarded; this relies on the
// Change constructor clipping `note` in place as a side effect — confirm.
new ChangeNoteLength(null, note, Math.max(note.start, this.patternSelectionStart), Math.min(this.patternSelectionEnd, note.end));
}
// Re-base times so the copied range starts at part 0.
note.start -= this.patternSelectionStart;
note.end -= this.patternSelectionStart;
notes.push(note);
}
} else {
// Whole pattern selected: the notes array is referenced, not cloned,
// and is only read below by JSON.stringify.
notes = pattern.notes;
}
}
patterns[String(patternNumber)] = {"instrument": instrument, "notes": notes};
}
}
const channelCopy: ChannelCopy = {
"isNoise": this._doc.song.getChannelIsNoise(channel),
"patterns": patterns,
"bars": bars,
};
channels.push(channelCopy);
}
const selectionCopy: SelectionCopy = {
"partDuration": this.patternSelectionActive ? this.patternSelectionEnd - this.patternSelectionStart : this._doc.song.beatsPerBar * Config.partsPerBeat,
"channels": channels,
};
window.localStorage.setItem("selectionCopy", JSON.stringify(selectionCopy));
}
// I'm sorry this function is so complicated!
// Basically I'm trying to avoid accidentally modifying patterns that are used
// elsewhere in the song (unless we're just pasting a single pattern) but I'm
// also trying to reuse patterns where it makes sense to do so, especially
// in the same channel it was copied from.
/**
 * Pastes the clipboard ("selectionCopy" in localStorage) into the current
 * selection. Three cases: (1) a single copied pattern merges into whatever is
 * selected; (2) with an active part-range selection, copied notes replace just
 * that range in each target pattern; (3) otherwise a rectangular paste
 * replaces pattern numbers, reusing or creating patterns as needed.
 * Copied channels are tiled to fill a larger selection; a channel is skipped
 * if its noise/pitch kind doesn't match the target channel.
 */
public pasteNotes(): void {
const selectionCopy: SelectionCopy | null = JSON.parse(String(window.localStorage.getItem("selectionCopy")));
if (selectionCopy == null) return;
// NOTE(review): if "channels" is present but empty while the selection spans
// multiple bars/channels, channelCopies[pasteChannel % 0] is undefined and the
// property accesses below would throw — consider guarding.
const channelCopies: ChannelCopy[] = selectionCopy["channels"] || [];
const copiedPartDuration: number = selectionCopy["partDuration"] >>> 0;
const group: ChangeGroup = new ChangeGroup();
const fillSelection: boolean = (this.boxSelectionWidth > 1 || this.boxSelectionHeight > 1);
const pasteHeight: number = fillSelection ? this.boxSelectionHeight : Math.min(channelCopies.length, this._doc.song.getChannelCount() - this.boxSelectionChannel);
for (let pasteChannel: number = 0; pasteChannel < pasteHeight; pasteChannel++) {
const channelCopy: ChannelCopy = channelCopies[pasteChannel % channelCopies.length];
const channel: number = this.boxSelectionChannel + pasteChannel;
const isNoise: boolean = !!channelCopy["isNoise"];
const patternCopies: Dictionary<PatternCopy> = channelCopy["patterns"] || {};
const copiedBars: number[] = channelCopy["bars"] || [];
if (copiedBars.length == 0) continue;
// Only paste noise into noise channels and pitch into pitch channels.
if (isNoise != this._doc.song.getChannelIsNoise(channel)) continue;
const pasteWidth: number = fillSelection ? this.boxSelectionWidth : Math.min(copiedBars.length, this._doc.song.barCount - this.boxSelectionBar);
if (!fillSelection && copiedBars.length == 1 && channelCopies.length == 1) {
// Special case: if there's just one pattern being copied, try to insert it
// into whatever pattern is already selected.
const copiedPatternIndex: number = copiedBars[0] >>> 0;
const bar: number = this.boxSelectionBar;
const currentPatternIndex: number = this._doc.song.channels[channel].bars[bar];
if (copiedPatternIndex == 0 && currentPatternIndex == 0) continue;
const patternCopy: PatternCopy = patternCopies[String(copiedPatternIndex)];
const instrumentCopy: number = Math.min(patternCopy["instrument"] >>> 0, this._doc.song.instrumentsPerChannel - 1);
if (currentPatternIndex == 0) {
// Target bar is empty: reuse the same pattern slot when its contents
// already match the clipboard (or the slot is unused), else make one.
const existingPattern: Pattern | undefined = this._doc.song.channels[channel].patterns[copiedPatternIndex - 1];
if (existingPattern != undefined &&
!this.patternSelectionActive &&
((comparePatternNotes(patternCopy["notes"], existingPattern.notes) && instrumentCopy == existingPattern.instrument) ||
this._patternIndexIsUnused(channel, copiedPatternIndex)))
{
group.append(new ChangePatternNumbers(this._doc, copiedPatternIndex, bar, channel, 1, 1));
} else {
group.append(new ChangeEnsurePatternExists(this._doc, channel, bar));
}
}
const pattern: Pattern | null = this._doc.song.getPattern(channel, bar);
if (pattern == null) throw new Error();
group.append(new ChangePaste(this._doc, pattern, patternCopy["notes"], this.patternSelectionActive ? this.patternSelectionStart : 0, this.patternSelectionActive ? this.patternSelectionEnd : Config.partsPerBeat * this._doc.song.beatsPerBar, copiedPartDuration));
group.append(new ChangePatternInstrument(this._doc, instrumentCopy, pattern));
} else if (this.patternSelectionActive) {
// Case 2: replace only the selected part-range within each target bar.
// reusablePatterns caches the pattern number produced for each
// (copied, current) pair so identical bars can share one result.
const reusablePatterns: Dictionary<number> = {};
const usedPatterns: Dictionary<boolean> = {};
group.append(new ChangeDuplicateSelectedReusedPatterns(this._doc, this.boxSelectionBar, pasteWidth, this.boxSelectionChannel, pasteHeight));
for (let pasteBar: number = 0; pasteBar < pasteWidth; pasteBar++) {
const bar: number = this.boxSelectionBar + pasteBar;
const copiedPatternIndex: number = copiedBars[pasteBar % copiedBars.length] >>> 0;
const currentPatternIndex: number = this._doc.song.channels[channel].bars[bar];
const reusedIndex: string = [copiedPatternIndex, currentPatternIndex].join(",");
if (copiedPatternIndex == 0 && currentPatternIndex == 0) continue;
if (reusablePatterns[reusedIndex] != undefined) {
group.append(new ChangePatternNumbers(this._doc, reusablePatterns[reusedIndex], bar, channel, 1, 1));
continue;
}
if (currentPatternIndex == 0) {
group.append(new ChangeEnsurePatternExists(this._doc, channel, bar));
const patternCopy: PatternCopy = patternCopies[String(copiedPatternIndex)];
const instrumentCopy: number = Math.min(patternCopy["instrument"] >>> 0, this._doc.song.instrumentsPerChannel - 1);
const pattern: Pattern = this._doc.song.getPattern(channel, bar)!;
group.append(new ChangePatternInstrument(this._doc, instrumentCopy, pattern));
} else {
const pattern: Pattern | null = this._doc.song.getPattern(channel, bar);
if (pattern == null) throw new Error();
if (!usedPatterns[String(currentPatternIndex)]) {
usedPatterns[String(currentPatternIndex)] = true;
} else {
// If this pattern is used here and elsewhere, it's not safe to modify it directly, so
// make a duplicate of it and modify that instead.
group.append(new ChangePatternNumbers(this._doc, 0, bar, channel, 1, 1));
group.append(new ChangeEnsurePatternExists(this._doc, channel, bar));
const newPattern: Pattern | null = this._doc.song.getPattern(channel, bar);
if (newPattern == null) throw new Error();
group.append(new ChangePatternInstrument(this._doc, pattern.instrument, newPattern));
for (const note of pattern.cloneNotes()) {
group.append(new ChangeNoteAdded(this._doc, newPattern, note, newPattern.notes.length, false));
}
}
}
const pattern: Pattern | null = this._doc.song.getPattern(channel, bar);
if (pattern == null) throw new Error();
if (copiedPatternIndex == 0) {
// Copied bar was empty: clear the selected range instead of pasting.
group.append(new ChangeNoteTruncate(this._doc, pattern, this.patternSelectionStart, this.patternSelectionEnd));
} else {
const patternCopy: PatternCopy = patternCopies[String(copiedPatternIndex)];
group.append(new ChangePaste(this._doc, pattern, patternCopy["notes"], this.patternSelectionStart, this.patternSelectionEnd, copiedPartDuration));
}
reusablePatterns[reusedIndex] = this._doc.song.channels[channel].bars[bar];
}
} else {
// Case 3: rectangular paste over whole bars. First clear the target bars.
for (let pasteBar: number = 0; pasteBar < pasteWidth; pasteBar++) {
const bar: number = this.boxSelectionBar + pasteBar;
const removedPattern: number = this._doc.song.channels[channel].bars[bar];
if (removedPattern != 0) {
group.append(new ChangePatternNumbers(this._doc, 0, bar, channel, 1, 1));
if (this._patternIndexIsUnused(channel, removedPattern)) {
// When a pattern becomes unused when replaced by rectangular selection pasting,
// remove all the notes from the pattern so that it may be reused.
this._doc.song.channels[channel].patterns[removedPattern - 1].notes.length = 0;
}
}
}
// Then fill them, reusing one pattern per distinct copied pattern number.
const reusablePatterns: Dictionary<number> = {};
for (let pasteBar: number = 0; pasteBar < pasteWidth; pasteBar++) {
const bar: number = this.boxSelectionBar + pasteBar;
const copiedPatternIndex: number = copiedBars[pasteBar % copiedBars.length] >>> 0;
const reusedIndex: string = String(copiedPatternIndex);
if (copiedPatternIndex == 0) continue;
if (reusablePatterns[reusedIndex] != undefined) {
group.append(new ChangePatternNumbers(this._doc, reusablePatterns[reusedIndex], bar, channel, 1, 1));
continue;
}
const patternCopy: PatternCopy = patternCopies[String(copiedPatternIndex)];
const instrumentCopy: number = Math.min(patternCopy["instrument"] >>> 0, this._doc.song.instrumentsPerChannel - 1);
const existingPattern: Pattern | undefined = this._doc.song.channels[channel].patterns[copiedPatternIndex - 1];
if (existingPattern != undefined &&
copiedPartDuration == Config.partsPerBeat * this._doc.song.beatsPerBar &&
comparePatternNotes(patternCopy["notes"], existingPattern.notes) &&
instrumentCopy == existingPattern.instrument)
{
// The song already contains an identical pattern in the same slot: reuse it.
group.append(new ChangePatternNumbers(this._doc, copiedPatternIndex, bar, channel, 1, 1));
} else {
if (existingPattern != undefined && this._patternIndexIsUnused(channel, copiedPatternIndex)) {
group.append(new ChangePatternNumbers(this._doc, copiedPatternIndex, bar, channel, 1, 1));
} else {
group.append(new ChangeEnsurePatternExists(this._doc, channel, bar));
}
const pattern: Pattern | null = this._doc.song.getPattern(channel, bar);
if (pattern == null) throw new Error();
group.append(new ChangePaste(this._doc, pattern, patternCopy["notes"], this.patternSelectionActive ? this.patternSelectionStart : 0, this.patternSelectionActive ? this.patternSelectionEnd : Config.partsPerBeat * this._doc.song.beatsPerBar, copiedPartDuration));
group.append(new ChangePatternInstrument(this._doc, instrumentCopy, pattern));
}
reusablePatterns[reusedIndex] = this._doc.song.channels[channel].bars[bar];
}
}
}
this._doc.record(group);
}
/**
 * Pastes only the pattern NUMBERS from the clipboard ("selectionCopy" in
 * localStorage) into the current selection, without touching note contents.
 * Copied channels/bars are tiled to fill a larger selection; if a copied
 * number exceeds patternsPerChannel, the channel's pattern count is grown.
 *
 * Fix: bail out when the stored copy has no channels. Previously, with a
 * multi-bar/multi-channel selection (fillSelection), pasteHeight stayed
 * positive, `channelCopies[pasteChannel % 0]` evaluated to undefined, and
 * the `channelCopy["bars"]` access threw a TypeError on malformed storage.
 */
public pasteNumbers(): void {
    const selectionCopy: SelectionCopy | null = JSON.parse(String(window.localStorage.getItem("selectionCopy")));
    if (selectionCopy == null) return;
    const channelCopies: ChannelCopy[] = selectionCopy["channels"] || [];
    if (channelCopies.length == 0) return;
    const group: ChangeGroup = new ChangeGroup();
    const fillSelection: boolean = (this.boxSelectionWidth > 1 || this.boxSelectionHeight > 1);
    const pasteHeight: number = fillSelection ? this.boxSelectionHeight : Math.min(channelCopies.length, this._doc.song.getChannelCount() - this.boxSelectionChannel);
    for (let pasteChannel: number = 0; pasteChannel < pasteHeight; pasteChannel++) {
        // Tile the copied channels vertically when the selection is taller.
        const channelCopy: ChannelCopy = channelCopies[pasteChannel % channelCopies.length];
        const channel: number = this.boxSelectionChannel + pasteChannel;
        const copiedBars: number[] = channelCopy["bars"] || [];
        if (copiedBars.length == 0) continue;
        const pasteWidth: number = fillSelection ? this.boxSelectionWidth : Math.min(copiedBars.length, this._doc.song.barCount - this.boxSelectionBar);
        for (let pasteBar: number = 0; pasteBar < pasteWidth; pasteBar++) {
            // Tile the copied bars horizontally when the selection is wider.
            const copiedPatternIndex: number = copiedBars[pasteBar % copiedBars.length] >>> 0;
            const bar: number = this.boxSelectionBar + pasteBar;
            if (copiedPatternIndex > this._doc.song.patternsPerChannel) {
                // Grow the channel's pattern list so the number is valid.
                group.append(new ChangePatternsPerChannel(this._doc, copiedPatternIndex));
            }
            group.append(new ChangePatternNumbers(this._doc, copiedPatternIndex, bar, channel, 1, 1));
        }
    }
    this._doc.record(group);
}
/**
 * Selects the whole song, clearing any part-range selection first. Acts as a
 * toggle: if everything is already selected, shrinks the selection back to
 * the current bar and channel.
 */
public selectAll(): void {
    new ChangePatternSelection(this._doc, 0, 0);
    const everythingSelected: boolean =
        this.boxSelectionBar == 0 &&
        this.boxSelectionChannel == 0 &&
        this.boxSelectionWidth == this._doc.song.barCount &&
        this.boxSelectionHeight == this._doc.song.getChannelCount();
    if (everythingSelected) {
        this.setTrackSelection(this._doc.bar, this._doc.bar, this._doc.channel, this._doc.channel);
    } else {
        this.setTrackSelection(0, this._doc.song.barCount - 1, 0, this._doc.song.getChannelCount() - 1);
    }
    this.selectionUpdated();
}
/**
 * Selects all bars of the currently selected channel span, clearing any
 * part-range selection first. Toggles back to just the current bar when the
 * full width is already selected.
 */
public selectChannel(): void {
    new ChangePatternSelection(this._doc, 0, 0);
    const fullWidthSelected: boolean = (this.boxSelectionBar == 0 && this.boxSelectionWidth == this._doc.song.barCount);
    if (fullWidthSelected) {
        this.setTrackSelection(this._doc.bar, this._doc.bar, this.boxSelectionY0, this.boxSelectionY1);
    } else {
        this.setTrackSelection(0, this._doc.song.barCount - 1, this.boxSelectionY0, this.boxSelectionY1);
    }
    this.selectionUpdated();
}
/**
 * Replaces patterns in the selection that are also used elsewhere with fresh
 * duplicates, so edits won't leak outside the selection.
 */
public duplicatePatterns(): void {
    const change = new ChangeDuplicateSelectedReusedPatterns(this._doc, this.boxSelectionBar, this.boxSelectionWidth, this.boxSelectionChannel, this.boxSelectionHeight);
    this._doc.record(change);
}
/**
 * Toggles muting. With allChannels, mutes every channel unless at least one
 * is already muted (then unmutes all). Otherwise applies the same rule to the
 * selected channels: if any is unmuted, mute them all, else unmute them all.
 */
public muteChannels(allChannels: boolean): void {
    const channels = this._doc.song.channels;
    if (allChannels) {
        const anyMuted: boolean = channels.some(channel => channel.muted);
        for (const channel of channels) {
            channel.muted = !anyMuted;
        }
    } else {
        let anyUnmuted: boolean = false;
        for (const channelIndex of this._eachSelectedChannel()) {
            if (!channels[channelIndex].muted) {
                anyUnmuted = true;
                break;
            }
        }
        for (const channelIndex of this._eachSelectedChannel()) {
            channels[channelIndex].muted = anyUnmuted;
        }
    }
    this._doc.notifier.changed();
}
public soloChannels(): void {
let alreadySoloed: boolean = true;
for (let channel: number = 0; channel < this._doc.song.channels.length; channel++) {
const shouldBeMuted: boolean = channel < this.boxSelectionChannel || channel >= this.boxSelectionChannel + this.boxSelectionHeight;
if (this._doc.song.channels[channel].muted != shouldBeMuted) {
alreadySoloed = false;
break;
}
}
if (alreadySoloed) {
for (let channel: number = 0; channel < this._doc.song.channels.length; channel++) {
this._doc.song.channels[channel].muted = false;
}
} else {
for (let channel: number = 0; channel < this._doc.song.channels.length; channel++) {
this._doc.song.channels[channel].muted = channel < this.boxSelectionChannel || channel >= this.boxSelectionChannel + this.boxSelectionHeight;
}
}
this._doc.notifier.changed();
}
public forceRhythm(): void {
const group: ChangeGroup = new ChangeGroup();
if (this.boxSelectionWidth > 1 || this.boxSelectionHeight > 1) {
group.append(new ChangeDuplicateSelectedReusedPatterns(this._doc, this.boxSelectionBar, this.boxSelectionWidth, this.boxSelectionChannel, this.boxSelectionHeight));
}
for (const channel of this._eachSelectedChannel()) {
for (const pattern of this._eachSelectedPattern(channel)) {
group.append(new ChangePatternRhythm(this._doc, pattern));
}
}
this._doc.record(group);
}
public forceScale(): void {
    // Remap the pitches of the selected patterns onto the song's scale.
    const edits: ChangeGroup = new ChangeGroup();
    if (this.boxSelectionWidth > 1 || this.boxSelectionHeight > 1) {
        // Break pattern sharing first so the change stays inside the selection.
        edits.append(new ChangeDuplicateSelectedReusedPatterns(this._doc, this.boxSelectionBar, this.boxSelectionWidth, this.boxSelectionChannel, this.boxSelectionHeight));
    }
    // Collect the set of pitch classes used by the selected pitched patterns;
    // the root (index 0) always counts as used.
    const scaleFlags: boolean[] = [];
    for (let pitchClass: number = 0; pitchClass < 12; pitchClass++) {
        scaleFlags.push(pitchClass == 0);
    }
    for (const channel of this._eachSelectedChannel()) {
        if (this._doc.song.getChannelIsNoise(channel)) continue;
        for (const pattern of this._eachSelectedPattern(channel)) {
            unionOfUsedNotes(pattern, scaleFlags);
        }
    }
    const scaleMap: number[] = generateScaleMap(scaleFlags, this._doc.song.scale);
    for (const channel of this._eachSelectedChannel()) {
        if (this._doc.song.getChannelIsNoise(channel)) continue;
        for (const pattern of this._eachSelectedPattern(channel)) {
            edits.append(new ChangePatternScale(this._doc, pattern, scaleMap));
        }
    }
    this._doc.record(edits);
}
public setTrackSelection(newX0: number, newX1: number, newY0: number, newY1: number): void {
    // Consecutive selection moves collapse into a single undo entry.
    const replacePrevious: boolean = this._doc.lastChangeWas(this._changeTrack);
    const selectionChange: ChangeGroup = new ChangeGroup();
    this._changeTrack = selectionChange;
    selectionChange.append(new ChangeTrackSelection(this._doc, newX0, newX1, newY0, newY1));
    // Don't erase existing redo history just to change track selection.
    if (!this._doc.hasRedoHistory()) {
        this._doc.record(selectionChange, replacePrevious);
    }
}
public transpose(upward: boolean, octave: boolean): void {
    // Shift selected notes by a semitone/scale-step or a whole octave.
    // Consecutive transposes collapse into a single undo entry.
    const replacePrevious: boolean = this._doc.lastChangeWas(this._changeTranspose);
    const transposeChange: ChangeGroup = new ChangeGroup();
    this._changeTranspose = transposeChange;
    if (this.boxSelectionWidth > 1 || this.boxSelectionHeight > 1) {
        // Break pattern sharing first so the change stays inside the selection.
        transposeChange.append(new ChangeDuplicateSelectedReusedPatterns(this._doc, this.boxSelectionBar, this.boxSelectionWidth, this.boxSelectionChannel, this.boxSelectionHeight));
    }
    for (const channel of this._eachSelectedChannel()) {
        for (const pattern of this._eachSelectedPattern(channel)) {
            transposeChange.append(new ChangeTranspose(this._doc, channel, pattern, upward, false, octave));
        }
    }
    this._doc.record(transposeChange, replacePrevious);
}
public setInstrument(instrument: number): void {
const group: ChangeGroup = new ChangeGroup();
if (this.boxSelectionWidth > 1 || this.boxSelectionHeight > 1) {
group.append(new ChangeDuplicateSelectedReusedPatterns(this._doc, this.boxSelectionBar, this.boxSelectionWidth, this.boxSelectionChannel, this.boxSelectionHeight));
}
for (const channel of this._eachSelectedChannel()) {
for (const pattern of this._eachSelectedPattern(channel)) {
group.append(new ChangePatternInstrument(this._doc, instrument, pattern));
}
}
this._doc.record(group);
}
public resetBoxSelection(): void {
    // Collapse the box selection to the single cell under the document cursor.
    const bar: number = this._doc.bar;
    const channel: number = this._doc.channel;
    this.boxSelectionX0 = bar;
    this.boxSelectionX1 = bar;
    this.boxSelectionY0 = channel;
    this.boxSelectionY1 = channel;
}
}
import React, { Component, useState } from 'react';
import {
Button,
PermissionsAndroid,
Platform,
ScrollView,
StyleSheet,
Text,
TextInput,
View,
} from 'react-native';
import Slider from '@react-native-community/slider';
import { Picker } from '@react-native-picker/picker';
import RtcEngine, {
AudioEffectPreset,
AudioEqualizationBandFrequency,
AudioProfile,
AudioReverbType,
AudioScenario,
ChannelProfile,
ClientRole,
RtcEngineContext,
VoiceBeautifierPreset,
} from 'react-native-agora';
import Item from '../../../components/Item';
const config = require('../../../config/agora.config.json');
/** Component state for the VoiceChanger screen. */
interface State {
  channelId: string; // channel to join; editable via the text input
  isJoined: boolean; // true once the JoinChannelSuccess event fires
  isBeautifierOnly: boolean; // switches between beautifier-only and full effect UI
}
/**
 * Demo screen for Agora's voice-changing APIs: voice-beautifier presets,
 * audio-effect presets, local reverb, equalization, and pitch.
 *
 * The screen joins an audio-only channel as a broadcaster. Once joined it
 * renders either the beautifier-only controls or the full set of effect
 * controls, depending on the `isBeautifierOnly` switch.
 */
export default class VoiceChanger extends Component<{}, State, any> {
  _engine?: RtcEngine;
  // Current picker/slider choices. Kept on the instance rather than in state
  // because changing them should not trigger a re-render; they are only read
  // when the corresponding "Set ..." button is pressed.
  _voiceBeautifierPreset: VoiceBeautifierPreset = 0;
  _audioEffectPreset: AudioEffectPreset = 0;
  _param1: number = 0;
  _param2: number = 0;
  _audioReverbType: AudioReverbType = 0;
  _audioReverbValue: number = 0;
  _audioEqualizationBandFrequency: AudioEqualizationBandFrequency = 0;
  _bandGain: number = 0;
  _localVoicePitch: number = 0;
  constructor(props: {}) {
    super(props);
    this.state = {
      channelId: config.channelId,
      isJoined: false,
      isBeautifierOnly: false,
    };
  }
  UNSAFE_componentWillMount() {
    this._initEngine();
  }
  componentWillUnmount() {
    // Release the native engine when the screen goes away.
    this._engine?.destroy();
  }
  /** Create the RTC engine and configure it for high-quality audio streaming. */
  _initEngine = async () => {
    this._engine = await RtcEngine.createWithContext(
      new RtcEngineContext(config.appId)
    );
    this._addListeners();
    // Before calling the method, you need to set the profile
    // parameter of setAudioProfile to AUDIO_PROFILE_MUSIC_HIGH_QUALITY(4)
    // or AUDIO_PROFILE_MUSIC_HIGH_QUALITY_STEREO(5), and to set
    // scenario parameter to AUDIO_SCENARIO_GAME_STREAMING(3)
    await this._engine.setAudioProfile(
      AudioProfile.MusicHighQualityStereo,
      AudioScenario.GameStreaming
    );
    // make myself a broadcaster
    await this._engine.setChannelProfile(ChannelProfile.LiveBroadcasting);
    await this._engine?.setClientRole(ClientRole.Broadcaster);
    // disable video module
    await this._engine?.disableVideo();
    // Set audio route to speaker
    await this._engine.setDefaultAudioRoutetoSpeakerphone(true);
  };
  /** Track join/leave state from engine events so the UI can follow along. */
  _addListeners = () => {
    this._engine?.addListener('Warning', (warningCode) => {
      console.info('Warning', warningCode);
    });
    this._engine?.addListener('Error', (errorCode) => {
      console.info('Error', errorCode);
    });
    this._engine?.addListener('JoinChannelSuccess', (channel, uid, elapsed) => {
      console.info('JoinChannelSuccess', channel, uid, elapsed);
      // RtcLocalView.SurfaceView must render after engine init and channel join
      this.setState({ isJoined: true });
    });
    this._engine?.addListener('LeaveChannel', (stats) => {
      console.info('LeaveChannel', stats);
      // RtcLocalView.SurfaceView must render after engine init and channel join
      this.setState({ isJoined: false });
    });
  };
  _joinChannel = async () => {
    if (Platform.OS === 'android') {
      await PermissionsAndroid.requestMultiple([
        PermissionsAndroid.PERMISSIONS.RECORD_AUDIO,
      ]);
    }
    // start joining channel
    // 1. Users can only see each other after they join the
    // same channel successfully using the same app id.
    // 2. If app certificate is turned on at dashboard, token is needed
    // when joining channel. The channel name and uid used to calculate
    // the token has to match the ones used for channel join
    await this._engine?.joinChannel(
      config.token,
      this.state.channelId,
      null,
      config.uid
    );
  };
  _leaveChannel = async () => {
    await this._engine?.leaveChannel();
  };
  render() {
    const { channelId, isJoined, isBeautifierOnly } = this.state;
    return (
      <View style={styles.container}>
        <View style={styles.top}>
          <TextInput
            style={styles.input}
            onChangeText={(text) => this.setState({ channelId: text })}
            placeholder={'Channel ID'}
            value={channelId}
          />
          <Item
            title={'setVoiceBeautifierPreset Only'}
            isShowSwitch
            onSwitchValueChange={(value) => {
              this.setState({ isBeautifierOnly: value });
            }}
          />
          <Button
            onPress={isJoined ? this._leaveChannel : this._joinChannel}
            title={`${isJoined ? 'Leave' : 'Join'} channel`}
          />
        </View>
        {isJoined && (
          <ScrollView>
            {isBeautifierOnly ? this._renderBeautifier() : this._renderOthers()}
          </ScrollView>
        )}
      </View>
    );
  }
  /** Controls for the voice-beautifier preset APIs only. */
  _renderBeautifier = () => {
    return (
      <View style={styles.container}>
        <PickerView
          // Fixed: this picker selects a VoiceBeautifierPreset; the label
          // previously said "AudioEffectPreset" (copy-paste from _renderOthers).
          alertTitle={'Select VoiceBeautifierPreset:'}
          type={VoiceBeautifierPreset}
          onPress={(value: VoiceBeautifierPreset) => {
            this._voiceBeautifierPreset = value;
          }}
        />
        <Button
          title={'Set Voice Beautifier Preset'}
          onPress={() => {
            this._engine?.setVoiceBeautifierPreset(this._voiceBeautifierPreset);
          }}
        />
        <Text>{'param1'}</Text>
        <Slider
          minimumValue={0}
          maximumValue={10}
          step={1}
          onValueChange={(value) => {
            this._param1 = value;
          }}
        />
        <Text>{'param2'}</Text>
        <Slider
          minimumValue={0}
          maximumValue={10}
          step={1}
          onValueChange={(value) => {
            this._param2 = value;
          }}
        />
        <Button
          title={'Set Voice Beautifier Parameters'}
          onPress={() => {
            this._engine?.setVoiceBeautifierParameters(
              this._voiceBeautifierPreset,
              this._param1,
              this._param2
            );
          }}
        />
      </View>
    );
  };
  /** Controls for audio-effect presets, reverb, equalization and pitch. */
  _renderOthers = () => {
    return (
      <View style={styles.container}>
        <View>
          <PickerView
            alertTitle={'Select AudioEffectPreset:'}
            type={AudioEffectPreset}
            onPress={(value: AudioEffectPreset) => {
              this._audioEffectPreset = value;
            }}
          />
          <Button
            title={'Set Audio Effect Preset'}
            onPress={() => {
              this._engine?.setAudioEffectPreset(this._audioEffectPreset);
            }}
          />
          <Text>{'param1'}</Text>
          <Slider
            minimumValue={0}
            maximumValue={10}
            step={1}
            onValueChange={(value) => {
              this._param1 = value;
            }}
          />
          <Text>{'param2'}</Text>
          <Slider
            minimumValue={0}
            maximumValue={10}
            step={1}
            onValueChange={(value) => {
              this._param2 = value;
            }}
          />
          <Button
            title={'Set Audio Effect Parameters'}
            onPress={() => {
              this._engine?.setAudioEffectParameters(
                this._audioEffectPreset,
                this._param1,
                this._param2
              );
            }}
          />
        </View>
        <View>
          <PickerView
            alertTitle={'Select AudioReverbType:'}
            type={AudioReverbType}
            onPress={(value: AudioReverbType) => {
              this._audioReverbType = value;
            }}
          />
          <Text>{'Select Local Voice Reverb Value:'}</Text>
          <Slider
            minimumValue={-20}
            maximumValue={10}
            step={1}
            onValueChange={(value) => {
              this._audioReverbValue = value;
            }}
          />
          <Button
            title={'Set Local Voice Reverb'}
            onPress={() => {
              this._engine?.setLocalVoiceReverb(
                this._audioReverbType,
                this._audioReverbValue
              );
            }}
          />
        </View>
        <View>
          <PickerView
            alertTitle={'Select AudioEqualizationBandFrequency:'}
            type={AudioEqualizationBandFrequency}
            onPress={(value: AudioEqualizationBandFrequency) => {
              this._audioEqualizationBandFrequency = value;
            }}
          />
          <Text>{'Select Local Voice Equalization Value:'}</Text>
          <Slider
            minimumValue={-15}
            maximumValue={15}
            step={1}
            onValueChange={(value) => {
              this._bandGain = value;
            }}
          />
          <Button
            title={'Set Local Voice Equalization'}
            onPress={() => {
              this._engine?.setLocalVoiceEqualization(
                this._audioEqualizationBandFrequency,
                this._bandGain
              );
            }}
          />
        </View>
        <View>
          <Text>{'Select Local Voice Pitch Value:'}</Text>
          <Slider
            minimumValue={0.5}
            maximumValue={2.0}
            step={0.1}
            onValueChange={(value) => {
              this._localVoicePitch = value;
            }}
          />
          <Button
            title={'Set Local Voice Pitch'}
            onPress={() => {
              this._engine?.setLocalVoicePitch(this._localVoicePitch);
            }}
          />
        </View>
      </View>
    );
  };
}
/**
 * Enum-backed dropdown: renders one row per numeric member of `type`
 * (a numeric TS enum object), labelled with the member's name, and reports
 * the chosen numeric value through `onPress`.
 */
const PickerView = ({
  alertTitle,
  type,
  onPress,
}: {
  alertTitle: string;
  type: any;
  onPress: Function;
}) => {
  // A numeric enum object holds both name->number and number->name entries;
  // split them into parallel label/value arrays.
  const members = Object.values(type);
  const labels = members.filter((member) => typeof member === 'string') as string[];
  const enumValues = members.filter((member) => typeof member === 'number') as number[];
  const [selected, setSelected] = useState(enumValues[0]);
  return (
    <View>
      <Text>{alertTitle}</Text>
      <Picker
        selectedValue={selected}
        onValueChange={(itemValue) => {
          setSelected(itemValue);
          onPress(itemValue);
        }}
      >
        {labels.map((label, index) => (
          <Picker.Item key={index} label={label} value={enumValues[index]} />
        ))}
      </Picker>
    </View>
  );
};
const styles = StyleSheet.create({
container: {
flex: 1,
},
top: {
width: '100%',
},
input: {
borderColor: 'gray',
borderWidth: 1,
color: 'black',
},
}); | the_stack |
import path from "path";
import * as fs from "fs-extra";
import fastGlob from "fast-glob";
import fixturez from "fixturez";
import spawn from "spawndamnit";
import outdent from "outdent";
import crypto from "crypto";
// import profiler from "v8-profiler-next";
import chalk from "chalk";
// Fixture factory rooted at this test directory (copies/temps live under it).
let f = fixturez(__dirname);
// Tagged-template aliases for outdent; purely cosmetic so fixture sources
// read (and syntax-highlight) as the language they contain.
export const js = outdent;
export const ts = outdent;
export const tsx = outdent;
// Disable chalk colors so logged/snapshotted output is byte-stable everywhere.
chalk.level = 0;
// Replace console output with jest mocks; tests assert on `logMock` instead
// of reading real stdout/stderr.
console.error = jest.fn();
console.log = jest.fn();
export let logMock = {
  log: (console.log as any) as jest.MockInstance<void, any>,
  error: (console.error as any) as jest.MockInstance<void, any>,
};
afterEach(() => {
  // Reset captured calls between tests so assertions don't leak across tests.
  logMock.log.mockReset();
  logMock.error.mockReset();
});
import init from "../src/init";
import { confirms } from "../src/messages";
import normalizePath from "normalize-path";
// `confirms` is jest-mocked elsewhere in the test setup; cast once for typed access.
let mockedConfirms = confirms as jest.Mocked<typeof confirms>;
// Run `init` on a directory non-interactively: pre-answer the main/module
// prompts with "yes", then reset the mocks so later tests can configure
// their own answers.
export async function initBasic(directory: string) {
  mockedConfirms.writeMainField.mockResolvedValue(true);
  mockedConfirms.writeModuleField.mockResolvedValue(true);
  await init(directory);
  mockedConfirms.writeMainField.mockReset();
  mockedConfirms.writeModuleField.mockReset();
}
// export function profile(name: string) {
// profiler.startProfiling(name, true);
// return () => {
// let profile = profiler.stopProfiling();
// new Promise<void>((resolve) =>
// profile
// .export()
// .pipe(fs.createWriteStream(name + ".cpuprofile"))
// .on("finish", function () {
// profile.delete();
// resolve();
// })
// );
// };
// }
// Path of the package.json directly inside the given fixture directory.
function getPkgPath(tmpPath: string) {
  const manifestName = "package.json";
  return path.join(tmpPath, manifestName);
}
// Load and parse the package.json found directly inside `filepath`.
export async function getPkg(
  filepath: string
): Promise<{ [key: string]: any }> {
  const raw = await fs.readFile(path.join(filepath, "package.json"), "utf-8");
  return JSON.parse(raw);
}
export async function modifyPkg(
tmpPath: string,
cb: (pkgJson: { [key: string]: any }) => unknown
) {
let json = await getPkg(tmpPath);
await cb(json);
let pkgPath = getPkgPath(tmpPath);
await fs.writeFile(pkgPath, JSON.stringify(json, null, 2));
}
// Factory for a jest-`test`-shaped helper that: (1) copies the
// "template-simple-package" fixture, (2) writes the given entrypoint
// package.json contents into it, then (3) hands the test body a `doThing`
// callback which runs `doResult` (the operation under test) and returns the
// resulting package.json contents keyed by entrypoint path. The returned
// function mirrors jest's `.only`/`.skip` modifiers.
export let createPackageCheckTestCreator = (
  doResult: (tmpPath: string) => Promise<void>
) => {
  let createTestCreator = (
    testFn: (testName: string, func: () => Promise<void>) => void
  ) => async (
    testName: string,
    entrypoints: { [key: string]: any },
    cb: (
      doThing: () => Promise<{ [key: string]: any }>,
      stuff: { dir: string }
    ) => Promise<void>
  ) => {
    testFn(testName, async () => {
      let tmpPath = f.copy("template-simple-package");
      let things = Object.keys(entrypoints);
      // Seed each entrypoint's package.json into the fixture copy.
      await Promise.all(
        things.map(async (entrypointPath) => {
          let content = entrypoints[entrypointPath];
          let filepath = path.join(tmpPath, entrypointPath, "package.json");
          await fs.ensureFile(filepath);
          await fs.writeFile(filepath, JSON.stringify(content, null, 2));
        })
      );
      await cb(
        async () => {
          // Run the operation under test, then read back every entrypoint's
          // (possibly rewritten) package.json.
          await doResult(tmpPath);
          let newThings: Record<string, any> = {};
          await Promise.all(
            things.map(async (entrypointPath) => {
              newThings[entrypointPath] = JSON.parse(
                await fs.readFile(
                  path.join(tmpPath, entrypointPath, "package.json"),
                  "utf8"
                )
              );
            })
          );
          return newThings;
        },
        { dir: tmpPath }
      );
    });
  };
  let testFn = createTestCreator(test);
  // @ts-ignore
  testFn.only = createTestCreator(test.only);
  // @ts-ignore
  testFn.skip = createTestCreator(test.skip);
  return testFn as typeof testFn & { only: typeof testFn; skip: typeof testFn };
};
// Snapshot every file in the fixture's dist/ directory, keyed by its
// normalized filename. Throws a clearer error when dist/ was never created.
export async function snapshotDistFiles(tmpPath: string) {
  const distPath = path.join(tmpPath, "dist");
  let distFiles;
  try {
    distFiles = await fs.readdir(distPath);
  } catch (err) {
    if (err.code !== "ENOENT") throw err;
    throw new Error(distPath + " does not exist");
  }
  await Promise.all(
    distFiles.map(async (filename) => {
      const contents = await readNormalizedFile(path.join(distPath, filename));
      expect(contents).toMatchSnapshot(normalizePath(filename));
    })
  );
}
// md5 hex digest; used to give chunks a stable pseudo-hash in snapshots.
function hash(content: string) {
  const digest = crypto.createHash("md5");
  digest.update(content);
  return digest.digest("hex");
}
// Build path/content transformers that replace unstable rollup chunk hashes
// (`<chunkName>-<hash>`) with deterministic placeholders so snapshots are
// stable across builds.
export let stripHashes = async (chunkName: string) => {
  const chunkPattern = () => new RegExp(`${chunkName}-[^\\.]+`, "g");
  // Deterministic stand-in derived from the file's own contents.
  const md5 = (content: string) =>
    crypto.createHash("md5").update(content).digest("hex");
  return {
    transformPath: (pathname: string, content: string) =>
      pathname.replace(chunkPattern(), () => {
        return `chunk-this-is-not-the-real-hash-${md5(content)}`;
      }),
    transformContent: (content: string) =>
      content.replace(chunkPattern(), () => {
        return "chunk-some-hash";
      }),
  };
};
/**
 * Snapshot the contents of a fixture directory (excluding node_modules and
 * yarn.lock), one named snapshot per file.
 *
 * `files`: "js" matches only *.js; "all" matches every file.
 * `filterPath` drops files from the snapshot entirely.
 * `transformPath`/`transformContent` let callers scrub unstable parts
 * (e.g. chunk hashes — see `stripHashes`) before comparison.
 */
export async function snapshotDirectory(
  tmpPath: string,
  {
    files = "js",
    filterPath = (x) => true,
    transformPath = (x) => x,
    transformContent = (x) => x,
  }: {
    files?: "all" | "js";
    filterPath?: (path: string) => boolean;
    transformPath?: (path: string, contents: string) => string;
    transformContent?: (content: string) => string;
  } = {}
) {
  let paths = await fastGlob(
    [`**/${files === "js" ? "*.js" : "*"}`, "!node_modules/**", "!yarn.lock"],
    {
      cwd: tmpPath,
    }
  );
  await Promise.all(
    paths
      .filter((fp) => filterPath(fp))
      .map(async (x) => {
        let content = transformContent(
          await readNormalizedFile(path.join(tmpPath, x))
        );
        // Parse JSON (except tsconfig, which may contain comments) so the
        // snapshot is structural rather than whitespace-sensitive.
        if (x.endsWith(".json") && !x.endsWith("tsconfig.json")) {
          content = JSON.parse(content);
        }
        expect(content).toMatchSnapshot(
          normalizePath(transformPath(x, content))
        );
      })
  );
}
// Run a real `yarn install` inside the fixture directory (slow; use sparingly).
export async function install(tmpPath: string) {
  await spawn("yarn", ["install"], { cwd: tmpPath });
}
// Absolute path to the monorepo root's node_modules; fixtures symlink to it
// so each test doesn't need its own install.
export const repoNodeModules = path.resolve(
  __dirname,
  "..",
  "..",
  "..",
  "node_modules"
);
// Reusable in-memory fixture for a TypeScript package (consumed by `testdir`):
// symlinked node_modules, a package.json with cjs/esm entries, a permissive
// tsconfig, a babel config, and a small src/ mixing .ts, .js and hand-written
// .d.ts files so declaration generation paths are all exercised.
export const typescriptFixture = {
  node_modules: { kind: "symlink", path: repoNodeModules },
  "package.json": JSON.stringify({
    name: "typescript",
    main: "dist/typescript.cjs.js",
    module: "dist/typescript.esm.js",
    dependencies: {
      "@types/node": "^12.7.1",
      typescript: "^3.4.5",
    },
  }),
  "tsconfig.json": `{
  "compilerOptions": {
    /* Basic Options */
    "target": "esnext" /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019' or 'ESNEXT'. */,
    "module": "commonjs" /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', or 'ESNext'. */,
    // "lib": [], /* Specify library files to be included in the compilation. */
    // "allowJs": true, /* Allow javascript files to be compiled. */
    // "checkJs": true, /* Report errors in .js files. */
    // "jsx": "preserve", /* Specify JSX code generation: 'preserve', 'react-native', or 'react'. */
    // "declaration": true, /* Generates corresponding '.d.ts' file. */
    // "declarationMap": true, /* Generates a sourcemap for each corresponding '.d.ts' file. */
    // "sourceMap": true, /* Generates corresponding '.map' file. */
    // "outFile": "./", /* Concatenate and emit output to single file. */
    // "outDir": "./", /* Redirect output structure to the directory. */
    // "rootDir": "./", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */
    // "composite": true, /* Enable project compilation */
    // "incremental": true, /* Enable incremental compilation */
    // "tsBuildInfoFile": "./", /* Specify file to store incremental compilation information */
    // "removeComments": true, /* Do not emit comments to output. */
    // "noEmit": true, /* Do not emit outputs. */
    // "importHelpers": true, /* Import emit helpers from 'tslib'. */
    // "downlevelIteration": true, /* Provide full support for iterables in 'for-of', spread, and destructuring when targeting 'ES5' or 'ES3'. */
    // "isolatedModules": true, /* Transpile each file as a separate module (similar to 'ts.transpileModule'). */
    /* Strict Type-Checking Options */
    "strict": true /* Enable all strict type-checking options. */,
    // "noImplicitAny": true, /* Raise error on expressions and declarations with an implied 'any' type. */
    // "strictNullChecks": true, /* Enable strict null checks. */
    // "strictFunctionTypes": true, /* Enable strict checking of function types. */
    // "strictBindCallApply": true, /* Enable strict 'bind', 'call', and 'apply' methods on functions. */
    // "strictPropertyInitialization": true, /* Enable strict checking of property initialization in classes. */
    // "noImplicitThis": true, /* Raise error on 'this' expressions with an implied 'any' type. */
    // "alwaysStrict": true, /* Parse in strict mode and emit "use strict" for each source file. */
    /* Additional Checks */
    // "noUnusedLocals": true, /* Report errors on unused locals. */
    // "noUnusedParameters": true, /* Report errors on unused parameters. */
    // "noImplicitReturns": true, /* Report error when not all code paths in function return a value. */
    // "noFallthroughCasesInSwitch": true, /* Report errors for fallthrough cases in switch statement. */
    /* Module Resolution Options */
    // "moduleResolution": "node", /* Specify module resolution strategy: 'node' (Node.js) or 'classic' (TypeScript pre-1.6). */
    // "baseUrl": "./", /* Base directory to resolve non-absolute module names. */
    // "paths": {}, /* A series of entries which re-map imports to lookup locations relative to the 'baseUrl'. */
    // "rootDirs": [], /* List of root folders whose combined content represents the structure of the project at runtime. */
    // "typeRoots": [], /* List of folders to include type definitions from. */
    // "types": [], /* Type declaration files to be included in compilation. */
    // "allowSyntheticDefaultImports": true, /* Allow default imports from modules with no default export. This does not affect code emit, just typechecking. */
    "esModuleInterop": true /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */,
    // "preserveSymlinks": true, /* Do not resolve the real path of symlinks. */
    /* Source Map Options */
    // "sourceRoot": "", /* Specify the location where debugger should locate TypeScript files instead of source locations. */
    // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */
    // "inlineSourceMap": true, /* Emit a single file with source maps instead of having a separate file. */
    // "inlineSources": true, /* Emit the source alongside the sourcemaps within a single file; requires '--inlineSourceMap' or '--sourceMap' to be set. */
    /* Experimental Options */
    // "experimentalDecorators": true, /* Enables experimental support for ES7 decorators. */
    // "emitDecoratorMetadata": true, /* Enables experimental support for emitting type metadata for decorators. */
    "noEmit": true
  }
}`,
  ".babelrc": JSON.stringify({
    presets: [require.resolve("@babel/preset-typescript")],
  }),
  "src/index.ts": ts`
    import { SomeType } from "./another-thing";
    export * from "./one-more-thing";
    import * as path from "path";
    export { path };
    let thing: SomeType = "something";
    export default thing;
  `,
  "src/another-thing.ts": ts`
    export type SomeType = string;
  `,
  "src/one-more-thing.js": js`
    export let obj = {};
  `,
  "src/one-more-thing.d.ts": ts`
    declare var obj: object;
    export { obj };
  `,
} as const;
// In-memory fixture description: file path -> contents, or a symlink spec.
type Fixture = {
  [key: string]: string | { kind: "symlink"; path: string };
};
// basically replicating https://github.com/nodejs/node/blob/72f9c53c0f5cc03000f9a4eb1cf31f43e1d30b89/lib/fs.js#L1163-L1174
// for some reason the builtin auto-detection doesn't work, the code probably doesn't land go into that logic or something
// Windows requires an explicit symlink type; derive it from what the
// link target actually is on disk.
async function getSymlinkType(targetPath: string): Promise<"dir" | "file"> {
  const targetStat = await fs.stat(targetPath);
  if (targetStat.isDirectory()) {
    return "dir";
  }
  return "file";
}
// Materialize an in-memory Fixture into a fresh temp directory and return its
// path. String values become file contents; symlink entries become symlinks
// whose dir/file type is resolved explicitly (needed on Windows).
export async function testdir(dir: Fixture) {
  const temp = f.temp();
  await Promise.all(
    Object.keys(dir).map(async (filename) => {
      const output = dir[filename];
      const fullPath = path.join(temp, filename);
      if (typeof output === "string") {
        await fs.outputFile(fullPath, dir[filename]);
      } else {
        const dir = path.dirname(fullPath);
        await fs.ensureDir(dir);
        // Symlink targets are resolved relative to the temp root.
        const targetPath = path.resolve(temp, output.path);
        const symlinkType = await getSymlinkType(targetPath);
        await fs.symlink(targetPath, fullPath, symlinkType);
      }
    })
  );
  return temp;
}
// Snapshot serializer for directory maps produced by `getFiles`/`getDist`
// (detected via the `dirPrintingSymbol` key). Files with identical contents
// are grouped under a single shared header to keep snapshots compact.
expect.addSnapshotSerializer({
  print(val: Record<string, string>, serialize, indent) {
    // Invert the map: contents -> all filenames that share those contents.
    const contentsByFilename: Record<string, string[]> = {};
    Object.entries(val).forEach(([filename, contents]) => {
      if (contentsByFilename[contents] === undefined) {
        contentsByFilename[contents] = [];
      }
      contentsByFilename[contents].push(filename);
    });
    return Object.entries(contentsByFilename)
      .map(([contents, filenames]) => {
        return `⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯ ${filenames.join(
          ", "
        )} ⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯\n${contents}`;
      })
      .join("\n");
  },
  test(val) {
    // Only objects tagged by getFiles() use this serializer. (The symbol is
    // declared below; it's only read at serialize time, so no TDZ issue.)
    return val && val[dirPrintingSymbol];
  },
});
const dirPrintingSymbol = Symbol("dir printing symbol");
// Convenience wrapper: directory-snapshot map of everything under dist/.
export async function getDist(dir: string) {
  return getFiles(dir, ["dist/**"]);
}
// Read a file with CRLF normalized to LF so snapshots match across platforms.
// For source maps (but not .d.ts.map files) the embedded sourcesContent is
// normalized as well, since it repeats the original sources verbatim.
async function readNormalizedFile(filePath: string): Promise<string> {
  let content = (await fs.readFile(filePath, "utf8")).replace(/\r\n/g, "\n");
  const isSourceMap = /(?<!(\.d\.ts))\.map$/.test(filePath);
  if (isSourceMap) {
    const sourceMap = JSON.parse(content);
    sourceMap.sourcesContent = sourceMap.sourcesContent.map(
      (source: string) => source.replace(/\r\n/g, "\n")
    );
    content = JSON.stringify(sourceMap);
  }
  return content;
}
// Read every file under `dir` matching `glob` into a filename -> contents map,
// with keys sorted so snapshot output is deterministic. The symbol key marks
// the object for the directory snapshot serializer registered in this file.
export async function getFiles(dir: string, glob: string[] = ["**"]) {
  const files = await fastGlob(glob, { cwd: dir });
  const filesObj: Record<string, string> = {
    [dirPrintingSymbol]: true,
  };
  await Promise.all(
    files.map(async (filename) => {
      filesObj[filename] = await readNormalizedFile(path.join(dir, filename));
    })
  );
  // Rebuild in sorted-filename order (insertion order drives serialization).
  let newObj: Record<string, string> = { [dirPrintingSymbol]: true };
  files.sort().forEach((filename) => {
    newObj[filename] = filesObj[filename];
  });
  return newObj;
}
export const basicPkgJson = (options?: {
module?: boolean;
dependencies?: Record<string, string>;
umdName?: string;
}) => {
return JSON.stringify({
name: "pkg",
main: "dist/pkg.cjs.js",
module: options?.module ? "dist/pkg.esm.js" : undefined,
...(options?.umdName && {
"umd:main": "dist/pkg.umd.min.js",
preconstruct: {
umdName: options.umdName,
},
}),
dependencies: options?.dependencies,
});
}; | the_stack |
* TeeChart(tm) for TypeScript
*
* v1.3 October 2012
* Copyright(c) 2012 by Steema Software SL. All Rights Reserved.
* http://www.steema.com
*
* Licensed with commercial and non-commercial attributes,
* specifically: http://www.steema.com/licensing/html5
*
* TypeScript is a Microsoft product: www.typescriptlang.org
*
*/
/**
* @author <a href="mailto:david@steema.com">Steema Software</a>
* @version 1.3
*/
declare namespace Tee {
/** 2-D coordinate pair in canvas pixels. */
interface IPoint {
  x: number;
  y: number;
}
/** Axis-aligned rectangle with point hit-testing. */
interface IRectangle {
  x: number;
  y: number;
  width: number;
  height: number;
  contains(point: IPoint): boolean;
}
/** Interactive chart extension: receives mouse events and may draw overlays. */
interface ITool {
  active: boolean;
  chart: IChart;
  mousedown(event: any): boolean;
  mousemove(event: any): boolean;
  clicked(p: IPoint): boolean;
  draw(): void;
}
/** Multi-stop color gradient fill description. */
interface IGradient {
  chart: IChart;
  visible: boolean;
  colors: string[];
  direction: string;
  stops: number[];
  offset: IPoint;
}
/** Drop-shadow settings applied to a drawn element. */
interface IShadow {
  chart: IChart;
  visible: boolean;
  blur: number;
  color: string;
  width: number;
  height: number;
}
/** Line/border stroke settings (color, width, dashing, gradient). */
interface IStroke {
  chart: IChart;
  fill: string;
  size: number;
  join: string;
  cap: string;
  dash: number[];
  gradient: IGradient;
}
/** Text rendering settings, mirroring canvas font/align properties. */
interface IFont {
  chart: IChart;
  style: string;
  gradient: IGradient;
  fill: string;
  stroke: IStroke;
  shadow: IShadow;
  textAlign: string;
  baseLine: string;
  getSize(): number;
  setSize(size: number): void;
}
/** Background image reference drawn behind or inside an element. */
interface IImage {
  url: string;
  chart: IChart;
  visible: boolean;
}
/** Combined visual style plus drawing primitives for chart elements. */
interface IFormat {
  font: IFont;
  gradient: IGradient;
  shadow: IShadow;
  stroke: IStroke;
  round: IPoint;
  transparency: number;
  image: IImage;
  fill: string;
  textHeight(text: string): number;
  textWidth(text: string): number;
  drawText(bounds: IRectangle, text: string): any;
  rectangle(x: number, y: number, width: number, height: number): any;
  // NOTE(review): "poligon" is presumably the runtime library's spelling —
  // verify against the TeeChart JS implementation before renaming.
  poligon(points: IPoint[]): any;
  ellipse(x: number, y: number, width: number, height: number): any;
}
/** Per-side spacing in pixels. */
interface IMargins {
  left: number;
  top: number;
  right: number;
  bottom: number;
}
/** Positioned text box tool; may contain nested annotation items. */
interface IAnnotation extends ITool {
  position: IPoint;
  margins: IMargins;
  items: IAnnotation[];
  bounds: IRectangle;
  visible: boolean;
  transparent: boolean;
  text: string;
  format: IFormat;
  add(text: string): IAnnotation;
  resize(): void;
  clicked(point: IPoint): boolean;
  draw(): void;
}
/** Chart background panel styling. */
interface IPanel {
  format: IFormat;
  transparent: boolean;
  margins: IMargins;
}
/** Chart title/footer text box. */
interface ITitle extends IAnnotation {
  expand: boolean;
  padding: number;
  transparent: boolean;
}
/** Ordered color list used to color series/points. */
interface IPalette {
  colors: string[];
  get(index: number): string;
}
/** Arrow decoration format (used by series marks). */
interface IArrow extends IFormat {
  length: number;
  underline: boolean;
}
/** Per-point value labels attached to a series. */
interface IMarks extends IAnnotation {
  arrow: IArrow;
  series: ISeries;
  style: string;
  drawEvery: number;
  visible: boolean;
}
/** Raw data backing a series: parallel value/label arrays plus a data source. */
interface ISeriesData {
  values: number[];
  labels: string[];
  source: any;
}
/** CSS cursor shown while hovering a series. */
interface ICursor {
  cursor: string;
}
/** Core series contract (data, axes binding, formatting, hit-testing). */
interface ISeriesNoBounds {
  data: ISeriesData;
  marks: IMarks;
  yMandatory: boolean;
  horizAxis: string;
  vertAxis: string;
  format: IFormat;
  hover: IFormat;
  visible: boolean;
  cursor: ICursor;
  over: number;
  palette: IPalette;
  colorEach: string;
  useAxes: boolean;
  decimals: number;
  title: string;
  //refresh(failure: function): void;
  toPercent(index: number): string;
  markText(index: number): string;
  valueText(index: number): string;
  associatedToAxis(axis: IAxis): boolean;
  calc(index: number, position: IPoint): void;
  clicked(position: IPoint): number;
  minXValue(): number;
  maxXValue(): number;
  minYValue(): number;
  maxYValue(): number;
  count(): number;
  addRandom(count: number, range?: number, x?: boolean): ISeries;
}
/** A series that can also report the bounds of an individual point. */
interface ISeries extends ISeriesNoBounds {
  bounds(rectangle: IRectangle): void;
}
/** Formatting and layout of the tick labels along an axis. */
interface IAxisLabels {
  chart: IChart;
  format: IFormat;
  decimals: number;
  padding: number;
  separation: number; // %
  visible: boolean;
  rotation: number;
  alternate: boolean;
  maxWidth: number;
  labelStyle: string;
  dateFormat: string;
  getLabel(value: number): string;
  width(value: number): number;
}
/** Grid lines drawn across the chart at each axis increment. */
interface IGrid {
  chart: IChart;
  format: IFormat;
  visible: boolean;
  lineDash: boolean;
}
/** Tick marks drawn on an axis. */
interface ITicks {
  chart: IChart;
  stroke: IStroke;
  visible: boolean;
  length: number;
}
/** Minor ticks between major ticks. */
interface IMinorTicks extends ITicks {
  count: number;
}
/** Axis caption text box. */
interface IAxisTitle extends IAnnotation {
  padding: number;
  transparent: boolean;
}
/** A chart axis: range, scale, decorations, and value<->pixel conversion. */
interface IAxis {
  chart: IChart;
  visible: boolean;
  inverted: boolean;
  horizontal: boolean; // readonly
  otherSize: boolean; // readonly
  bounds: IRectangle; // readonly?
  position: number;
  format: IFormat;
  custom: boolean; // readonly
  grid: IGrid;
  labels: IAxisLabels;
  ticks: ITicks;
  minorTicks: IMinorTicks;
  innerTicks: ITicks;
  title: IAxisTitle;
  automatic: boolean;
  minimum: number;
  maximum: number;
  increment: number;
  log: boolean;
  startPos: number;
  endPos: number;
  start: number; // %
  end: number; // %
  axisSize: number;
  scale: number;
  increm: number;
  calc(value: number): number;
  fromPos(position: number): number;
  fromSize(size: number): number;
  hasAnySeries(): boolean;
  scroll(delta: number): void;
  setMinMax(minimum: number, maximum: number): void;
}
/** The chart's four default axes plus any custom axes. */
interface IAxes {
  chart: IChart;
  visible: boolean;
  left: IAxis;
  top: IAxis;
  right: IAxis;
  bottom: IAxis;
  items: IAxis[];
  add(horizontal: boolean, otherSide: boolean): IAxis;
  //each(f: function): void;
}
/** The small color swatch drawn next to each legend entry. */
interface ISymbol {
  chart: IChart;
  format: IFormat;
  width: number;
  height: number;
  padding: number;
  visible: boolean;
}
/** Chart legend: placement, styling, and item text. */
interface ILegend {
  chart: IChart;
  transparent: boolean;
  format: IFormat;
  title: IAnnotation;
  bounds: IRectangle;
  position: string;
  visible: boolean;
  inverted: boolean;
  padding: number;
  align: number;
  fontColor: boolean;
  dividing: IStroke;
  over: number;
  symbol: ISymbol;
  itemHeight: number;
  innerOff: number;
  legendStyle: string;
  textStyle: string;
  availRows(): number;
  itemsCount(): number;
  totalWidth(): number;
  showValues(): boolean;
  itemText(series: ISeries, index: number): string;
  isVertical(): boolean;
}
/** Mouse-drag scrolling configuration. */
interface IScroll {
  chart: IChart;
  active: boolean;
  enabled: boolean;
  direction: string;
  mouseButton: number;
  position: IPoint;
}
/** Collection of all series on a chart. */
interface ISeriesList {
  chart: IChart;
  items: ISeries[];
  anyUsesAxes(): boolean;
  clicked(position: IPoint): boolean;
  //each(f: function): void;
  firstVisible(): ISeries;
}
/** Collection of interactive tools attached to a chart. */
interface ITools {
  chart: IChart;
  items: ITool[];
  add(tool: ITool): ITool;
}
/** One chart wall (a styled background band behind the plot area). */
interface IWall {
  format: IFormat;
  visible: boolean;
  bounds: IRectangle;
}
/** All chart walls. */
interface IWalls {
  visible: boolean;
  left: IWall;
  right: IWall;
  bottom: IWall;
  back: IWall;
}
/** Mouse-drag rectangle zoom configuration. */
interface IZoom {
  chart: IChart;
  active: boolean;
  direction: string;
  enabled: boolean;
  mouseButton: number;
  format: IFormat;
  reset(): void;
}
/** Minimal chart contract used by series/tools/axes back-references. */
interface IChart {
  addSeries(series: ISeries): ISeries;
  draw(context?: CanvasRenderingContext2D): any;
}
// SERIES
/** Bar-style series base: bar sizing, origin, and stacking. */
interface ICustomBar extends ISeries {
  sideMargins: number;
  useOrigin: boolean;
  origin: number;
  offset: number;
  barSize: number;
  barStyle: string;
  stacked: string;
}
/** Point symbol drawn at each value of a point/line series. */
interface ISeriesPointer {
  chart: IChart;
  format: IFormat;
  visible: boolean;
  colorEach: boolean;
  style: string;
  width: number;
  height: number;
}
/** Point-based series base (line, scatter, bubble...). */
interface ICustomSeries extends ISeries {
  pointer: ISeriesPointer;
  stacked: string;
  stairs: boolean;
}
/** Line series; `smooth` > 0 draws interpolated curves. */
interface ILine extends ICustomSeries {
  smooth: number;
}
/** Alias of ILine with smoothing enabled by convention. */
interface ISmoothLine extends ILine {
  smooth: number;
}
/** Filled area series. */
interface IArea extends ISeries {
  useOrigin: boolean;
  origin: number;
}
/** Pie/donut series: slice ordering, rotation, and explosion. */
interface IPie extends ISeries {
  donut: number;
  rotation: number;
  sort: string;
  orderAscending: boolean;
  explode: number[];
  concentric: boolean;
  calcPos(angle: number, position: IPoint): void;
}
/** Bubble data adds a per-point radius. */
interface IBubbleData extends ISeriesData {
  radius: number[];
}
/** Bubble (sized scatter) series. */
interface IBubble extends ICustomSeries {
  data: IBubbleData;
}
/** Gantt data: per-task start/end plus row position. */
interface IGanttData extends ISeriesData {
  start: number[];
  x: number[];
  end: number[];
}
/** Gantt series: horizontal task bars with date formatting. */
interface IGantt extends ISeriesNoBounds {
  data: IGanttData;
  dateFormat: string;
  colorEach: string;
  height: number;
  margin: IPoint;
  add(index: number, label: string, start: number, end: number): void;
  bounds(index: number, rectangle: IRectangle): void;
}
/** OHLC (open/high/low/close) financial data. */
interface ICandleData extends ISeriesData {
  open: number[];
  close: number[];
  high: number[];
  low: number[];
}
/** Candlestick series with separate up/down formats. */
interface ICandle extends ICustomSeries {
  data: ICandleData;
  higher: IFormat;
  lower: IFormat;
  style: string;
}
// TOOLS
// Tool that lets the user drag points of a target series.
interface IDragTool extends ITool {
series: ISeries;
}
// Crosshair-style cursor tool bound to a pair of axes.
interface ICursorTool extends ITool {
direction: string;
size: IPoint;
followMouse: boolean;
dragging: number;
format: IFormat;
horizAxis: IAxis;
vertAxis: IAxis;
render: string;
over(point: IPoint): boolean;
setRender(render: string): void;
}
// Hover tooltip annotation tracking the current series/point.
interface IToolTip extends IAnnotation {
animated: number;
autoHide: boolean;
autoRedraw: boolean;
currentSeries: ISeries;
currentIndex: number;
delay: number;
hide(): void;
refresh(series: ISeries, index: number): void;
}
// Simple 2D point with x/y coordinates.
class Point implements IPoint {
public x: number;
public y: number;
}
// Ambient declaration of the concrete chart: aggregates axes, titles, legend,
// series list, tools, walls and zoom, and renders onto an HTML canvas.
class Chart implements IChart {
//public aspect: IAspect;
public axes: IAxes;
public footer: ITitle;
public legend: ILegend;
public panel: IPanel;
public scroll: IScroll;
public series: ISeriesList;
public title: ITitle;
public tools: ITools;
public walls: IWalls;
public zoom: IZoom;
public bounds: IRectangle;
public canvas: HTMLCanvasElement;
public chartRect: IRectangle;
public palette: IPalette;
constructor(canvas: string);
addSeries(series: ISeries): ISeries;
getSeries(index: number): ISeries;
removeSeries(series: ISeries): void;
draw(context?: CanvasRenderingContext2D): any;
toImage(image: HTMLImageElement, format: string, quality: number): void;
}
// SERIES
// Ambient constructor objects for the concrete series kinds; each takes an
// optional initial array of numeric values.
var Line: {
prototype: ILine;
new (values?: number[]): ILine;
}
var PointXY: {
prototype: ICustomSeries;
new (values?: number[]): ICustomSeries;
}
var Area: {
prototype: IArea;
new (values?: number[]): IArea;
}
var HorizArea: {
prototype: IArea;
new (values?: number[]): IArea;
}
var Bar: {
prototype: ICustomBar;
new (values?: number[]): ICustomBar;
}
var HorizBar: {
prototype: ICustomBar;
new (values?: number[]): ICustomBar;
}
var Pie: {
prototype: IPie;
new (values?: number[]): IPie;
}
var Donut: {
prototype: IPie;
new (values?: number[]): IPie;
}
var Bubble: {
prototype: IBubble;
new (values?: number[]): IBubble;
}
var Gantt: {
prototype: IGantt;
new (values?: number[]): IGantt;
}
var Volume: {
prototype: ICustomBar;
new (values?: number[]): ICustomBar;
}
var Candle: {
prototype: ICandle;
new (values?: number[]): ICandle;
}
// TOOLS
// Ambient constructor objects for the tool kinds; each optionally binds to a chart.
var CursorTool: {
prototype: ICursorTool;
new (chart?: Chart): ICursorTool;
}
var DragTool: {
prototype: IDragTool;
new (chart?: Chart): IDragTool;
}
var ToolTip: {
prototype: IToolTip;
new (chart?: Chart): IToolTip;
}
} | the_stack |
var universe = require("../parser/tools/universe");
import coreApi = require("../parser/wrapped-ast/parserCoreApi");
import core = require("../parser/wrapped-ast/parserCore");
import proxy = require("../parser/ast.core/LowLevelASTProxy");
import def = require("raml-definition-system")
import tsInterfaces = def.tsInterfaces;
import hl = require("../parser/highLevelAST");
import ll = require("../parser/lowLevelAST");
import yaml = require("yaml-ast-parser");
import hlImpl = require("../parser/highLevelImpl");
import builder = require("../parser/ast.core/builder");
import typeSystem = def.rt;
import nominals = typeSystem.nominalTypes;
import universeHelpers = require("../parser/tools/universeHelpers")
import universes = require("../parser/tools/universe")
import util = require("../util/index")
import _ = require("underscore");
import RamlWrapper10 = require("../parser/artifacts/raml10parserapi");
var pathUtils = require("path");
export function dump(node: coreApi.BasicNode|coreApi.AttributeNode, serializeMeta:boolean = true):any{
return new JsonSerializer({
rootNodeDetails : true,
serializeMetadata : serializeMeta
}).dump(node);
}
export class JsonSerializer {
/**
 * @param options serialization options; metadata emission defaults to ON
 * when not explicitly configured.
 */
constructor(private options?:SerializeOptions) {
    if (!this.options) {
        this.options = {};
    }
    // serializeMetadata defaults to true when left null/undefined
    if (this.options.serializeMetadata == null) {
        this.options.serializeMetadata = true;
    }
}
// Transformations applied to a whole serialized node object (after its
// properties were dumped). Order matters: later transformers see the output
// of earlier ones. Commented-out entries are kept for reference; they were
// moved to nodePropertyTransformers or disabled.
private nodeTransformers:Transformation[] = [
new ResourcesTransformer(),
new TypeExampleTransformer(),
new TypeDefaultValueTransformer(),
//new ParametersTransformer(),
new ArrayExpressionTransformer(),
new TypesTransformer(),
//new UsesTransformer(),
//new PropertiesTransformer(),
//new ExamplesTransformer(),
//new ResponsesTransformer(),
//new BodiesTransformer(),
//new AnnotationsTransformer(),
new SecuritySchemesTransformer(),
new AnnotationTypesTransformer(),
new TemplateParametrizedPropertiesTransformer(),
new TraitsTransformer(),
new ResourceTypesTransformer(),
//new FacetsTransformer(),
new SchemasTransformer(),
new ProtocolsToUpperCaseTransformer(),
new ResourceTypeMethodsToMapTransformer(),
new ReferencesTransformer(),
new SimpleNamesTransformer(),
//new OneElementArrayTransformer()
];
// Transformations applied to individual property values while dumping a
// node's properties (see dumpProperties), as opposed to nodeTransformers
// which rewrite the serialized node as a whole.
private nodePropertyTransformers:Transformation[] = [
//new ResourcesTransformer(),
//new TypeExampleTransformer(),
new ParametersTransformer(),
//new TypesTransformer(),
//new UsesTransformer(),
new PropertiesTransformer(),
// //new ExamplesTransformer(),
new ResponsesTransformer(),
new BodiesTransformer(),
new AnnotationsTransformer(),
//new SecuritySchemesTransformer(),
//new AnnotationTypesTransformer(),
//new TemplateParametrizedPropertiesTransformer(),
//new TraitsTransformer(),
//new ResourceTypesTransformer(),
new FacetsTransformer(),
//new SchemasTransformer(),
//new ProtocolsToUpperCaseTransformer(),
//new ResourceTypeMethodsToMapTransformer(),
//new ReferencesTransformer(),
new OneElementArrayTransformer()
];
// (type, property) pairs that are excluded from serialization entirely,
// e.g. displayName on responses/Api siblings, self-referential ref
// properties, and the RAML version property.
private ignore:ObjectPropertyMatcher = new CompositeObjectPropertyMatcher([
new BasicObjectPropertyMatcher(universeHelpers.isResponseType, universeHelpers.isDisplayNameProperty),
new BasicObjectPropertyMatcher(universeHelpers.isApiSibling, universeHelpers.isDisplayNameProperty),
new BasicObjectPropertyMatcher(universeHelpers.isAnnotationRefTypeOrDescendant, universeHelpers.isAnnotationProperty),
new BasicObjectPropertyMatcher(universeHelpers.isSecuritySchemeRefType, universeHelpers.isSecuritySchemeProperty),
new BasicObjectPropertyMatcher(universeHelpers.isTraitRefType, universeHelpers.isTraitProperty),
new BasicObjectPropertyMatcher(universeHelpers.isResourceTypeRefType, universeHelpers.isResourceTypeProperty),
new BasicObjectPropertyMatcher(universeHelpers.isApiSibling, universeHelpers.isRAMLVersionProperty),
new BasicObjectPropertyMatcher(universeHelpers.isTypeDeclarationDescendant, universeHelpers.isStructuredItemsProperty),
]);
// Registry of properties that were declared on a node's definition but not
// implemented by the wrapper object (see dumpProperties); diagnostic aid.
private missingProperties:PropertiesData = new PropertiesData();
// Human-readable report of all accumulated missing properties.
printMissingProperties():string {
return this.missingProperties.print();
}
/**
 * Serializes a node to JSON. Root-node details (ramlVersion/type/errors
 * envelope) are emitted only for a top-level node and only when enabled
 * via options.rootNodeDetails.
 */
dump(node:any):any {
    var hlNode = node.highLevel();
    var hlParent = hlNode && hlNode.parent();
    var withDetails = !hlParent && this.options.rootNodeDetails;
    return this.dumpInternal(node, withDetails, true);
}
/**
 * Recursively serializes a wrapper AST node into plain JSON.
 * Dispatches on the node kind: element (BasicNodeImpl), attribute
 * (AttributeNodeImpl), type instance, or type instance property; any other
 * value is returned as-is.
 * @param node node to serialize
 * @param rootNodeDetails when true, wrap the result into an envelope with
 *        ramlVersion/type/specification/errors
 * @param isRoot true for the entry-point invocation (affects 'uses' handling)
 * @param nodeProperty property through which the node was reached, if any
 */
dumpInternal(node:any, rootNodeDetails:boolean = false, isRoot = false, nodeProperty?:hl.IProperty):any {
if (node == null) {
return null;
}
if (core.BasicNodeImpl.isInstance(node)) {
// Element node: collect serializable properties (regular + custom),
// skipping ignored ones and non-root 'uses' on non-fragment nodes.
var props:{[key:string]:nominals.IProperty} = {};
var basicNode:coreApi.BasicNode = <coreApi.BasicNode>node;
var definition = basicNode.highLevel().definition();
definition.allProperties().filter(x=>{
if(this.ignore.match(basicNode.definition(), x)){
return false;
}
else if(!isRoot && x.nameId()=="uses" &&!isFragment(basicNode.highLevel())){
return false;
}
return true;
}).forEach(x=> {
props[x.nameId()] = x;
});
(<def.NodeClass>definition).allCustomProperties().filter(x=>!this.ignore.match(basicNode.definition(), x)).forEach(x=> {
props[x.nameId()] = x;
});
var obj = this.dumpProperties(props, node);
// Optionally inline RAML 0.8 schema contents: replace a schema name with
// the value of the matching global 'schemas' entry.
if (props["schema"]) {
if (this.options.dumpSchemaContents) {
if (props["schema"].range().key() == universes.Universe08.SchemaString) {
var schemas = basicNode.highLevel().root().elementsOfKind("schemas");
schemas.forEach(x=> {
if (x.name() == obj["schema"]) {
var vl = x.attr("value");
if (vl) {
obj["schema"] = vl.value();
obj["schemaContent"] = vl.value();
}
}
})
}
}
}
this.serializeScalarsAnnotations(obj, basicNode, props);
this.serializeMeta(obj, basicNode);
// Fragment nodes serialize their 'uses' entries explicitly.
if (this.canBeFragment(node)) {
if (RamlWrapper10.isFragment(<any>node)) {
var fragment = RamlWrapper10.asFragment(<any>node);
var uses = fragment.uses();
if (uses.length > 0) {
obj["uses"] = uses.map(x=>x.toJSON());
}
}
}
// 'required' only makes sense for parameter/property-like type
// declarations; strip it (and its metadata echo) everywhere else.
if(universeHelpers.isTypeDeclarationDescendant(definition)){
var prop = basicNode.highLevel().property();
if(prop&&!(universeHelpers.isHeadersProperty(prop)
||universeHelpers.isQueryParametersProperty(prop)
||universeHelpers.isUriParametersProperty(prop)
||universeHelpers.isPropertiesProperty(prop)
||universeHelpers.isBaseUriParametersProperty(prop))){
delete obj["required"];
var metaObj = obj["__METADATA__"]
if(metaObj){
var pMetaObj = metaObj["primitiveValuesMeta"];
if(pMetaObj){
delete pMetaObj["required"];
}
}
}
}
// Attach the source unit path to obj.sourceMap when requested, preferring
// the low-level node's actualPath if available.
if(this.options.sourceMap && typeof obj == "object") {
let unitPath = hlImpl.actualPath(node.highLevel());
if(node.highLevel().lowLevel() && node.highLevel().lowLevel().actual() && node.highLevel().lowLevel().actual().actualPath){
unitPath = node.highLevel().lowLevel().actual().actualPath
}
let sourceMap = obj.sourceMap;
if(!sourceMap){
sourceMap = {};
obj.sourceMap = sourceMap;
}
if(!sourceMap.path) {
sourceMap.path = unitPath;
}
}
// Run the node-level transformation pipeline.
this.nodeTransformers.forEach(x=> {
if (x.match(node, nodeProperty||node.highLevel().property())) {
obj = x.transform(obj, node);
}
});
var result:any = {};
if (rootNodeDetails) {
if (definition) {
let universe = definition.universe();
var ramlVersion = universe.version();
result.ramlVersion = ramlVersion;
let typeName = definition.nameId();
// Anonymous type declarations: report the nearest named
// TypeDeclaration subtype among the supertypes instead.
if(!typeName){
if(definition.isAssignableFrom(def.universesInfo.Universe10.TypeDeclaration.name)){
let typeDecl = universe.type(def.universesInfo.Universe10.TypeDeclaration.name);
let map:any = {};
typeDecl.allSubTypes().forEach(x=>map[x.nameId()]=true);
for(let st of definition.allSuperTypes()){
if(map[st.nameId()]){
typeName = st.nameId();
break;
}
}
}
}
result.type = typeName;
}
result.specification = obj;
result.errors = this.dumpErrors(basicNode.errors());
}
else {
result = obj;
}
return result;
}
else if (core.AttributeNodeImpl.isInstance(node)) {
// Attribute node: scalar values are returned directly; structured
// string values are dumped from the low-level AST.
var props:{[key:string]:nominals.IProperty} = {};
var attrNode:coreApi.AttributeNode = <coreApi.AttributeNode>node;
var definition = attrNode.highLevel().definition();
var pRange = attrNode.highLevel().property().range();
if(pRange.isArray()){
pRange = pRange.array().componentType();
}
(<def.ValueType>definition).allCustomProperties().filter(x=>!this.ignore.match(pRange, x)).forEach(x=> {
props[x.nameId()] = x;
});
var isValueType = pRange.isValueType();
if (isValueType && attrNode['value']) {
var val = attrNode['value']();
if (val == null || typeof val == 'number' || typeof val == 'string' || typeof val == 'boolean') {
return val;
}
else if (pRange.isAssignableFrom(universe.Universe08.StringType.name)&&hlImpl.StructuredValue.isInstance(val)) {
var sVal = (<hlImpl.StructuredValue>val);
var llNode = sVal.lowLevel();
val = llNode ? llNode.dumpToObject() : null;
return val;
}
}
var obj = this.dumpProperties(props, node);
this.nodeTransformers.forEach(x=> {
if (x.match(node, node.highLevel().property())) {
obj = x.transform(obj,node);
}
});
this.serializeScalarsAnnotations(obj, node, props);
this.serializeMeta(obj, attrNode);
return obj;
}
else if (core.TypeInstanceImpl.isInstance(node)) {
return this.serializeTypeInstance(<core.TypeInstanceImpl>node);
}
else if (core.TypeInstancePropertyImpl.isInstance(node)) {
return this.serializeTypeInstanceProperty(<core.TypeInstancePropertyImpl>node);
}
return node;
}
/**
 * True when the node's type (or any of its subtypes) may implement the
 * RAML 1.0 FragmentDeclaration interface.
 */
private canBeFragment(node:core.BasicNodeImpl) {
    var definition = node.definition();
    var candidates = [definition].concat(definition.allSubTypes());
    return candidates.some(x=>x.getAdapter(def.RAMLService).possibleInterfaces()
        .some(y=>y.nameId() == def.universesInfo.Universe10.FragmentDeclaration.name));
}
/**
 * Serializes parser errors (including nested traces, recursively) and sorts
 * them: by unit path, then entries without a start position first, then by
 * start position, then by error code.
 */
private dumpErrors(errors:core.RamlParserError[]) {
return errors.map(x=> {
var eObj = this.dumpErrorBasic(x);
if (x.trace && x.trace.length > 0) {
eObj['trace'] = this.dumpErrors(x.trace);//x.trace.map(y=>this.dumpErrorBasic(y));
}
return eObj;
}).sort((x, y)=> {
if (x.path != y.path) {
return x.path.localeCompare(y.path);
}
// errors lacking a start position sort before positioned ones
if(y.range.start==null){
return 1;
}
else if(x.range.start==null){
return -1;
}
if (x.range.start.position != y.range.start.position) {
return x.range.start.position - y.range.start.position;
}
return x.code - y.code;
});
}
/**
 * Flattens one parser error into a plain JSON-friendly object; the
 * 'isWarning' flag is only emitted when explicitly true.
 */
private dumpErrorBasic(x) {
    var eObj:any = {};
    eObj["code"] = x.code; //TCK error code
    eObj["message"] = x.message;
    eObj["path"] = x.path;
    eObj["line"] = x.line;
    eObj["column"] = x.column;
    eObj["position"] = x.start;
    eObj["range"] = x.range;
    if (x.isWarning === true) {
        eObj.isWarning = true;
    }
    return eObj;
}
// Heuristic: a string whose first and last characters are angle brackets is
// assumed to be XML. (charAt returns "" out of range, so empty input is false.)
private stringLooksLikeXML(contents: string) : boolean {
    return contents.charAt(0) === "<" && contents.charAt(contents.length - 1) === ">";
}
// Heuristic: a string wrapped in curly braces is assumed to be JSON.
// (charAt returns "" out of range, so empty input is false.)
private stringLooksLikeJSON(contents: string) : boolean {
    return contents.charAt(0) === "{" && contents.charAt(contents.length - 1) === "}";
}
/**
 * Serializes the given property set of a node into a JSON map.
 * Special cases handled inline:
 *  - 'structuredType' and structured 'items' values are re-dumped from the
 *    low-level AST under their original keys ('type'/'schema'/'items');
 *  - for TypeDeclaration 'type'/'schema' values, a 'typePropertyKind'
 *    (JSON/XML/TYPE_EXPRESSION/INPLACE) is recorded, and external schema
 *    includes produce a 'schemaPath';
 *  - 'type' is suppressed when a 'schema' is present (mutual exclusion);
 *  - per-scalar source paths are collected into obj.sourceMap.scalarsSources
 *    when options.sourceMap is enabled.
 * Both node-property transformer pipelines are applied to property values.
 */
private dumpProperties(props, node:coreApi.BasicNode|coreApi.AttributeNode):any {
var obj:any = {};
var definition:hl.ITypeDefinition;
if(node.highLevel().isElement()){
definition = node.highLevel().definition();
}
let scalarsSources:any = {};
Object.keys(props).forEach(propName=> {
var nodeProperty:hl.IProperty;
if(definition) {
nodeProperty = definition.property(propName);
if(!nodeProperty){
nodeProperty = _.find((<def.NodeClass>definition).allCustomProperties(),x=>x.nameId()==propName);
}
}
// wrapper does not implement this property accessor -> record and skip
if (!node[propName]) {
this.missingProperties.addProperty(props[propName], node.kind());
return;
}
var property = props[propName];
let originalValue = node[propName]();
var value = originalValue;
// inline 'type'/'schema' declaration: dump from the low-level AST
if (value && propName == "structuredType" && typeof value === "object") {
value = null;
var highLevelNode = (<hl.IHighLevelNode>node.highLevel());
var a = (<hl.IHighLevelNode>highLevelNode).lowLevel();
var tdl = null;
a.children().forEach(x=> {
if (x.key() == "type" || x.key() == "schema") {
var td = highLevelNode.definition().universe().type(universe.Universe10.TypeDeclaration.name);
var hasType = highLevelNode.definition().universe().type(universe.Universe10.TypeDeclaration.name);
var tNode = new hlImpl.ASTNodeImpl(x, highLevelNode, td, hasType.property(x.key()))
tNode.patchType(builder.doDescrimination(tNode));
value = this.dumpInternal(tNode.wrapperNode(),false, false,nodeProperty);
propName = x.key();
}
})
}
else if (propName == "items"&& typeof value === "object") {
var isArr = Array.isArray(value);
var isObj = !isArr;
if(isArr){
isObj = _.find(value,x=>typeof(x)=="object")!=null;
}
if(isObj) {
value = null;
var highLevelNode = (<hl.IHighLevelNode>node.highLevel());
var a = (<hl.IHighLevelNode>highLevelNode).lowLevel();
var tdl = null;
a.children().forEach(x=> {
if (x.key() == "items") {
var td = highLevelNode.definition().universe().type(universe.Universe10.TypeDeclaration.name);
var hasType = highLevelNode.definition().universe().type(universe.Universe10.ArrayTypeDeclaration.name);
let tNode:hlImpl.ASTNodeImpl;
let llNode = x;
let itemsLocalType:nominals.ITypeDefinition;
let stop:boolean;
// unwrap chains of empty single-supertype wrappers until a
// scalar 'type' value or a non-trivial declaration is found
do {
stop = true;
tNode = new hlImpl.ASTNodeImpl(llNode, highLevelNode, td, hasType.property(universe.Universe10.ArrayTypeDeclaration.properties.items.name));
itemsLocalType = tNode.localType();
if(itemsLocalType && isEmpty(itemsLocalType)&& itemsLocalType.superTypes().length==1){
let tChildren = llNode.children().filter(y=>y.key()=="type");
if(tChildren.length==1){
if(tChildren[0].resolvedValueKind()==yaml.Kind.SCALAR){
value = tChildren[0].value();
break;
}
else{
llNode = tChildren[0];
stop = false;
}
}
}
} while ( !stop );
if(value==null) {
tNode.patchType(builder.doDescrimination(tNode));
value = this.dumpInternal(tNode.wrapperNode(), false, false, nodeProperty);
}
if(itemsLocalType.getExtra(tsInterfaces.TOP_LEVEL_EXTRA)
&& typeof value == "object" && ! Array.isArray(value)){
value = value.type;
}
if(Array.isArray(value)&&value.length==1){
value = value[0]
}
propName = x.key();
}
})
}
}
let hasValue = Array.isArray(value) ? (value.length>0) : (value != null);
// classify how the type/schema was expressed (JSON/XML string, type
// expression, or inline declaration)
if(definition && definition.isAssignableFrom("TypeDeclaration")
&& (propName === "type" || propName == "schema") && hasValue) {
if (Array.isArray(value)) {
let val = value.length > 0 ? value[0] : null;
if(val==null){
const defaultsCalculator = (<core.BasicNodeImpl>node).getDefaultsCalculator();
if(defaultsCalculator) {
let typeProp = def.getUniverse("RAML10").type("TypeDeclaration").property("type");
val = defaultsCalculator.attributeDefaultIfEnabled(node.highLevel().asElement(),typeProp);
}
value[0]=val;
}
var highLevelNode = (<hl.IHighLevelNode>node.highLevel());
var runtimeType = highLevelNode.localType();
let canBeJson = false;
let canBeXml = false;
if (runtimeType && runtimeType.hasExternalInHierarchy()) {
var schemaString = val.trim();
canBeJson = (schemaString[0] === "{" && schemaString[schemaString.length - 1] === "}");
canBeXml = (schemaString[0] === "<" && schemaString[schemaString.length - 1] === ">");
}
if (canBeJson) {
obj["typePropertyKind"] = "JSON";
} else if (canBeXml) {
obj["typePropertyKind"] = "XML";
} else {
obj["typePropertyKind"] = "TYPE_EXPRESSION";
}
} else if (typeof value === "object"){
obj["typePropertyKind"] = "INPLACE";
}
}
// for included JSON/XML schemas, record the (project-relative) schemaPath
if((propName === "type" || propName == "schema") && value && value.forEach && typeof value[0] === "string") {
var schemaString = value[0].trim();
var canBeJson = this.stringLooksLikeJSON(schemaString);
var canBeXml= this.stringLooksLikeXML(schemaString);
if(canBeJson || canBeXml) {
var include = node.highLevel().lowLevel().includePath && node.highLevel().lowLevel().includePath();
if(!include&&node.highLevel().isElement()){
var typeAttr = node.highLevel().asElement().attr("type");
if(!typeAttr){
typeAttr = node.highLevel().asElement().attr("schema");
}
if(typeAttr){
include = typeAttr.lowLevel().includePath && typeAttr.lowLevel().includePath();
}
}
if(include) {
// keep the '#fragment' part of the include, if any
var ind = include.indexOf("#");
var postfix = "";
if(ind>=0){
postfix = include.substring(ind);
include = include.substring(0,ind);
}
var aPath = node.highLevel().lowLevel().unit().resolve(include).absolutePath();
var relativePath;
if(aPath.indexOf("http://") === 0 || aPath.indexOf("https://") === 0) {
relativePath = aPath;
} else {
relativePath = pathUtils.relative((<any>node).highLevel().lowLevel().unit().project().getRootPath(), aPath);
}
relativePath = relativePath.replace(/\\/g,'/');
let schemaPath = relativePath + postfix;
obj["schemaPath"] = schemaPath;
if(this.options.sourceMap) {
let sourceMap = obj.sourceMap;
if (!sourceMap) {
sourceMap = {};
obj.sourceMap = sourceMap;
}
sourceMap.path = schemaPath;
}
}
}
}
if (!value && propName == "type") {
return
//we should not use
}
if (!value && propName == "schema") {
return;
//we should not use
}
if (!value && propName == "items") {
return;
//we should not use
}
if ((<coreApi.BasicNode>node).definition
&& universeHelpers.isTypeDeclarationDescendant((<coreApi.BasicNode>node).definition())
&& universeHelpers.isTypeProperty(property)) {
//custom handling of not adding "type" property to the types having "schema" inside, even though the property actually exist,
// thus making "type" and "schema" arrays mutually exclusive in JSON.
var schemaValue = node[universe.Universe10.TypeDeclaration.properties.schema.name]();
if (schemaValue != null && (!Array.isArray(schemaValue) || schemaValue.length != 0)) {
return;
}
var highLevelNode = (<hl.IHighLevelNode>node.highLevel());
var a = (<hl.IHighLevelNode>highLevelNode).lowLevel();
var tdl = null;
var hasSchema = false;
a.children().forEach(x=> {
if (x.key() == "schema") {
hasSchema = true;
return;
}
})
if (hasSchema) {
return;
}
}
// multi-valued property: dump each element, then run the property pipeline
if (Array.isArray(value)) {
var propertyValue:any[] = [];
for (var val of value) {
var dumped = this.dumpInternal(val);
if (propName === 'examples' && this.options && this.options.dumpXMLRepresentationOfExamples && val.expandable && val.expandable._owner) {
(<any>dumped).asXMLString = val.expandable.asXMLString();
}
propertyValue.push(dumped);
}
if (propertyValue.length == 0 && core.BasicNodeImpl.isInstance(node) && !this.isDefined(node, propName)) {
return;
}
for (var x of this.nodePropertyTransformers) {
if (x.match(node, property)) {
propertyValue = x.transform(propertyValue, node);
}
}
obj[propName] = propertyValue;
// collect source paths for each scalar value of the property
if(props[propName].isValueProperty()&&originalValue){
let sPaths:string[] = [];
for(let a of originalValue){
if(core.AttributeNodeImpl.isInstance(a)){
let attr = a.highLevel();
let sPath = hlImpl.actualPath(attr, true);
sPaths.push(sPath);
}
else{
sPaths.push(null);
}
}
if(sPaths.filter(x=>x!=null).length>0){
scalarsSources[propName] = sPaths.map(x=>{return {path:x}});
}
}
}
else {
// single-valued property
var val = this.dumpInternal(value);
if(core.TypeInstanceImpl.isInstance(value)){
obj[propName] = val;
return;
}
if (val == null && core.BasicNodeImpl.isInstance(node) && !this.isDefined(node, propName)) {
return;
}
if (core.BasicNodeImpl.isInstance(node)) {
this.nodePropertyTransformers.forEach(x=> {
if (x.match(node, property)) {
val = x.transform(val, node);
}
});
}
obj[propName] = val;
if (propName === 'example' && this.options && this.options.dumpXMLRepresentationOfExamples && value.expandable && value.expandable._owner) {
(<any>val).asXMLString = value.expandable.asXMLString();
}
if(originalValue && props[propName].isValueProperty()){
let v = Array.isArray(originalValue) ? originalValue[0] : originalValue;
if(core.AttributeNodeImpl.isInstance(v)) {
let attr = v.highLevel();
if (!attr.isFromKey()) {
let sPath = hlImpl.actualPath(attr, true);
if (sPath) {
scalarsSources[propName] = [{path: sPath}];
}
}
}
}
}
});
if (this.options.sourceMap && Object.keys(scalarsSources).length > 0) {
let sourceMap = obj.sourceMap;
if(!sourceMap){
sourceMap = {};
obj.sourceMap = sourceMap;
}
sourceMap.scalarsSources = scalarsSources;
}
return obj;
}
/**
 * Collects annotations attached to scalar-valued properties of the node
 * (via the wrapper's 'scalarsAnnotations' accessor) and stores them under
 * obj["scalarsAnnotations"]; nothing is emitted when there are none.
 */
serializeScalarsAnnotations(obj:any, node:coreApi.BasicNode|coreApi.AttributeNode, props:{[key:string]:nominals.IProperty}) {
if (node["scalarsAnnotations"]) {
var val = {};
var accessor = node["scalarsAnnotations"]();
for (var propName of Object.keys(props)) {
if (accessor[propName]) {
var arr:any[] = accessor[propName]();
if (arr.length > 0) {
// multi-valued property: one annotation array per value
if (Array.isArray(arr[0])) {
var arr1 = [];
arr.forEach((x, i)=> {
arr1.push(x.map(y=>this.dumpInternal(y)))
});
if (arr1.filter(x=>x.length > 0).length > 0) {
val[propName] = arr1;
}
}
else {
val[propName] = arr.map(x=>this.dumpInternal(x));
}
}
}
}
if (Object.keys(val).length > 0) {
obj["scalarsAnnotations"] = val;
}
}
}
/**
 * Emits the node's metadata under "__METADATA__" unless metadata
 * serialization is disabled or the metadata is all-default.
 */
serializeMeta(obj:any, node:coreApi.BasicNode|coreApi.AttributeNode) {
    if (!this.options.serializeMetadata) {
        return;
    }
    var meta = node.meta();
    if (meta.isDefault()) {
        return;
    }
    obj["__METADATA__"] = meta.toJSON();
}
/**
 * Serializes a type instance: scalars become their value, arrays map each
 * item recursively, and objects become property maps (or null when empty).
 */
serializeTypeInstance(inst:core.TypeInstanceImpl):any {
    if (inst.isScalar()) {
        return inst.value();
    }
    if (inst.isArray()) {
        return inst.items().map(x=>this.serializeTypeInstance(x));
    }
    var props = inst.properties();
    if (props.length == 0) {
        return null;
    }
    var result:any = {};
    for (var p of props) {
        result[p.name()] = this.serializeTypeInstanceProperty(p);
    }
    return result;
}
/**
 * Serializes a type-instance property: multi-valued properties become an
 * array of serialized instances, single-valued ones a single instance.
 */
serializeTypeInstanceProperty(prop:core.TypeInstancePropertyImpl):any {
    if (!prop.isArray()) {
        return this.serializeTypeInstance(prop.value());
    }
    return prop.values().map(x=>this.serializeTypeInstance(x));
}
/**
 * True when the high-level node actually contains children or attributes
 * under the given name (used to distinguish "absent" from "empty").
 */
isDefined(node, name) {
    var hl = node.highLevel();
    return hl.elementsOfKind(name).length > 0
        || hl.attributes(name).length > 0;
}
}
// Contract for post-processing serialized values: 'match' selects applicable
// (node, property) pairs, 'transform' rewrites the serialized value.
interface Transformation{
match(node:coreApi.BasicNode|coreApi.AttributeNode,prop:nominals.IProperty):boolean
transform(value:any,node?:coreApi.BasicNode|coreApi.AttributeNode)
}
// Predicate over a (type definition, property) pair.
interface ObjectPropertyMatcher{
match(td:nominals.ITypeDefinition,prop:nominals.IProperty):boolean
}
/**
 * Matcher combining a type predicate and a property predicate.
 * A null type or null property counts as a match for that side.
 */
class BasicObjectPropertyMatcher implements ObjectPropertyMatcher{
    constructor(
        protected typeMatcher: (x:nominals.ITypeDefinition)=>boolean,
        protected propMatcher: (x:nominals.IProperty)=>boolean
    ){}
    match(td:nominals.ITypeDefinition,prop:nominals.IProperty):boolean{
        if (td != null && !this.typeMatcher(td)) {
            return false;
        }
        return (prop == null) || this.propMatcher(prop);
    }
}
/**
 * Disjunction of matchers: succeeds as soon as any child matcher accepts
 * the (type, property) pair.
 */
class CompositeObjectPropertyMatcher implements ObjectPropertyMatcher{
    constructor(protected matchers:ObjectPropertyMatcher[]){}
    match(td:nominals.ITypeDefinition,prop:nominals.IProperty):boolean{
        return this.matchers.some(m=>m.match(td,prop));
    }
}
/**
 * Folds an array of objects into a map keyed by each object's `propName`
 * field; duplicate keys accumulate into arrays. Values that are not
 * non-empty arrays of keyed objects pass through unchanged.
 */
class ArrayToMapTransformer implements Transformation{
    constructor(protected matcher:ObjectPropertyMatcher, protected propName:string){}
    match(node:coreApi.BasicNode,prop:nominals.IProperty):boolean{
        return this.matcher.match(node.definition(),prop);
    }
    transform(value:any){
        var applicable = Array.isArray(value) && value.length > 0 && value[0][this.propName];
        if(!applicable){
            return value;
        }
        var result = {};
        for(var entry of value){
            var key = entry[this.propName];
            var existing = result[key];
            if(!existing){
                result[key] = entry;
            }
            else if(Array.isArray(existing)){
                existing.push(entry);
            }
            else{
                // second occurrence of the key: promote to an array
                result[key] = [ existing, entry ];
            }
        }
        return result;
    }
}
/**
 * Wraps a single serialized object into a one-entry mapping keyed by its
 * `propName` field; arrays pass through unchanged.
 */
class ArrayToMappingsArrayTransformer implements Transformation{
    constructor(protected matcher:ObjectPropertyMatcher, protected propName:string){}
    match(node:coreApi.BasicNode,prop:nominals.IProperty):boolean{
        // attribute nodes have no 'definition' accessor
        var td = node.definition ? node.definition() : null;
        return this.matcher.match(td, prop);
    }
    transform(value:any){
        if(Array.isArray(value)){
            return value;
        }
        var mapping = {};
        mapping[value[this.propName]] = value;
        return mapping;
    }
}
// Converts parameter arrays (baseUriParameters, uriParameters,
// queryParameters, headers, formParameters) into name-keyed maps for the
// node kinds that may declare them.
class ParametersTransformer extends ArrayToMapTransformer{
constructor(){
super(new CompositeObjectPropertyMatcher([
new BasicObjectPropertyMatcher(universeHelpers.isApiSibling,universeHelpers.isBaseUriParametersProperty),
new BasicObjectPropertyMatcher(universeHelpers.isResourceBaseSibling,universeHelpers.isUriParametersProperty),
new BasicObjectPropertyMatcher(universeHelpers.isResourceBaseSibling,universeHelpers.isQueryParametersProperty),
new BasicObjectPropertyMatcher(universeHelpers.isTraitType,universeHelpers.isQueryParametersProperty),
new BasicObjectPropertyMatcher(universeHelpers.isMethodType,universeHelpers.isQueryParametersProperty),
new BasicObjectPropertyMatcher(universeHelpers.isSecuritySchemePartType,universeHelpers.isQueryParametersProperty),
new BasicObjectPropertyMatcher(universeHelpers.isTraitType,universeHelpers.isHeadersProperty),
new BasicObjectPropertyMatcher(universeHelpers.isMethodType,universeHelpers.isHeadersProperty),
new BasicObjectPropertyMatcher(universeHelpers.isSecuritySchemePartType,universeHelpers.isHeadersProperty),
new BasicObjectPropertyMatcher(universeHelpers.isResponseType,universeHelpers.isHeadersProperty),
new BasicObjectPropertyMatcher(universeHelpers.isBodyLikeType,universeHelpers.isFormParametersProperty)
]),"name");
}
}
/**
 * Applies the name-keyed mapping to 'types' entries of LibraryBase siblings
 * and to RAML 1.0 'schemas' entries. Matching is performed against the
 * PARENT node's definition, since the transformed node is the declaration
 * itself.
 */
class TypesTransformer extends ArrayToMappingsArrayTransformer{
constructor(){
super(new CompositeObjectPropertyMatcher([
new BasicObjectPropertyMatcher(universeHelpers.isLibraryBaseSibling,universeHelpers.isTypesProperty),
new BasicObjectPropertyMatcher(
x=>universeHelpers.isLibraryBaseSibling(x)&&universeHelpers.isRAML10Type(x)
,universeHelpers.isSchemasProperty)
]),"name");
}
match(node:coreApi.BasicNode,prop:nominals.IProperty):boolean {
// fixed: return the boolean directly instead of the redundant
// `if(res) return true; else return false;` conditional
return node.parent()!=null && this.matcher.match(node.parent().definition(),prop);
}
}
// Converts 'uses' entries of LibraryBase siblings into a name-keyed map.
// (Currently disabled in both transformer pipelines.)
class UsesTransformer extends ArrayToMapTransformer{
constructor(){
super(new CompositeObjectPropertyMatcher([
new BasicObjectPropertyMatcher(universeHelpers.isLibraryBaseSibling,universeHelpers.isUsesProperty)
]),"name");
}
}
// Converts object type declaration 'properties' arrays into name-keyed maps.
class PropertiesTransformer extends ArrayToMapTransformer{
constructor(){
super(new CompositeObjectPropertyMatcher([
new BasicObjectPropertyMatcher(universeHelpers.isObjectTypeDeclarationSibling,universeHelpers.isPropertiesProperty)
]),"name");
}
}
// Converts method 'responses' arrays into maps keyed by status code.
class ResponsesTransformer extends ArrayToMapTransformer{
constructor(){
super(new CompositeObjectPropertyMatcher([
new BasicObjectPropertyMatcher(universeHelpers.isMethodBaseSibling,universeHelpers.isResponsesProperty)
]),"code");
}
}
// Converts 'annotations' arrays into name-keyed maps on any node type.
class AnnotationsTransformer extends ArrayToMapTransformer{
constructor(){
super(new CompositeObjectPropertyMatcher([
new BasicObjectPropertyMatcher(x=>true,universeHelpers.isAnnotationsProperty)
]),"name");
}
}
// Converts 'body' arrays of responses and methods into name-keyed maps
// (keyed by media type name).
class BodiesTransformer extends ArrayToMapTransformer{
constructor(){
super(new CompositeObjectPropertyMatcher([
new BasicObjectPropertyMatcher(universeHelpers.isResponseType,universeHelpers.isBodyProperty),
new BasicObjectPropertyMatcher(universeHelpers.isMethodBaseSibling,universeHelpers.isBodyProperty)
]),"name");
}
}
// Wraps trait declarations into single-entry mappings keyed by trait name.
class TraitsTransformer extends ArrayToMappingsArrayTransformer{
constructor(){
super(new CompositeObjectPropertyMatcher([
new BasicObjectPropertyMatcher(universeHelpers.isTraitType,universeHelpers.isTraitsProperty)
]),"name");
}
}
// Serializes resource type declarations: hoists each entry of the 'methods'
// array up to be a direct key of the resource type object (keyed by the
// method name), then applies the standard name-keyed mapping.
class ResourceTypesTransformer extends ArrayToMappingsArrayTransformer{
constructor(){
super(new CompositeObjectPropertyMatcher([
new BasicObjectPropertyMatcher(universeHelpers.isResourceTypeType,universeHelpers.isResourceTypesProperty)
]),"name");
}
transform(value:any){
var methodsPropertyName = universes.Universe10.ResourceBase.properties.methods.name;
if(Array.isArray(value)) {
return value;
}
else{
var methods = value[methodsPropertyName];
if (methods) {
methods.forEach(m=> {
var keys = Object.keys(m);
if (keys.length > 0) {
// each entry is a one-key map: { <methodName>: <method> }
var methodName = keys[0];
value[methodName] = m[methodName];
}
})
}
delete value[methodsPropertyName];
return super.transform(value);
}
}
}
// Converts type declaration 'facets' arrays into name-keyed maps.
class FacetsTransformer extends ArrayToMapTransformer{
constructor(){
super(new CompositeObjectPropertyMatcher([
new BasicObjectPropertyMatcher(universeHelpers.isTypeDeclarationSibling,universeHelpers.isFacetsProperty)
]),"name");
}
}
/**
 * Wraps security scheme declarations into name-keyed mappings. Matches on
 * the 'securitySchemes' property alone, so no base matcher is needed.
 */
class SecuritySchemesTransformer extends ArrayToMappingsArrayTransformer{
    constructor(){
        super(null,"name");
    }
    match(node:coreApi.BasicNode,prop:nominals.IProperty):boolean{
        if (prop == null) {
            return false;
        }
        return universeHelpers.isSecuritySchemesProperty(prop);
    }
}
// Wraps annotation type declarations into name-keyed mappings; matching is
// done against the PARENT node's definition.
class AnnotationTypesTransformer extends ArrayToMappingsArrayTransformer{
constructor(){
super(new CompositeObjectPropertyMatcher([
new BasicObjectPropertyMatcher(universeHelpers.isLibraryBaseSibling,universeHelpers.isAnnotationTypesProperty)
]),"name");
}
match(node:coreApi.BasicNode,prop:nominals.IProperty):boolean{
return node.parent()!=null && this.matcher.match(node.parent().definition(),prop);
}
}
/**
 * Wraps methods declared inside a resource type into mappings keyed by the
 * HTTP method name.
 */
class ResourceTypeMethodsToMapTransformer extends ArrayToMappingsArrayTransformer{
    constructor(){
        super(null,"method");
    }
    match(node:coreApi.BasicNode,prop:nominals.IProperty):boolean{
        var parent = node.parent();
        if (parent == null) {
            return false;
        }
        return universeHelpers.isResourceTypeType(parent.definition())
            && universeHelpers.isMethodsProperty(prop);
    }
}
// Property names used by the example-related transformers below.
var exampleNameProp = universe.Universe10.ExampleSpec.properties.name.name;
var exampleContentProp = universe.Universe10.ExampleSpec.properties.value.name;
var exampleStructuredContentProp = "structuredContent";
/**
 * Flattens serialized ExampleSpec arrays: named examples become a map keyed
 * by example name, anonymous ones an array; each entry is reduced to its
 * structured content when present, otherwise its plain value.
 */
class ExamplesTransformer implements Transformation{
    match(node:coreApi.BasicNode,prop:nominals.IProperty):boolean{
        return universeHelpers.isExampleSpecType(node.definition());
    }
    transform(value:any){
        if(!Array.isArray(value) || value.length===0){
            return value;
        }
        if(value[0][exampleNameProp]){
            // named examples -> map keyed by example name
            var byName = {};
            for(var ex of value){
                byName[ex[exampleNameProp]] = this.getActualExample(ex);
            }
            return byName;
        }
        // anonymous examples -> plain array of contents
        return value.map(x=>this.getActualExample(x));
    }
    // Prefers the structured content of an example over its raw value.
    private getActualExample(exampleSpecObj):any{
        var structured = exampleSpecObj[exampleStructuredContentProp];
        if(structured){
            return structured;
        }
        return exampleSpecObj[exampleContentProp];
    }
}
/**
 * For type declarations, replaces the serialized 'example' node with its
 * plain structured value while keeping the full example node available
 * under 'structuredExample'.
 */
class TypeExampleTransformer implements Transformation{
    match(node:coreApi.BasicNode,prop:nominals.IProperty):boolean{
        return node.definition && universeHelpers.isTypeDeclarationSibling(node.definition());
    }
    transform(value:any){
        var wasArray = Array.isArray(value);
        var items = wasArray ? value : [ value ];
        for(var item of items){
            var structuredExample = item['example'];
            if(!structuredExample){
                continue;
            }
            item['example'] = structuredExample.structuredValue;
            item['structuredExample'] = structuredExample;
        }
        return wasArray ? items : items[0];
    }
}
/**
 * Restores the `default` facet on serialized type declarations for the case
 * where the attribute exists in the high-level AST but its parsed value is
 * null: the value is re-read by dumping the low-level AST node to an object.
 */
class TypeDefaultValueTransformer implements Transformation{
match(node:coreApi.BasicNode,prop:nominals.IProperty):boolean{
// NOTE(review): 'node.definition' is tested as a method reference
// (truthiness), not invoked -- presumably guards nodes lacking a
// definition() method; confirm this is intended.
return node.definition && universeHelpers.isTypeDeclarationSibling(node.definition());
}
transform(value:any,node?:coreApi.BasicNode|coreApi.AttributeNode){
var hlNode = node.highLevel()
if(hlNode==null||!hlNode.isElement()){
return value;
}
var element = hlNode.asElement()
let propName = universe.Universe10.TypeDeclaration.properties.default.name;
var attr = element.attr(propName)
// Only act when the 'default' attribute is present but its high-level
// value is null (i.e. the value was lost during serialization).
if(attr == null || attr.value() != null){
return value;
}
var isArray = Array.isArray(value);
var arr = isArray ? value : [ value ];
arr.forEach(x=>{
// Do not clobber a default that is already set on the serialized node.
if(x[propName] == null){
var dVal = attr.lowLevel().dumpToObject()
// dumpToObject may return either { default: <value> } or the bare value.
if(typeof dVal == "object" && dVal.hasOwnProperty(propName)){
x[propName] = dVal[propName]
}
else {
x[propName] = dVal
}
}
});
return isArray ? arr : arr[0];
}
}
/**
 * RAML 0.8 only: serializes each entry of the Api `schemas` array as a
 * single-key object mapping the schema name to its content, plus an optional
 * `sourceMap` carrying just the source path.
 */
class SchemasTransformer implements Transformation{
    protected matcher = new BasicObjectPropertyMatcher(
        x=>universeHelpers.isApiType(x)&&universeHelpers.isRAML08Type(x),universeHelpers.isSchemasProperty);
    match(node:coreApi.BasicNode|coreApi.AttributeNode,prop:nominals.IProperty):boolean{
        return node.parent()!=null&&this.matcher.match(node.parent().definition(),prop);
    }
    transform(value:any){
        if(Array.isArray(value)){
            return value;
        }
        else {
            // Collapse a { key, value } pair into { [key]: value }.
            var obj:any = {};
            obj[value.key] = value.value;
            if(value.sourceMap && value.sourceMap.path){
                obj.sourceMap = {
                    path: value.sourceMap.path
                };
            }
            return obj;
        }
    }
    // Fix: removed the unused private 'getActualExample' method -- it was a
    // copy-paste from ExamplesTransformer and was never called in this class.
}
/**
 * Normalizes the `protocols` property to upper case ("http" -> "HTTP").
 * Accepts a single string or an array of strings; anything else passes
 * through untouched.
 */
class ProtocolsToUpperCaseTransformer implements Transformation{
    match(node:coreApi.BasicNode,prop:nominals.IProperty):boolean{
        return prop!=null && universeHelpers.isProtocolsProperty(prop);
    }
    transform(value:any){
        if(Array.isArray(value)){
            return value.map(protocol => protocol.toUpperCase());
        }
        if(typeof(value)=='string'){
            return value.toUpperCase();
        }
        return value;
    }
}
/**
 * Unwraps one-element arrays for properties that are scalar in the spec
 * (currently Api.mediaType): [x] -> x. Longer arrays pass through unchanged.
 */
class OneElementArrayTransformer implements Transformation{
    usecases:ObjectPropertyMatcher = new CompositeObjectPropertyMatcher([
        new BasicObjectPropertyMatcher(universeHelpers.isApiSibling,universeHelpers.isMediaTypeProperty)
    ]);
    match(node:coreApi.BasicNode,prop:nominals.IProperty):boolean{
        return this.usecases.match(node.definition(),prop);
    }
    transform(value:any){
        var isSingleton = Array.isArray(value) && value.length == 1;
        return isSingleton ? value[0] : value;
    }
}
/**
 * Post-processes a serialized Resource: derives `relativeUriPathSegments`
 * from the resource's relative URI (leading empty segments stripped) and
 * attaches its absolute URI.
 */
class ResourcesTransformer implements Transformation{
    match(node:coreApi.BasicNode,prop:nominals.IProperty):boolean{
        return prop!=null && universeHelpers.isResourcesProperty(prop);
    }
    transform(value:any,node){
        if(Array.isArray(value)){
            return value;
        }
        var relativeUri = value[universes.Universe10.Resource.properties.relativeUri.name];
        if(relativeUri){
            var segments = relativeUri.trim().split("/");
            // Drop empty leading segments produced by the leading "/".
            while(segments.length > 0 && segments[0] === ""){
                segments.shift();
            }
            value["relativeUriPathSegments"] = segments;
            value.absoluteUri = (<RamlWrapper10.Resource>node).absoluteUri();
        }
        return value;
    }
}
/**
 * For components coming from processed libraries (nodes flagged with
 * "libProcessed"), restores the simple, original name: rewrites 'name' (and
 * 'displayName' when it still equals the current key) from the original,
 * pre-proxy low-level AST node.
 */
class SimpleNamesTransformer implements Transformation{
match(node:coreApi.BasicNode,prop:nominals.IProperty):boolean{
// Only children of nodes produced by library processing are affected.
if(!node.parent() || !node.parent().highLevel().lowLevel()["libProcessed"]){
return false;
}
return universeHelpers.isAnnotationTypesProperty(prop)
|| universeHelpers.isTypesProperty(prop)
|| universeHelpers.isResourceTypesProperty(prop)
|| universeHelpers.isTraitsProperty(prop)
|| universeHelpers.isSecuritySchemesProperty(prop);
}
transform(value:any,node?:coreApi.BasicNode|coreApi.AttributeNode){
var llNode = node.highLevel().lowLevel();
var key = llNode.key();
// Unwrap proxy layers to reach the original AST node and its original key.
var original:ll.ILowLevelASTNode = llNode;
while(proxy.LowLevelProxyNode.isInstance(original)){
original = (<proxy.LowLevelProxyNode>original).originalNode();
}
var oKey = original.key();
// The serialized value is a single-key map; patch its payload in place.
var aVal = value[Object.keys(value)[0]];
aVal.name = oKey;
// Keep displayName in sync only when it was defaulted to the (qualified) key.
if(aVal.displayName==key){
aVal.displayName = oKey;
}
return value;
}
}
/**
 * Inlines template parametrized properties: moves every entry of the
 * serialized `parametrizedProperties` object up into the node itself and
 * removes the container key.
 */
class TemplateParametrizedPropertiesTransformer implements Transformation{
    match(node:coreApi.BasicNode,prop:nominals.IProperty):boolean{
        var hlNode = node.highLevel();
        if(!hlNode){
            return false;
        }
        var definition = hlNode.definition();
        if(!definition){
            return false;
        }
        // Applies to every node kind that can occur inside a template.
        return universeHelpers.isResourceTypeType(definition)
            || universeHelpers.isTraitType(definition)
            || universeHelpers.isMethodType(definition)
            || universeHelpers.isResponseType(definition)
            || universeHelpers.isParameterDescendant(definition)
            || universeHelpers.isBodyLikeType(definition)
            || universeHelpers.isTypeDeclarationDescendant(definition);
    }
    transform(value:any){
        if(Array.isArray(value)){
            return value;
        }
        var containerKey = universe.Universe10.Trait.properties.parametrizedProperties.name;
        var container = value[containerKey];
        if(container){
            for(var key of Object.keys(container)){
                value[key] = container[key];
            }
            delete value[containerKey];
        }
        return value;
    }
}
/**
 * Serializes reference values (securedBy, is, and the `type` property of
 * resources / resource types): a reference without parameters collapses to
 * its plain name; one with parameters becomes { name: params }.
 */
class ReferencesTransformer implements Transformation{
    match(node:coreApi.BasicNode,prop:nominals.IProperty):boolean{
        if(prop==null){
            return false;
        }
        if(universeHelpers.isSecuredByProperty(prop) || universeHelpers.isIsProperty(prop)){
            return true;
        }
        // 'type' is a reference only on resources and resource types.
        var parent = node.parent();
        if(parent==null){
            return false;
        }
        var parentDefinition = parent.highLevel().definition();
        var isResourceLike = universeHelpers.isResourceType(parentDefinition)
            || universeHelpers.isResourceTypeType(parentDefinition);
        return isResourceLike && universeHelpers.isTypeProperty(prop);
    }
    transform(value:any){
        if(!value){
            return null;
        }
        return Array.isArray(value) ? value : this.toSimpleValue(value);
    }
    // A reference with structured parameters maps to { name: params };
    // otherwise it is just its name.
    private toSimpleValue(x):any {
        var name = x['name'];
        var params = x['structuredValue'];
        if (!params) {
            return name;
        }
        var result = {};
        result[name] = params;
        return result;
    }
}
/**
 * Rewrites array type expressions in serialized type declarations: a single
 * "T[]" entry in `type` (or `schema`) becomes type "array" with `items: "T"`.
 */
class ArrayExpressionTransformer implements Transformation{
    match(node:coreApi.BasicNode|coreApi.AttributeNode,prop:nominals.IProperty):boolean{
        if(!(core.BasicNodeImpl.isInstance(node))){
            return false;
        }
        var hlNode = (<coreApi.BasicNode>node).highLevel();
        var definition = hlNode.definition();
        if(!universeHelpers.isTypeDeclarationDescendant(definition)){
            return false;
        }
        var lType = hlNode.localType();
        if(!lType || !lType.isArray()){
            return false;
        }
        return true;
    }
    transform(value:any){
        var typePropName = universes.Universe10.TypeDeclaration.properties.type.name;
        var schemaPropName = universes.Universe10.TypeDeclaration.properties.schema.name;
        var itemsPropName = universes.Universe10.ArrayTypeDeclaration.properties.items.name;
        var tValue = value[typePropName];
        if(!tValue){
            tValue = value[schemaPropName];
        }
        // Fix: an array type may carry neither a 'type' nor a 'schema' facet
        // (e.g. declared via 'items' only); previously 'tValue.length' then
        // dereferenced undefined.
        if(!tValue){
            return value;
        }
        if(tValue.length==1&&util.stringEndsWith(tValue[0],"[]")) {
            if(value[itemsPropName]==null) {
                // Derive the component type by stripping the trailing "[]".
                value[itemsPropName] = tValue[0].substring(0, tValue[0].length - 2);
            }
            tValue[0] = "array";
        }
        return value;
    }
}
/**
 * Diagnostic accumulator: records which properties occur, grouped by wrapper
 * kind, and pretty-prints the result one wrapper kind per line.
 */
class PropertiesData{
    typeName:string;
    map:{[key:string]:TypePropertiesData} = {}
    addProperty(prop:nominals.IProperty,wrapperKind:string){
        var entry = this.map[wrapperKind];
        if(entry == null){
            entry = new TypePropertiesData(wrapperKind);
            this.map[wrapperKind] = entry;
        }
        entry.addProperty(prop);
    }
    print():string{
        var lines = Object.keys(this.map).map(kind => this.map[kind].print());
        return lines.join('\n') + "\n";
    }
}
/**
 * Properties of one wrapper kind, grouped further by the domain type each
 * property belongs to.
 */
class TypePropertiesData{
    constructor(protected typeName:string){}
    map:{[key:string]:TypePropertiesData2} = {};
    addProperty(prop:nominals.IProperty){
        var domainName = prop.domain().nameId();
        var entry = this.map[domainName];
        if(entry == null){
            entry = new TypePropertiesData2(domainName);
            this.map[domainName] = entry;
        }
        entry.addProperty(prop);
    }
    print():string{
        var body = Object.keys(this.map).map(key => ' ' + this.map[key].print()).join('\n');
        return this.typeName + ':\n' + body;
    }
}
/**
 * Leaf bucket: the set of properties (deduplicated by name) of one domain
 * type, printed as a sorted, comma-separated list.
 */
class TypePropertiesData2{
    constructor(protected typeName:string){}
    map:{[key:string]:nominals.IProperty} = {};
    addProperty(prop:nominals.IProperty){
        this.map[prop.nameId()] = prop;
    }
    print():string{
        var sortedNames = Object.keys(this.map).sort();
        return this.typeName + ': ' + sortedNames.join(', ');
    }
}
export interface SerializeOptions{
/**
* For root nodes additional details can be included into output. If the option is set to `true`,
* node content is returned as value of the **specification** root property. Other root properties are:
*
* * **ramlVersion** version of RAML used by the specification represented by the node
* * **type** type of the node: Api, Overlay, Extension, Library, or any other RAML type in fragments case
* * **errors** errors of the specification represented by the node
* @default false
*/
rootNodeDetails?:boolean
/**
* Whether to serialize metadata
* @default true
*/
serializeMetadata?:boolean
/**
* Presumably controls whether examples are additionally emitted in their XML
* representation -- confirm against the serializer implementation.
*/
dumpXMLRepresentationOfExamples?:boolean
/**
* Presumably controls whether referenced schema contents are inlined into the
* output -- confirm against the serializer implementation.
*/
dumpSchemaContents?:boolean
/**
* Whether to serialize source maps
*/
sourceMap?: boolean
}
/**
 * Tells whether a type definition carries no information of its own: no
 * properties, annotations or facets (declared, fixed, or fixed built-in),
 * and no description.
 */
export function isEmpty(nc:nominals.ITypeDefinition):boolean{
    const hasOwnContent =
        nc.properties().length > 0 ||
        nc.annotations().length > 0 ||
        nc.facets().length > 0;
    if(hasOwnContent){
        return false;
    }
    const fixedFacets = nc.fixedFacets();
    if(fixedFacets && Object.keys(fixedFacets).length){
        return false;
    }
    const fixedBuiltin = nc.fixedBuiltInFacets();
    if(fixedBuiltin && Object.keys(fixedBuiltin).length){
        return false;
    }
    return !nc.description();
}
function isFragment(node:hl.IHighLevelNode):boolean {
if(!node.parent()){
return true;
}
if(node.lowLevel()["libProcessed"]){
return false;
}
if(!universeHelpers.isLibraryType(node.root().definition())){
return false;
}
const includePath = node.lowLevel().includePath();
if(includePath==null){
return false;
}
const resolvedUnit = node.lowLevel().unit().resolve(includePath);
if(resolvedUnit&&resolvedUnit.contents().startsWith("#%RAML")){
return true;
}
return false;
} | the_stack |
import * as chai from 'chai';
import { DefaultBrowserBehavior } from '../../src';
import DefaultAudioMixController from '../../src/audiomixcontroller/DefaultAudioMixController';
import NoOpLogger from '../../src/logger/NoOpLogger';
import DOMMockBehavior from '../dommock/DOMMockBehavior';
import DOMMockBuilder from '../dommock/DOMMockBuilder';
describe('DefaultAudioMixController', () => {
// Shared fixtures, rebuilt before every test so the DOM mocks start clean.
const expect: Chai.ExpectStatic = chai.expect;
const logger = new NoOpLogger();
let domMockBuilder: DOMMockBuilder;
let behavior: DOMMockBehavior;
let defaultAudioMixController: DefaultAudioMixController;
let element: HTMLAudioElement;
let device: MediaDeviceInfo;
let stream: MediaStream;
beforeEach(() => {
// DOMMockBuilder presumably installs mocked DOM globals (HTMLAudioElement,
// MediaStream, ...) driven by 'behavior'; cleaned up in afterEach.
behavior = new DOMMockBehavior();
domMockBuilder = new DOMMockBuilder(behavior);
defaultAudioMixController = new DefaultAudioMixController(logger);
element = new HTMLAudioElement();
device = new MediaDeviceInfo();
// @ts-ignore
device.deviceId = 'device-id';
stream = new MediaStream();
});
afterEach(() => {
// Tear down whatever DOMMockBuilder installed.
domMockBuilder.cleanup();
});
it('can be constructed', () => {
expect(defaultAudioMixController).to.not.equal(null);
});
it('can bind an audio element, but not yet sink', async () => {
// expect(await defaultAudioMixController.bindAudioElement(element)).to.equal(true);
element.autoplay = false;
try {
await defaultAudioMixController.bindAudioElement(element);
} catch (e) {
throw new Error('This line should not be reached.');
}
// Binding must turn autoplay on for the bound element.
expect(element.autoplay).to.equal(true);
});
it('can fail to bind an audio element when element does not exist', async () => {
// Passing null must reject with a descriptive message.
try {
await defaultAudioMixController.bindAudioElement(null);
throw new Error('This line should not be reached.');
} catch (e) {
expect(e.message).include('Cannot bind audio element');
}
});
it('can unbind an audio element when element does not exist', async () => {
  await defaultAudioMixController.bindAudioDevice(device);
  await defaultAudioMixController.bindAudioStream(stream);
  defaultAudioMixController.unbindAudioElement();
  // Re-binding an element after unbinding must still succeed.
  try {
    await defaultAudioMixController.bindAudioElement(element);
  } catch (e) {
    throw new Error('This line should not be reached.');
  }
  defaultAudioMixController.unbindAudioElement();
  try {
    await defaultAudioMixController.bindAudioStream(stream);
  } catch (e) {
    // Fix: 'e.messaage' was a typo -- the assertion always read 'undefined'
    // and could never check the real error message.
    // NOTE(review): 'bla' looks like a placeholder; confirm the intended message.
    expect(e.message).to.include('bla');
  }
});
it('can successfully bind and sink an audio element', async () => {
//@ts-ignore
await element.setSinkId('anything');
//@ts-ignore
expect(element.sinkId).to.equal('anything');
await defaultAudioMixController.bindAudioDevice(device);
await defaultAudioMixController.bindAudioStream(stream);
element.autoplay = false;
try {
await defaultAudioMixController.bindAudioElement(element);
} catch (e) {
throw new Error('This line should not be reached.');
}
expect(element.autoplay).to.equal(true);
// Binding the element must re-route output to the previously bound device.
// @ts-ignore
expect(element.sinkId).to.equal('device-id');
});
it('Does not throw error on failure to bindAudioStream. Just logs error. (with logger)', async () => {
behavior.setSinkIdSupported = false;
domMockBuilder = new DOMMockBuilder(behavior);
defaultAudioMixController = new DefaultAudioMixController(logger);
await defaultAudioMixController.bindAudioElement(element);
//@ts-ignore
await element.setSinkId(undefined);
try {
await defaultAudioMixController.bindAudioStream(stream);
} catch (e) {
throw new Error('This line should not be reached.');
}
// A controller with no bound element must also swallow the failure.
defaultAudioMixController = new DefaultAudioMixController(logger);
try {
await defaultAudioMixController.bindAudioStream(stream);
} catch (e) {
throw new Error('This line should not be reached.');
}
// Same checks without a logger (covers the no-logger code path).
defaultAudioMixController = new DefaultAudioMixController();
//@ts-ignore
await element.setSinkId('sink-id');
await defaultAudioMixController.bindAudioElement(element);
//@ts-ignore
await element.setSinkId(undefined);
try {
await defaultAudioMixController.bindAudioStream(stream);
} catch (e) {
throw new Error('This line should not be reached.');
}
});
it('does nothing when trying to bind an audio stream and the stream does not exist', async () => {
try {
await defaultAudioMixController.bindAudioStream(null);
} catch (e) {
throw new Error('This line should not be reached.');
}
});
it('can successfully bind and sink an audio stream', async () => {
// Binding order device -> element -> stream must not throw.
try {
await defaultAudioMixController.bindAudioDevice(device);
await defaultAudioMixController.bindAudioElement(element);
await defaultAudioMixController.bindAudioStream(stream);
} catch (e) {
throw new Error('This line should not be reached.');
}
});
it('throws an error if browser does not support setSinkId', async () => {
behavior.setSinkIdSupported = false;
domMockBuilder = new DOMMockBuilder(behavior);
expect(new DefaultBrowserBehavior().supportsSetSinkId()).to.be.false;
defaultAudioMixController = new DefaultAudioMixController(logger);
// First we need an element to bind to.
await defaultAudioMixController.bindAudioElement(element);
try {
await defaultAudioMixController.bindAudioDevice(device);
throw new Error('This line should not be reached.');
} catch (e) {
expect(e.message).include(
'Cannot select audio output device. This browser does not support setSinkId.'
);
}
});
it('can successfully bind and sink an audio stream in Chromium based browser', async () => {
behavior.browserName = 'chrome';
domMockBuilder = new DOMMockBuilder(behavior);
defaultAudioMixController = new DefaultAudioMixController(logger);
try {
await defaultAudioMixController.bindAudioDevice(device);
await defaultAudioMixController.bindAudioElement(element);
await defaultAudioMixController.bindAudioStream(stream);
} catch (e) {
throw new Error('This line should not be reached.');
}
// On Chromium the stream ends up as the element's srcObject.
expect(element.srcObject).to.equal(stream);
});
it('can bind an audio device, but not yet sink', async () => {
// @ts-ignore
await element.setSinkId('random-id');
await defaultAudioMixController.bindAudioElement(element);
await defaultAudioMixController.bindAudioDevice(device);
// @ts-ignore
expect(element.sinkId).to.equal('device-id');
// @ts-ignore
await element.setSinkId('random-id');
// When setSinkId fails, binding a device must not change the sink.
behavior.setSinkIdSucceeds = false;
domMockBuilder = new DOMMockBuilder(behavior);
defaultAudioMixController = new DefaultAudioMixController(logger);
try {
await defaultAudioMixController.bindAudioDevice(device);
} catch (e) {
throw new Error('This line should not be reached.');
}
// @ts-ignore
expect(element.sinkId).to.not.equal('device-id');
// @ts-ignore
expect(element.sinkId).to.equal('random-id');
});
it('does nothing if we try to bind an audio device when device does not exist', async () => {
try {
await defaultAudioMixController.bindAudioDevice(null);
} catch (e) {
throw new Error('This line should not be reached.');
}
});
it('can successfully bind and sink an audio device', async () => {
// Binding order stream -> element -> device must not throw.
try {
await defaultAudioMixController.bindAudioStream(stream);
await defaultAudioMixController.bindAudioElement(element);
await defaultAudioMixController.bindAudioDevice(device);
} catch (e) {
throw new Error('This line should not be reached.');
}
});
it('can successfully set audio element and unbind element', async () => {
behavior.browserName = 'chrome';
domMockBuilder = new DOMMockBuilder(behavior);
defaultAudioMixController = new DefaultAudioMixController(logger);
await defaultAudioMixController.bindAudioElement(element);
defaultAudioMixController.unbindAudioElement();
// Unbinding must clear the element's srcObject.
expect(element.srcObject).to.equal(null);
});
it('can switch between audio elements and streams', async () => {
behavior.browserName = 'chrome';
domMockBuilder = new DOMMockBuilder(behavior);
defaultAudioMixController = new DefaultAudioMixController(logger);
const element2 = new HTMLAudioElement();
const stream2 = new MediaStream();
await defaultAudioMixController.bindAudioElement(element);
await defaultAudioMixController.bindAudioStream(stream);
await defaultAudioMixController.bindAudioElement(element2);
await defaultAudioMixController.bindAudioStream(stream2);
// Each element keeps the stream that was current while it was bound.
expect(element.srcObject).to.equal(stream);
expect(element2.srcObject).to.equal(stream2);
});
it('can bind and unbind between various audio elements and streams', async () => {
behavior.browserName = 'chrome';
domMockBuilder = new DOMMockBuilder(behavior);
defaultAudioMixController = new DefaultAudioMixController(logger);
const element2 = new HTMLAudioElement();
const stream2 = new MediaStream();
await defaultAudioMixController.bindAudioElement(element);
await defaultAudioMixController.bindAudioStream(stream);
defaultAudioMixController.unbindAudioElement();
await defaultAudioMixController.bindAudioElement(element2);
await defaultAudioMixController.bindAudioStream(stream2);
// The unbound element is cleared; the newly bound one carries the new stream.
expect(element.srcObject).to.equal(null);
expect(element2.srcObject).to.equal(stream2);
});
it('can execute calls to the 3 bind methods consecutively in Chromium based browser', async () => {
behavior.browserName = 'chrome';
domMockBuilder = new DOMMockBuilder(behavior);
defaultAudioMixController = new DefaultAudioMixController(logger);
await defaultAudioMixController.bindAudioDevice(device);
await defaultAudioMixController.bindAudioElement(element);
await defaultAudioMixController.bindAudioStream(stream);
expect(element.srcObject).to.equal(stream);
});
it('can execute 3 bind and unbind methods consecutively in Chromium based browser', async () => {
behavior.browserName = 'chrome';
domMockBuilder = new DOMMockBuilder(behavior);
defaultAudioMixController = new DefaultAudioMixController(logger);
await defaultAudioMixController.bindAudioDevice(device);
await defaultAudioMixController.bindAudioElement(element);
await defaultAudioMixController.bindAudioStream(stream);
defaultAudioMixController.unbindAudioElement();
expect(element.srcObject).to.equal(null);
});
it('can bind an audio element when sinkId does not exist but we do not try to set one', async () => {
// With no bound device there is nothing to sink, so binding succeeds.
// @ts-ignore
await element.setSinkId(undefined);
await defaultAudioMixController.bindAudioElement(element);
});
it('can fail to bind an audio element when sinkId does not exist but our device has an ID', async () => {
// @ts-ignore
await element.setSinkId(undefined);
await defaultAudioMixController.bindAudioDevice(device);
try {
await defaultAudioMixController.bindAudioElement(element);
throw new Error('This line should not be reached.');
} catch (e) {
expect(e.message).to.include(
'Cannot select audio output device. This browser does not support setSinkId.'
);
}
});
it('throws an error when setSinkId call errors out existingAudioElement', async () => {
behavior.setSinkIdSucceeds = false;
behavior.browserName = 'chrome';
domMockBuilder = new DOMMockBuilder(behavior);
defaultAudioMixController = new DefaultAudioMixController(logger);
try {
await defaultAudioMixController.bindAudioElement(element);
throw new Error('This line should not be reached.');
} catch (e) {
expect(e.message).to.include('Failed to set sinkId');
}
// Repeat without a logger to cover the no-logger code path.
defaultAudioMixController = new DefaultAudioMixController();
try {
await defaultAudioMixController.bindAudioElement(element);
throw new Error('This line should not be reached.');
} catch (e) {
expect(e.message).to.include('Failed to set sinkId');
}
});
it('throws an error when setSinkId call errors out for current audioElement', async () => {
behavior.setSinkIdSucceeds = false;
behavior.browserName = 'firefox';
domMockBuilder = new DOMMockBuilder(behavior);
defaultAudioMixController = new DefaultAudioMixController(logger);
try {
await defaultAudioMixController.bindAudioElement(element);
throw new Error('This line should not be reached.');
} catch (e) {
expect(e.message).to.include('Failed to set sinkId');
}
// Repeat without a logger to cover the no-logger code path.
defaultAudioMixController = new DefaultAudioMixController();
try {
await defaultAudioMixController.bindAudioElement(element);
throw new Error('This line should not be reached.');
} catch (e) {
expect(e.message).to.include('Failed to set sinkId');
}
});
});
import "jest-extended";
import { Container, Contracts } from "@packages/core-kernel/src";
import {
bridgechainIndexer,
businessIndexer,
MagistrateIndex,
} from "@packages/core-magistrate-transactions/src/wallet-indexes";
import { Wallet, WalletRepository } from "@packages/core-state/src/wallets";
import {
addressesIndexer,
ipfsIndexer,
locksIndexer,
publicKeysIndexer,
resignationsIndexer,
usernamesIndexer,
} from "@packages/core-state/src/wallets/indexers/indexers";
import { Utils } from "@packages/crypto/src";
import { setUp } from "../setup";
let walletRepo: WalletRepository;
// One-time environment: register the two magistrate wallet indexers on the
// sandbox container, then resolve the blockchain-tagged wallet repository.
beforeAll(async () => {
const initialEnv = await setUp();
initialEnv.sandbox.app
.bind<Contracts.State.WalletIndexerIndex>(Container.Identifiers.WalletRepositoryIndexerIndex)
.toConstantValue({
name: MagistrateIndex.Businesses,
indexer: businessIndexer,
autoIndex: true,
});
initialEnv.sandbox.app
.bind<Contracts.State.WalletIndexerIndex>(Container.Identifiers.WalletRepositoryIndexerIndex)
.toConstantValue({
name: MagistrateIndex.Bridgechains,
indexer: bridgechainIndexer,
autoIndex: true,
});
// TODO: why does this have to be rebound here?
initialEnv.sandbox.app.rebind(Container.Identifiers.WalletRepository).to(WalletRepository);
walletRepo = initialEnv.sandbox.app.getTagged(Container.Identifiers.WalletRepository, "state", "blockchain");
});
// Start every test from an empty repository.
beforeEach(() => {
walletRepo.reset();
});
afterEach(() => {
jest.clearAllMocks();
jest.restoreAllMocks();
});
describe("Wallet Repository", () => {
it("should throw if indexers are already registered", () => {
expect(() => walletRepo.initialize()).toThrow("The wallet index is already registered: addresses");
});
it("should create a wallet", () => {
const wallet = walletRepo.createWallet("abcd");
expect(wallet.getAddress()).toEqual("abcd");
expect(wallet).toBeInstanceOf(Wallet);
});
it("should be able to look up indexers", () => {
// The six core indexers plus the two magistrate ones bound in beforeAll.
const expected = [
"addresses",
"publicKeys",
"usernames",
"resignations",
"locks",
"ipfs",
"businesses",
"bridgechains",
];
expect(walletRepo.getIndexNames()).toEqual(expected);
expect(walletRepo.getIndex("addresses").indexer).toEqual(addressesIndexer);
expect(walletRepo.getIndex("publicKeys").indexer).toEqual(publicKeysIndexer);
expect(walletRepo.getIndex("usernames").indexer).toEqual(usernamesIndexer);
expect(walletRepo.getIndex("resignations").indexer).toEqual(resignationsIndexer);
expect(walletRepo.getIndex("locks").indexer).toEqual(locksIndexer);
expect(walletRepo.getIndex("ipfs").indexer).toEqual(ipfsIndexer);
expect(() => walletRepo.getIndex("iDontExist")).toThrow();
});
it("indexing should keep indexers in sync", () => {
const address = "ATtEq2tqNumWgR9q9zF6FjGp34Mp5JpKGp";
const wallet = walletRepo.createWallet(address);
const publicKey = "03720586a26d8d49ec27059bd4572c49ba474029c3627715380f4df83fb431aece";
wallet.setPublicKey(publicKey);
// Setting the key on the wallet alone does not update the address index;
// only index() brings all indexes back in sync.
expect(walletRepo.findByAddress(address)).not.toEqual(wallet);
walletRepo.getIndex("publicKeys").set(publicKey, wallet);
expect(walletRepo.findByPublicKey(publicKey).getPublicKey()).toBeDefined();
expect(walletRepo.findByPublicKey(publicKey)).toEqual(wallet);
expect(walletRepo.findByAddress(address).getPublicKey()).toBeUndefined();
expect(walletRepo.findByAddress(address)).not.toEqual(wallet);
walletRepo.index(wallet);
expect(walletRepo.findByAddress(address).getPublicKey()).toBe(publicKey);
expect(walletRepo.findByAddress(address)).toEqual(wallet);
});
it("should get and set wallets by address", () => {
const address = "abcd";
const wallet = walletRepo.createWallet(address);
/**
* TODO: check this is desired behaviour
* after creation a wallet is unknown to indexers (until index() is called)
*/
expect(walletRepo.has(address)).toBeFalse();
/**
* TODO: check this is desired behaviour
* findByAddress and findByPublicKey have the effect of indexing (so the previous check now passes)
* findByUsername does not have this side-effect, so they should probably have different names.
*/
expect(walletRepo.findByAddress(address)).toEqual(wallet);
expect(walletRepo.has(address)).toBeTrue();
expect(walletRepo.findByIndex("addresses", address)).toEqual(wallet);
const nonExistingAddress = "abcde";
expect(walletRepo.has(address)).toBeTrue();
expect(walletRepo.has(nonExistingAddress)).toBeFalse();
expect(walletRepo.hasByAddress(address)).toBeTrue();
expect(walletRepo.hasByAddress(nonExistingAddress)).toBeFalse();
expect(walletRepo.hasByIndex("addresses", address)).toBeTrue();
expect(walletRepo.hasByIndex("addresses", nonExistingAddress)).toBeFalse();
expect(walletRepo.allByAddress()).toEqual([wallet]);
expect(walletRepo.allByIndex("addresses")).toEqual([wallet]);
});
/**
* TODO: Check this is desired behaviour.
* findByUsername (and corresponding findByIndex/findByIndexes) methods throw if it doesn't exist,
* where as findByAddress and findByPublicKey can be used for wallet creation.
*/
it("should create a wallet if one is not found during address lookup", () => {
expect(() => walletRepo.findByAddress("hello")).not.toThrow();
expect(walletRepo.findByAddress("iDontExist")).toBeInstanceOf(Wallet);
expect(walletRepo.has("hello")).toBeTrue();
expect(walletRepo.hasByAddress("iDontExist")).toBeTrue();
/**
* TODO: check this is desired behaviour
* Looking up a non-existing address by findByAddress creates a wallet.
* However looking up a non-existing address using findByIndex() does not.
*/
const errorMessage = "Wallet iAlsoDontExist doesn't exist in index addresses";
expect(() => walletRepo.findByIndex("addresses", "iAlsoDontExist")).toThrow(errorMessage);
});
it("should get and set wallets by public key", () => {
const wallet = walletRepo.createWallet("abcde");
const publicKey = "02337416a26d8d49ec27059bd0589c49bb474029c3627715380f4df83fb431aece";
walletRepo.getIndex("publicKeys").set(publicKey, wallet);
expect(walletRepo.findByPublicKey(publicKey)).toEqual(wallet);
expect(walletRepo.findByIndex("publicKeys", publicKey)).toEqual(wallet);
const nonExistingPublicKey = "98727416a26d8d49ec27059bd0589c49bb474029c3627715380f4df83fb431aece";
expect(walletRepo.has(publicKey)).toBeTrue();
expect(walletRepo.has(nonExistingPublicKey)).toBeFalse();
expect(walletRepo.hasByPublicKey(publicKey)).toBeTrue();
expect(walletRepo.hasByPublicKey(nonExistingPublicKey)).toBeFalse();
expect(walletRepo.hasByIndex("publicKeys", publicKey)).toBeTrue();
expect(walletRepo.hasByIndex("publicKeys", nonExistingPublicKey)).toBeFalse();
expect(walletRepo.allByPublicKey()).toEqual([wallet]);
expect(walletRepo.allByIndex("publicKeys")).toEqual([wallet]);
});
it("should create a wallet if one is not found during public key lookup", () => {
const firstNotYetExistingPublicKey = "0235d486fea0193cbe77e955ab175b8f6eb9eaf784de689beffbd649989f5d6be3";
expect(() => walletRepo.findByPublicKey(firstNotYetExistingPublicKey)).not.toThrow();
expect(walletRepo.findByPublicKey(firstNotYetExistingPublicKey)).toBeInstanceOf(Wallet);
/**
* TODO: check this is desired behaviour
* Looking up a non-existing publicKey by findByPublicKey creates a wallet.
* However looking up a non-existing publicKey using findByIndex() does not.
*/
const secondNotYetExistingPublicKey = "03a46f2547d20b47003c1c376788db5a54d67264df2ae914f70bf453b6a1fa1b3a";
expect(() => walletRepo.findByIndex("publicKeys", secondNotYetExistingPublicKey)).toThrow();
});
it("should get and set wallets by username", () => {
const username = "testUsername";
const wallet = walletRepo.createWallet("abcdef");
/**
* TODO: check this is desired behaviour
* A username hasn't been set on the wallet here, it's been set on the indexer.
* It means it's possible to look up a wallet by a username which is set on the WalletIndex and not the Wallet itself - this should probably throw.
*/
walletRepo.getIndex("usernames").set(username, wallet);
expect(walletRepo.findByUsername(username)).toEqual(wallet);
expect(walletRepo.findByIndex("usernames", username)).toEqual(wallet);
const nonExistingUsername = "iDontExistAgain";
expect(walletRepo.has(username)).toBeTrue();
expect(walletRepo.has(nonExistingUsername)).toBeFalse();
expect(walletRepo.hasByUsername(username)).toBeTrue();
expect(walletRepo.hasByUsername(nonExistingUsername)).toBeFalse();
expect(walletRepo.hasByIndex("usernames", username)).toBeTrue();
expect(walletRepo.hasByIndex("usernames", nonExistingUsername)).toBeFalse();
expect(walletRepo.allByUsername()).toEqual([wallet]);
expect(walletRepo.allByIndex("usernames")).toEqual([wallet]);
});
it("should be able to index forgotten wallets", () => {
// Indexing the same wallet twice must be idempotent.
const wallet1 = walletRepo.createWallet("wallet1");
walletRepo.index(wallet1);
expect(walletRepo.has("wallet1")).toBeTrue();
walletRepo.index(wallet1);
expect(walletRepo.has("wallet1")).toBeTrue();
});
it("should do nothing if forgotten wallet does not exist", () => {
const wallet1 = walletRepo.createWallet("wallet1");
walletRepo.index(wallet1);
// Clearing the public key must not make unrelated addresses appear.
// @ts-ignore
wallet1.publicKey = undefined;
expect(walletRepo.has("wallet2")).toBeFalse();
});
it("should index array of wallets using different indexers", () => {
const wallets: Contracts.State.Wallet[] = [];
const walletAddresses: string[] = [];
for (let i = 0; i < 6; i++) {
const walletAddress = `wallet${i}`;
walletAddresses.push(walletAddress);
const wallet = walletRepo.createWallet(walletAddress);
wallets.push(wallet);
}
for (const wallet of wallets) {
walletRepo.index(wallet);
}
walletAddresses.forEach((address) => expect(walletRepo.has(address)).toBeTrue());
// Spread the wallets across every secondary index, then re-index all of
// them; every address must remain resolvable.
const publicKey = "02511f16ffb7b7e9afc12f04f317a11d9644e4be9eb5a5f64673946ad0f6336f34";
walletRepo.getIndex("publicKeys").set(publicKey, wallets[1]);
walletRepo.getIndex("usernames").set("username", wallets[2]);
walletRepo.getIndex("resignations").set("resign", wallets[3]);
walletRepo.getIndex("locks").set("lock", wallets[4]);
walletRepo.getIndex("ipfs").set("ipfs", wallets[5]);
wallets.forEach((wallet) => walletRepo.index(wallet));
walletAddresses.forEach((address) => expect(walletRepo.has(address)).toBeTrue());
});
it("should get the nonce of a wallet", () => {
const wallet1 = walletRepo.createWallet("wallet1");
wallet1.setNonce(Utils.BigNumber.make(100));
wallet1.setPublicKey("02511f16ffb7b7e9afc12f04f317a11d9644e4be9eb5a5f64673946ad0f6336f34");
walletRepo.index(wallet1);
expect(walletRepo.getNonce(wallet1.getPublicKey()!)).toEqual(Utils.BigNumber.make(100));
});
it("should return 0 nonce if there is no wallet", () => {
const publicKey = "03c075494ad044ab8c0b2dc7ccd19f649db844a4e558e539d3ac2610c4b90a5139";
expect(walletRepo.getNonce(publicKey)).toEqual(Utils.BigNumber.ZERO);
});
it("should throw when looking up a username which doesn't exist", () => {
expect(() => walletRepo.findByUsername("iDontExist")).toThrowError(
"Wallet iDontExist doesn't exist in index usernames",
);
expect(() => walletRepo.findByIndex("usernames", "iDontExist")).toThrowError(
"Wallet iDontExist doesn't exist in index usernames",
);
});
describe("allByIndex", () => {
it("should return values on index", () => {
const wallet = walletRepo.findByAddress("address");
expect(walletRepo.allByIndex("addresses")).toEqual([wallet]);
});
});
describe("setOnIndex", () => {
it("should set wallet on index", () => {
// The same wallet may appear under multiple keys of one index.
const wallet = walletRepo.findByAddress("address");
walletRepo.setOnIndex("addresses", "address2", wallet);
expect(walletRepo.allByIndex("addresses")).toEqual([wallet, wallet]);
});
});
describe("forgetOnIndex", () => {
it("should forget wallet on index", () => {
const wallet = walletRepo.findByAddress("address");
expect(walletRepo.allByIndex("addresses")).toEqual([wallet]);
walletRepo.forgetOnIndex("addresses", "address");
expect(walletRepo.allByIndex("addresses")).toEqual([]);
});
});
});
import Common from '../core/common';
import IterableStringMap from '../core/iterable';
import Subscriber from '../core/subscriber';
import Logger from '../logger/logger';
// File extensions accepted as image texture sources.
export const TextureImageExtensions = ['jpg', 'jpeg', 'png'];
// File extensions accepted as video texture sources.
export const TextureVideoExtensions = ['ogv', 'webm', 'mp4'];
// All supported texture file extensions (images + videos).
export const TextureExtensions = TextureImageExtensions.concat(TextureVideoExtensions);
// Discriminates how a Texture was sourced: raw pixel data, a DOM element, or a url.
export enum TextureSourceType {
    Data = 0,
    Element = 1,
    Url = 2,
}
// Filtering presets; see Texture.setFiltering for how each maps to GL parameters.
export enum TextureFilteringType {
    MipMap = 'mipmap',
    Linear = 'linear',
    Nearest = 'nearest',
}
// Raw pixel payload with its dimensions (uploaded as RGBA in Texture.update).
export interface ITextureData {
    data: Uint8Array;
    width: number;
    height: number;
}
// Options accepted when creating/loading a texture. One of url / element /
// data (the latter together with width + height) describes the source.
export interface ITextureOptions {
    url?: string;
    element?: HTMLCanvasElement | HTMLImageElement | HTMLVideoElement | HTMLElement | Element;
    data?: Uint8Array;
    width?: number;
    height?: number;
    filtering?: TextureFilteringType;
    repeat?: boolean;
    UNPACK_FLIP_Y_WEBGL?: boolean;
    UNPACK_PREMULTIPLY_ALPHA_WEBGL?: number;
    TEXTURE_WRAP_S?: number; // gl.REPEAT | gl.CLAMP_TO_EDGE | gl.MIRRORED_REPEAT;
    TEXTURE_WRAP_T?: number; // gl.REPEAT | gl.CLAMP_TO_EDGE | gl.MIRRORED_REPEAT;
}
// A named texture source as provided by the user.
export interface ITextureInput {
    key: string;
    url: string | HTMLCanvasElement | HTMLImageElement | HTMLVideoElement | Element | ITextureData;
    options?: ITextureOptions;
}
/**
 * Type guard for raw pixel payloads.
 * Guards against null/undefined and primitives first: the `in` operator
 * throws a TypeError when its right-hand side is not an object (and callers
 * such as Texture.getTextureOptions can pass the null result of querySelector).
 */
export function isTextureData(object: any): object is ITextureData {
    return !!object && typeof object === 'object' && 'data' in object && 'width' in object && 'height' in object;
}
// GL texture wrapper object for keeping track of a global set of textures, keyed by a unique user-defined name
export class Texture extends Subscriber {
    key: string;
    index: number; // texture unit index, used with gl.TEXTURE0 + index
    options: ITextureOptions;
    workpath: string;
    width: number;
    height: number;
    texture: WebGLTexture;
    source: string | HTMLCanvasElement | HTMLImageElement | HTMLVideoElement | Element | Uint8Array;
    sourceType: TextureSourceType;
    filtering: TextureFilteringType;
    url: string;
    valid: boolean = false; // true once gl.createTexture has succeeded
    dirty: boolean = false;
    animated: boolean = false; // true while a video source is playing
    powerOf2: boolean = false;
    /**
     * Creates the GL texture immediately (with a 1x1 transparent placeholder)
     * so the texture unit can safely be sampled before a real source loads.
     */
    constructor(
        gl: WebGLRenderingContext | WebGL2RenderingContext,
        key: string,
        index: number,
        options: ITextureOptions = { filtering: TextureFilteringType.Linear },
        workpath?: string,
    ) {
        super();
        this.key = key;
        this.index = index;
        this.options = options;
        this.workpath = workpath;
        this.create(gl);
    }
    // True for positive powers of two (WebGL1 needs POT sizes for mipmaps/repeat).
    static isPowerOf2(value: number): boolean {
        // `value > 0` guards 0, for which the bit trick alone would return true.
        return value > 0 && (value & (value - 1)) === 0;
    }
    static isSafari(): boolean {
        return /^((?!chrome|android).)*safari/i.test(navigator.userAgent);
    }
    // Checks the path extension (query string ignored) against the supported extensions.
    static isTextureUrl(text: string): boolean {
        // `!!` keeps the declared boolean return type even for an empty string.
        return !!text && (/\.(jpg|jpeg|png|ogv|webm|mp4)$/i).test(text.split('?')[0]);
    }
    static isTexture(
        urlElementOrData: string | HTMLCanvasElement | HTMLImageElement | HTMLVideoElement | Element | ITextureData,
    ): boolean {
        const options = Texture.getTextureOptions(urlElementOrData);
        // getTextureOptions returns null (not undefined) for unsupported input,
        // so both nullish values must be rejected here.
        return options !== undefined && options !== null;
    }
    static getMaxTextureSize(gl: WebGLRenderingContext | WebGL2RenderingContext): number {
        return gl.getParameter(gl.MAX_TEXTURE_SIZE);
    }
    /**
     * Normalizes a texture source (url, CSS selector, element or raw data) into
     * ITextureOptions; url query parameters (filtering, repeat, ...) are parsed
     * into options too. Returns null when the input is not usable.
     */
    static getTextureOptions(
        urlElementOrData: string | HTMLCanvasElement | HTMLImageElement | HTMLVideoElement | Element | ITextureData,
        options: ITextureOptions = {}
    ): ITextureOptions {
        if (typeof urlElementOrData === 'string' && urlElementOrData !== '') {
            if (Texture.isTextureUrl(urlElementOrData)) {
                options.url = urlElementOrData;
                if (urlElementOrData.indexOf('?') !== -1) {
                    options = urlElementOrData.split('?')[1].split('&').reduce(function (prev: ITextureOptions, curr) {
                        const params = curr.split('=');
                        const key = decodeURIComponent(params[0]);
                        const value = decodeURIComponent(params[1]);
                        switch (key) {
                            case 'filtering':
                                prev[key] = value as TextureFilteringType;
                                break;
                            case 'repeat':
                            case 'UNPACK_FLIP_Y_WEBGL':
                                // Boolean('false') === true, so explicit falsy
                                // spellings have to be compared by hand.
                                prev[key] = !(value === 'false' || value === '0' || value === '');
                                break;
                            case 'UNPACK_PREMULTIPLY_ALPHA_WEBGL':
                            case 'TEXTURE_WRAP_S':
                            case 'TEXTURE_WRAP_T':
                                prev[key] = Number(value);
                                break;
                        }
                        return prev;
                    }, options);
                }
                return options;
            }
            // A non-url string is interpreted as a CSS selector.
            // `typeof` guard avoids a ReferenceError outside the browser.
            if (typeof document !== 'undefined' && document) {
                urlElementOrData = document.querySelector(urlElementOrData);
                // Logger.log(urlElementOrData);
            }
        }
        if (urlElementOrData instanceof HTMLCanvasElement || urlElementOrData instanceof HTMLImageElement || urlElementOrData instanceof HTMLVideoElement) {
            options.element = urlElementOrData;
            return options;
        } else if (urlElementOrData && isTextureData(urlElementOrData)) {
            // Truthiness guard matters: querySelector above may have yielded null,
            // and the `in` checks inside isTextureData throw on non-objects.
            options.data = urlElementOrData.data;
            options.width = urlElementOrData.width;
            options.height = urlElementOrData.height;
            return options;
        } else {
            return null;
        }
    }
    /**
     * Creates the GL texture object and uploads a 1x1 transparent pixel so we
     * can safely render while waiting for the real source to load.
     * See: http://stackoverflow.com/questions/19722247/webgl-wait-for-texture-to-load
     */
    create(
        gl: WebGLRenderingContext | WebGL2RenderingContext
    ): void {
        this.texture = gl.createTexture();
        if (this.texture) {
            this.valid = true;
        }
        // [255, 255, 255, 255] would give an opaque white placeholder instead.
        this.setData(gl, 1, 1, new Uint8Array([0, 0, 0, 0]), this.options);
        // this.bindTexture();
        // this.load(options);
    }
    /**
     * (Re)loads the texture from the given options: by url, element or raw data.
     * Rejects (with this texture) when none of the three sources is provided.
     */
    load(
        gl: WebGLRenderingContext | WebGL2RenderingContext,
        options: ITextureOptions = {}
    ): Promise<Texture> {
        this.options = options;
        if (typeof options.url === 'string') {
            if (this.url === undefined || options.url !== this.url) {
                return this.setUrl(gl, options.url, options);
            } else {
                // Same url as the current one: nothing to reload.
                return Promise.resolve(this);
            }
        } else if (options.element) {
            return this.setElement(gl, options.element, options);
        } else if (options.data && options.width && options.height) {
            return this.setData(gl, options.width, options.height, options.data, options);
        } else {
            return Promise.reject(this);
        }
    }
    /**
     * Loads the texture from a url. Video urls create a looping, muted <video>
     * element (played on 'canplay'); image urls create an <img> element.
     * Resolves once the element has loaded and been uploaded to the GPU.
     */
    setUrl(
        gl: WebGLRenderingContext | WebGL2RenderingContext,
        url: string,
        options: ITextureOptions = {}
    ): Promise<Texture> {
        if (!this.valid) {
            return Promise.reject(this);
        }
        this.url = url; // save URL reference (will be overwritten when element is loaded below)
        this.source = url;
        this.sourceType = TextureSourceType.Url;
        this.options = Object.assign(this.options, options);
        const ext = url.split('?')[0].split('.').pop().toLowerCase();
        const isVideo = TextureVideoExtensions.indexOf(ext) !== -1;
        const src = Common.getResource(url, this.workpath);
        let element: HTMLVideoElement | HTMLImageElement;
        let promise: Promise<Texture>;
        if (isVideo) {
            Logger.log('GlslCanvas.setUrl video', src);
            element = document.createElement('video'); // new HTMLVideoElement();
            element.setAttribute('preload', 'auto');
            // element.setAttribute('autoplay', 'true');
            element.setAttribute('loop', 'true');
            element.setAttribute('muted', 'true');
            element.setAttribute('playsinline', 'true');
            // element.autoplay = true;
            element.loop = true;
            element.muted = true;
            /*
            if (!(Texture.isSafari() && /(?<!http|https):\//.test(url))) {
                element.crossOrigin = 'anonymous';
            }
            */
            // Attach load listeners (inside setElement) before assigning src,
            // so that load events cannot be missed.
            promise = this.setElement(gl, element, options);
            element.src = src;
            element.addEventListener('canplay', () => {
                (element as HTMLVideoElement).play();
            });
        } else {
            Logger.log('GlslCanvas.setUrl image', src);
            element = document.createElement('img'); // new HTMLImageElement();
            promise = this.setElement(gl, element, options);
            // NOTE(review): presumably Safari mishandles crossOrigin on data: uris;
            // kept as in the original — confirm before changing.
            if (!(Texture.isSafari() && url.slice(0, 5) === 'data:')) {
                element.crossOrigin = 'anonymous';
            }
            element.src = src;
        }
        return promise;
    }
    /**
     * Uses a DOM element (<canvas>, <img> or <video>) as the texture source.
     * A string argument is treated as a CSS selector. Resolves after the first
     * successful GPU upload; rejects on element load errors or unusable input.
     */
    setElement(
        gl: WebGLRenderingContext | WebGL2RenderingContext,
        element: HTMLCanvasElement | HTMLImageElement | HTMLVideoElement | HTMLElement | Element,
        options: ITextureOptions = {}
    ): Promise<Texture> {
        options = this.options = Object.assign(this.options, options);
        return new Promise((resolve, reject) => {
            const originalElement = element;
            // a string element is interpreted as a CSS selector
            if (typeof element === 'string') {
                element = document.querySelector(element);
            }
            if (element instanceof HTMLCanvasElement ||
                element instanceof HTMLImageElement ||
                element instanceof HTMLVideoElement) {
                this.source = element;
                this.sourceType = TextureSourceType.Element;
                if (element instanceof HTMLVideoElement) {
                    const video = element as HTMLVideoElement;
                    video.addEventListener('loadeddata', (event) => {
                        this.update(gl, options);
                        this.setFiltering(gl, options);
                        resolve(this);
                    });
                    video.addEventListener('error', (error) => {
                        reject(error);
                    });
                    video.load();
                } else if (element instanceof HTMLImageElement) {
                    element.addEventListener('load', () => {
                        this.update(gl, options);
                        this.setFiltering(gl, options);
                        resolve(this);
                    });
                    element.addEventListener('error', (error) => {
                        reject(error);
                    });
                } else {
                    // Canvas elements are ready immediately.
                    this.update(gl, options);
                    this.setFiltering(gl, options);
                    resolve(this);
                }
            } else {
                const message = `the 'element' parameter (\`element: ${JSON.stringify(originalElement)}\`) must be a CSS selector string, or a <canvas>, <image> or <video> object`;
                Logger.log(`Texture '${this.key}': ${message}`, options);
                reject(message);
            }
        });
    }
    /**
     * Uploads a raw pixel buffer of the given dimensions as the texture source.
     */
    setData(
        gl: WebGLRenderingContext | WebGL2RenderingContext,
        width: number,
        height: number,
        data: Uint8Array,
        options: ITextureOptions = {}
    ): Promise<Texture> {
        this.width = width;
        this.height = height;
        this.options = Object.assign(this.options, options);
        this.source = data;
        this.sourceType = TextureSourceType.Data;
        this.update(gl, options);
        this.setFiltering(gl, options);
        return Promise.resolve(this);
    }
    // Uploads current image or buffer to the GPU (can be used to update animated textures on the fly).
    // Binds this texture to its texture unit; width/height are refreshed from the element.
    update(
        gl: WebGLRenderingContext | WebGL2RenderingContext,
        options: ITextureOptions
    ): void {
        if (!this.valid) {
            return;
        }
        gl.activeTexture(gl.TEXTURE0 + this.index);
        gl.bindTexture(gl.TEXTURE_2D, this.texture);
        gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, (options.UNPACK_FLIP_Y_WEBGL === false ? 0 : 1));
        gl.pixelStorei(gl.UNPACK_PREMULTIPLY_ALPHA_WEBGL, options.UNPACK_PREMULTIPLY_ALPHA_WEBGL || 0);
        if (this.sourceType === TextureSourceType.Element) {
            if (this.source instanceof HTMLImageElement && this.source.naturalWidth && this.source.naturalHeight) {
                this.width = this.source.naturalWidth;
                this.height = this.source.naturalHeight;
                gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, this.source);
            } else if (this.source instanceof HTMLVideoElement && this.source.videoWidth && this.source.videoHeight) {
                this.width = this.source.videoWidth;
                this.height = this.source.videoHeight;
                gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, this.source);
            } else if (this.source instanceof HTMLCanvasElement) {
                this.width = this.source.width;
                this.height = this.source.height;
                gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, this.source);
            }
        } else if (this.sourceType === TextureSourceType.Data) {
            const imageBuffer: ArrayBufferView = this.source as ArrayBufferView;
            gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, this.width, this.height, 0, gl.RGBA, gl.UNSIGNED_BYTE, imageBuffer);
        }
        this.trigger('loaded', this);
    }
    // Re-uploads an animated (video) source; returns whether an upload happened.
    tryUpdate(gl: WebGLRenderingContext | WebGL2RenderingContext): boolean {
        let dirty = false;
        if (this.animated) {
            dirty = true;
            this.update(gl, this.options);
        }
        return dirty;
    }
    // Releases the GL texture and drops the source reference; the instance is invalid afterwards.
    destroy(gl: WebGLRenderingContext | WebGL2RenderingContext): void {
        if (!this.valid) {
            return;
        }
        gl.deleteTexture(this.texture);
        this.texture = null;
        this.source = null;
        this.valid = false;
    }
    /**
     * Applies wrap and filter parameters. Assumes this texture is currently
     * bound (update() binds it). Non-power-of-2 textures are forced to
     * CLAMP_TO_EDGE without mipmaps, per WebGL1 requirements.
     */
    setFiltering(
        gl: WebGLRenderingContext | WebGL2RenderingContext,
        options: ITextureOptions
    ): void {
        if (!this.valid) {
            return;
        }
        const powerOf2 = Texture.isPowerOf2(this.width) && Texture.isPowerOf2(this.height);
        let filtering = options.filtering || TextureFilteringType.MipMap;
        let wrapS = options.TEXTURE_WRAP_S || (options.repeat ? gl.REPEAT : gl.CLAMP_TO_EDGE);
        let wrapT = options.TEXTURE_WRAP_T || (options.repeat ? gl.REPEAT : gl.CLAMP_TO_EDGE);
        if (!powerOf2) {
            filtering = filtering === TextureFilteringType.MipMap ? TextureFilteringType.Linear : filtering;
            wrapS = wrapT = gl.CLAMP_TO_EDGE;
            if (options.repeat || options.TEXTURE_WRAP_S || options.TEXTURE_WRAP_T) {
                Logger.warn(`GlslCanvas: cannot repeat texture ${options.url} cause is not power of 2.`);
            }
        }
        this.powerOf2 = powerOf2;
        this.filtering = filtering;
        // this.bindTexture();
        // WebGL has strict requirements on non-power-of-2 textures:
        // No mipmaps and must clamp to edge
        // For power-of-2 textures, the following presets are available:
        // mipmap: linear blend from nearest mip
        // linear: linear blend from original image (no mips)
        // nearest: nearest pixel from original image (no mips, 'blocky' look)
        gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, wrapS);
        gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, wrapT);
        if (this.filtering === TextureFilteringType.MipMap) {
            gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR_MIPMAP_LINEAR); // TODO: use trilinear filtering by default instead?
            gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
            gl.generateMipmap(gl.TEXTURE_2D);
        } else if (this.filtering === TextureFilteringType.Nearest) {
            gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
            gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
        } else if (this.filtering === TextureFilteringType.Linear) {
            gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
            gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
        }
    }
}
export default class Textures extends IterableStringMap<Texture> {
count: number = 0;
dirty: boolean = false;
animated: boolean = false;
clean() {
Object.keys(this.values).forEach((key) => {
this.values[key].dirty = false;
});
this.dirty = false;
}
createOrUpdate(
gl: WebGLRenderingContext | WebGL2RenderingContext,
key: string,
urlElementOrData: string | HTMLCanvasElement | HTMLImageElement | HTMLVideoElement | Element | ITextureData,
index: number = 0,
options: ITextureOptions = {},
workpath: string,
): Promise<Texture> {
let texture;
const textureOptions = Texture.getTextureOptions(urlElementOrData, options);
texture = this.get(key);
if (!texture) {
texture = new Texture(gl, key, index + this.count, textureOptions, workpath);
this.count++;
this.set(key, texture);
}
if (textureOptions !== undefined) {
return texture.load(gl, textureOptions).then(
(texture) => {
if (texture.source instanceof HTMLVideoElement) {
const video = texture.source as HTMLVideoElement;
// Logger.log('video', video);
video.addEventListener('play', () => {
// Logger.log('play', texture.key);
texture.animated = true;
this.animated = true;
});
video.addEventListener('pause', () => {
// Logger.log('pause', texture.key);
texture.animated = false;
this.animated = this.reduce((flag: boolean, texture: Texture) => {
return flag || texture.animated;
}, false);
});
video.addEventListener('seeked', () => {
// Logger.log('seeked');
texture.update(gl, texture.options);
this.dirty = true;
});
// Logger.log('video');
/*
video.addEventListener('canplaythrough', () => {
// !!!
this.intervalID = setInterval(() => {
this.update(gl, options);
}, 15);
}, true);
video.addEventListener('ended', () => {
video.currentTime = 0;
video.play();
}, true);
*/
}
return texture;
}
);
} else {
return Promise.resolve(texture);
}
}
} | the_stack |
/// <reference types="riot-games-api" />
// Url constants for the Riot Games API endpoints. Presumably url templates for
// the different API versions (1.2 ... 2.5) — values live in the implementation;
// confirm against the module source.
export const BASE_URL: string;
export const URL_1_2: string;
export const URL_1_3: string;
export const URL_1_4: string;
export const URL_2_2: string;
export const URL_2_4: string;
export const URL_2_5: string;
/**
 * League of Legends platform regions accepted by the API wrappers below.
 */
export declare enum region_e {
    BR = 0,
    EUNE = 1,
    EUW = 2,
    KR = 3,
    LAN = 4,
    LAS = 5,
    NA = 6,
    OCE = 7,
    TR = 8,
    RU = 9,
    PBE = 10,
}
/**
 * Base API: holds the pool of API keys and performs the raw HTTP requests.
 * All request methods return Promises (the legacy "callback" wording in older
 * docs referred to a previous callback-based interface).
 */
export declare class API {
    private ApiKeys;
    private ApiKey;
    /**
     * @param {string[]} ApiKeys the pool of Riot API keys to rotate through
     */
    constructor(ApiKeys: string[]);
    /**
     * Change the Api Key for the next requests
     */
    private switchApiKey();
    /**
     * Send a request to the Riot Games Api and return the parsed json
     * @param {string} url request url
     * @param {string} method method(post / put / get)
     * @param {any} data body parameters
     * @returns {Promise<any>} resolves with the parsed JSON response
     */
    getJSON(url: string, method: string, data: any): Promise<any>;
    /**
     * Send a request to the Riot Games Api
     * @param {string} url request url
     * @param {string} method method(post / put / get)
     * @param {any} data body parameters
     * @param {string} prop optional property to extract from the response
     * @returns {Promise<any>} resolves with the (optionally extracted) response
     */
    request(url: string, method: string, data: any, prop?: string): Promise<any>;
    /**
     * get the API Key that is used for the requests
     * @return {string} the current API Key
     */
    getCurrentApiKey(): string;
    /**
     * set the API Keys
     * @param {string[]} ApiKeys the API Keys
     */
    setApikeys(ApiKeys: string[]): void;
}
/**
 * Tournament API: wraps the Riot tournament-provider endpoints.
 */
export declare class TournamentAPI extends API {
    constructor(...apiKeys: string[]);
    /**
     * create tournament Codes for a given tournament
     * @param {number} tournamentId the ID of the tournament
     * @param {number} count Number of codes you want
     * @param {RiotGamesAPI.TournamentProvider.TournamentCodeParameters} params Tournament Code parameters
     * @returns {Promise<number[]>} resolves with the created tournament codes
     */
    createTournamentCodes(tournamentId: number, count: number, params: RiotGamesAPI.TournamentProvider.TournamentCodeParameters): Promise<number[]>;
    /**
     * get tournament infos for a given tournament code
     * @param {string} tournamentCode Tournament Code
     * @returns {Promise<RiotGamesAPI.TournamentProvider.TournamentCodeDto>} resolves with the tournament infos
     */
    getTournamentByCode(tournamentCode: string): Promise<RiotGamesAPI.TournamentProvider.TournamentCodeDto>;
    /**
     * edit the tournament Code parameters for a given tournament Code
     * @param {string} tournamentCode Tournament Code to update
     * @param {RiotGamesAPI.TournamentProvider.TournamentCodeUpdateParameters} params parameters to edit
     * @returns {Promise<() => void>} resolves on success
     */
    editTournamentByCode(tournamentCode: string, params: RiotGamesAPI.TournamentProvider.TournamentCodeUpdateParameters): Promise<() => void>;
    /**
     * get the lobby events for a given tournament Code
     * @param {string} tournamentCode the tournament code to get the lobby events
     * @returns {Promise<RiotGamesAPI.TournamentProvider.LobbyEventDto>} resolves with the lobby events
     */
    getLobbyEventByCode(tournamentCode: string): Promise<RiotGamesAPI.TournamentProvider.LobbyEventDto>;
    /**
     * Register a new tournament provider
     * @param {region_e} region region where you want to register the provider
     * @param {string} url url of callback for the POST notifications
     * @returns {Promise<number>} resolves with the tournament provider ID
     */
    registerProvider(region: region_e, url: string): Promise<number>;
    /**
     * Register a new tournament
     * @param {string} name Name of tournament
     * @param {number} providerId Provider ID
     * @returns {Promise<number>} resolves with the tournament ID
     */
    registerTournament(name: string, providerId: number): Promise<number>;
}
/**
 * ClassicAPI: wraps the region-scoped Riot endpoints (champions, masteries,
 * leagues, static data, matches, stats, summoners and teams).
 * All methods return Promises.
 */
export declare class ClassicAPI extends API {
    private region;
    /**
     * ClassicAPI Constructor
     * @param {string[]} ApiKeys API Keys for the requests
     * @param {region_e} region region where you want to send requests
     */
    constructor(ApiKeys: string[], region: region_e);
    /**
     * Edit the consts for a valid url for the riot games api
     * @param {string} unparsedURL the URL to parse
     * @return {string} the Parsed URL
     */
    parseURL(unparsedURL: string): string;
    /**
     * get the region where requests are sent
     * @return {region_e} the current region
     */
    getRegion(): region_e;
    /**
     * set the region where you want to send requests
     * @param {region_e} region the region
     */
    setRegion(region: region_e): void;
    /**
     * get all champions of league of legends
     * @returns {Promise<RiotGamesAPI.Champion.ChampionListDto>} resolves with the champion list
     */
    getChampions(): Promise<RiotGamesAPI.Champion.ChampionListDto>;
    /**
     * get the champion for a given id
     * @param {number} id the champion id
     * @returns {Promise<RiotGamesAPI.Champion.ChampionDto>} resolves with the champion
     */
    getChampionById(id: number): Promise<RiotGamesAPI.Champion.ChampionDto>;
    /**
     * get the free to play champions
     * @returns {Promise<RiotGamesAPI.Champion.ChampionListDto>} resolves with the free champion list
     */
    getFreeToPlayChampions(): Promise<RiotGamesAPI.Champion.ChampionListDto>;
    /**
     * get Champion mastery of a player for a given champion ID
     * @param {number} summonerId summoner ID
     * @param {number} championId Champion ID
     * @returns {Promise<RiotGamesAPI.ChampionMastery.ChampionMasteryDto>} resolves with the champion mastery
     */
    getChampionMastery(summonerId: number, championId: number): Promise<RiotGamesAPI.ChampionMastery.ChampionMasteryDto>;
    /**
     * get all champion masteries for a given summoner
     * @param {number} summonerId Summoner ID
     * @returns {Promise<[RiotGamesAPI.ChampionMastery.ChampionMasteryDto]>} resolves with the champion masteries
     */
    getChampionMasteryBySummoner(summonerId: number): Promise<[RiotGamesAPI.ChampionMastery.ChampionMasteryDto]>;
    /**
     * get the mastery score of a summoner
     * @param {number} summonerId Summoner ID
     * @returns {Promise<number>} resolves with the mastery score
     */
    getChampionMasteryScore(summonerId: number): Promise<number>;
    /**
     * get The 3 best champion masteries
     * @param {number} summonerId Summoner ID
     * @returns {Promise<[RiotGamesAPI.ChampionMastery.ChampionMasteryDto]>} resolves with the top masteries
     */
    getTopChampionMastery(summonerId: any): Promise<[RiotGamesAPI.ChampionMastery.ChampionMasteryDto]>;
    /**
     * get the current game infos for a given summoner ID
     * @param {number} summonerId Summoner ID
     * @returns {Promise<RiotGamesAPI.CurrentGame.CurrentGameInfo>} resolves with the current game infos
     */
    getCurrentGame(summonerId: number): Promise<RiotGamesAPI.CurrentGame.CurrentGameInfo>;
    /**
     * get the featured games
     * @returns {Promise<RiotGamesAPI.FeaturedGames.FeaturedGames>} resolves with the featured games
     */
    getFeaturedGame(): Promise<RiotGamesAPI.FeaturedGames.FeaturedGames>;
    /**
     * get the recents games for a given Summoner ID
     * @param {number} summonerId Summoner ID
     * @returns {Promise<RiotGamesAPI.Game.RecentGamesDto>} resolves with the recent games
     */
    getRecentGames(summonerId: number): Promise<RiotGamesAPI.Game.RecentGamesDto>;
    /**
     * Get League infos of a summoner
     * @param {number} summonerId Summoner ID
     * @returns {Promise<RiotGamesAPI.League.LeagueDto[]>} resolves with the league infos
     */
    getLeagueBySummonerId(summonerId: number): Promise<RiotGamesAPI.League.LeagueDto[]>;
    /**
     * get League entries of a summoner
     * @param {number} summonerId Summoner ID
     * @returns {Promise<RiotGamesAPI.League.LeagueDto[]>} resolves with the league entries
     */
    getLeagueBySummonerIdEntry(summonerId: number): Promise<RiotGamesAPI.League.LeagueDto[]>;
    /**
     * get league infos by team
     * @param {string} teamId Team ID
     * @returns {Promise<RiotGamesAPI.League.LeagueDto[]>} resolves with the league infos
     */
    getLeagueByTeamId(teamId: string): Promise<RiotGamesAPI.League.LeagueDto[]>;
    /**
     * get league entries by team
     * @param {string} teamId Team ID
     * @returns {Promise<RiotGamesAPI.League.LeagueDto[]>} resolves with the league entries
     */
    getLeagueByTeamIdEntry(teamId: string): Promise<RiotGamesAPI.League.LeagueDto[]>;
    /**
     * get Challengers in SOLO Queue
     * @returns {Promise<RiotGamesAPI.League.LeagueDto>} resolves with the challenger league
     */
    getChallengers_SOLO(): Promise<RiotGamesAPI.League.LeagueDto>;
    /**
     * get Challengers Teams in 3x3
     * @returns {Promise<RiotGamesAPI.League.LeagueDto>} resolves with the challenger league
     */
    getChallengers_3x3(): Promise<RiotGamesAPI.League.LeagueDto>;
    /**
     * get Challengers Teams in 5x5
     * @returns {Promise<RiotGamesAPI.League.LeagueDto>} resolves with the challenger league
     */
    getChallengers_5x5(): Promise<RiotGamesAPI.League.LeagueDto>;
    /**
     * get Masters in Solo Queue
     * @returns {Promise<RiotGamesAPI.League.LeagueDto>} resolves with the master league
     */
    getMasters_SOLO(): Promise<RiotGamesAPI.League.LeagueDto>;
    /**
     * get Master Teams in 3x3
     * @returns {Promise<RiotGamesAPI.League.LeagueDto>} resolves with the master league
     */
    getMasters_3x3(): Promise<RiotGamesAPI.League.LeagueDto>;
    /**
     * get Master Teams in 5x5
     * @returns {Promise<RiotGamesAPI.League.LeagueDto>} resolves with the master league
     */
    getMasters_5x5(): Promise<RiotGamesAPI.League.LeagueDto>;
    /**
     * get Champions (static data)
     * @returns {Promise<RiotGamesAPI.LolStaticData.ChampionListDto>} resolves with the champion static data
     */
    staticDataChampions(): Promise<RiotGamesAPI.LolStaticData.ChampionListDto>;
    /**
     * get static data by champion ID
     * @param {number} championsId Champion ID
     * @returns {Promise<RiotGamesAPI.LolStaticData.ChampionDto>} resolves with the champion static data
     */
    staticDataChampionById(championsId: number): Promise<RiotGamesAPI.LolStaticData.ChampionDto>;
    /**
     * get League of Legends Items
     * @returns {Promise<RiotGamesAPI.LolStaticData.ItemListDto>} resolves with the item list
     */
    staticDataItems(): Promise<RiotGamesAPI.LolStaticData.ItemListDto>;
    /**
     * Get item infos by ID
     * @param {number} itemId item ID
     * @returns {Promise<RiotGamesAPI.LolStaticData.ItemDto>} resolves with the item infos
     */
    staticDataItemById(itemId: number): Promise<RiotGamesAPI.LolStaticData.ItemDto>;
    /**
     * get league of legends language strings
     * @returns {Promise<RiotGamesAPI.LolStaticData.LanguageStringsDto>} resolves with the language strings
     */
    staticDataLanguagesStrings(): Promise<RiotGamesAPI.LolStaticData.LanguageStringsDto>;
    /**
     * get league of legends languages
     * @returns {Promise<string[]>} resolves with the language codes
     */
    staticDataLanguages(): Promise<string[]>;
    /**
     * get Map data
     * @returns {Promise<RiotGamesAPI.LolStaticData.MapDataDto>} resolves with the map data
     */
    staticDataMap(): Promise<RiotGamesAPI.LolStaticData.MapDataDto>;
    /**
     * get all masteries
     * @returns {Promise<RiotGamesAPI.LolStaticData.MasteryListDto>} resolves with the mastery list
     */
    staticDataMastery(): Promise<RiotGamesAPI.LolStaticData.MasteryListDto>;
    /**
     * get static data by mastery ID
     * @param {number} masteryId Mastery ID
     * @returns {Promise<RiotGamesAPI.LolStaticData.MasteryDto>} resolves with the mastery static data
     */
    staticDataMasteryById(masteryId: number): Promise<RiotGamesAPI.LolStaticData.MasteryDto>;
    /**
     * get realm data (cdn/dd versions)
     * @returns {Promise<RiotGamesAPI.LolStaticData.RealmDto>} resolves with the realm data
     */
    staticDataRealm(): Promise<RiotGamesAPI.LolStaticData.RealmDto>;
    /**
     * get all runes
     * @returns {Promise<RiotGamesAPI.LolStaticData.RuneListDto>} resolves with the rune list
     */
    staticDataRunes(): Promise<RiotGamesAPI.LolStaticData.RuneListDto>;
    /**
     * get rune by Rune ID
     * @param {number} runeId Rune ID
     * @returns {Promise<RiotGamesAPI.LolStaticData.RuneDto>} resolves with the rune
     */
    staticDataRuneById(runeId: number): Promise<RiotGamesAPI.LolStaticData.RuneDto>;
    /**
     * get all summoner spells
     * @returns {Promise<RiotGamesAPI.LolStaticData.SummonerSpellListDto>} resolves with the summoner spell list
     */
    staticDataSummonerSpells(): Promise<RiotGamesAPI.LolStaticData.SummonerSpellListDto>;
    /**
     * get summoner spell by summoner spell ID
     * @param {number} summonerSpellId Summoner spell ID
     * @returns {Promise<RiotGamesAPI.LolStaticData.SummonerSpellDto>} resolves with the summoner spell
     */
    staticDataSummonSpellById(summonerSpellId: number): Promise<RiotGamesAPI.LolStaticData.SummonerSpellDto>;
    /**
     * get league of legends versions
     * @returns {Promise<string[]>} resolves with the version strings
     */
    staticDataVersion(): Promise<string[]>;
    /**
     * get league of legends status
     * @returns {Promise<RiotGamesAPI.LolStatus.Shard[]>} resolves with the status shards
     */
    getStatus(): Promise<RiotGamesAPI.LolStatus.Shard[]>;
    /**
     * get status for a given region
     * @param {region_e} region region
     * @returns {Promise<RiotGamesAPI.LolStatus.Shard>} resolves with the region status
     */
    getStatusByRegion(region: region_e): Promise<RiotGamesAPI.LolStatus.Shard>;
    /**
     * get match infos for a given match ID
     * @param {number} matchId Match ID
     * @returns {Promise<RiotGamesAPI.Match.MatchDetail>} resolves with the match infos
     */
    getMatch(matchId: number): Promise<RiotGamesAPI.Match.MatchDetail>;
    /**
     * get all matches for a given tournament code
     * @param {string} tournamentCode Tournament Code
     * @returns {Promise<number[]>} resolves with the match IDs
     */
    getMatchIdsByTournamentCode(tournamentCode: string): Promise<number[]>;
    /**
     * get match by ID in a tournament
     * @param {number} matchId Match ID
     * @returns {Promise<RiotGamesAPI.Match.MatchDetail>} resolves with the match infos
     */
    getMatchForTournament(matchId: number): Promise<RiotGamesAPI.Match.MatchDetail>;
    /**
     * get match list of a summoner
     * @param {number} summonerId Summoner ID
     * @returns {Promise<RiotGamesAPI.MatchList.MatchList>} resolves with the match list
     */
    getMatchList(summonerId: number): Promise<RiotGamesAPI.MatchList.MatchList>;
    /**
     * get ranked stats of summoner
     * @param {number} summonerId Summoner ID
     * @returns {Promise<RiotGamesAPI.Stats.RankedStatsDto>} resolves with the ranked stats
     */
    getStatsRanked(summonerId: number): Promise<RiotGamesAPI.Stats.RankedStatsDto>;
    /**
     * get summary ranked stats of summoner
     * @param {number} summonerId Summoner ID
     * @returns {Promise<RiotGamesAPI.Stats.PlayerStatsSummaryListDto>} resolves with the stats summary
     */
    getStatsSummary(summonerId: number): Promise<RiotGamesAPI.Stats.PlayerStatsSummaryListDto>;
    /**
     * get summoner infos by Summoner Name
     * @param {string} summonerName Summoner Name
     * @returns {Promise<RiotGamesAPI.Summoner.SummonerDto>} resolves with the summoner infos
     */
    getSummonerByName(summonerName: string): Promise<RiotGamesAPI.Summoner.SummonerDto>;
    /**
     * get summoner infos by summoner ID
     * @param {number} summonerId Summoner ID
     * @returns {Promise<RiotGamesAPI.Summoner.SummonerDto>} resolves with the summoner infos
     */
    getSummonerById(summonerId: number): Promise<RiotGamesAPI.Summoner.SummonerDto>;
    /**
     * get masteries of a summoner
     * @param {number} summonerId Summoner ID
     * @returns {Promise<RiotGamesAPI.Summoner.MasteryPagesDto>} resolves with the mastery pages
     */
    getSummonerMasteries(summonerId: number): Promise<RiotGamesAPI.Summoner.MasteryPagesDto>;
    /**
     * get the Summoner Name of a summoner ID
     * @param {number} summonerId Summoner ID
     * @returns {Promise<string>} resolves with the summoner name
     */
    getSummonerName(summonerId: number): Promise<string>;
    /**
     * get the runes of a summoner
     * @param {number} summonerId Summoner ID
     * @returns {Promise<RiotGamesAPI.Summoner.RunePagesDto>} resolves with the rune pages
     */
    getSummonerRunes(summonerId: number): Promise<RiotGamesAPI.Summoner.RunePagesDto>;
    /**
     * get teams of a summoner
     * @param {number} summonerId Summoner ID
     * @returns {Promise<RiotGamesAPI.Team.TeamDto[]>} resolves with the teams
     */
    getTeamsBySummoner(summonerId: number): Promise<RiotGamesAPI.Team.TeamDto[]>;
    /**
     * get Team infos by Team ID
     * @param {string} teamId Team ID
     * @returns {Promise<RiotGamesAPI.Team.TeamDto>} resolves with the team infos
     */
    getTeamById(teamId: string): Promise<RiotGamesAPI.Team.TeamDto>;
}
declare module "love.audio" {
import { File, FileData } from "love.filesystem";
import { Decoder, SoundData } from "love.sound";
/**
* Gets a list of the names of the currently enabled effects.
*
* @returns The list of the names of the currently enabled effects.
* @link [love.audio.getActiveEffects](https://love2d.org/wiki/love.audio.getActiveEffects)
* @since 11.0
*/
function getActiveEffects(): string[];
/**
* Gets the current number of simultaneously playing sources.
*
* @returns The current number of simultaneously playing sources.
* @link [love.audio.getActiveSourceCount](https://love2d.org/wiki/love.audio.getActiveSourceCount)
* @since 11.0
*/
function getActiveSourceCount(): number;
/**
 * Returns the distance attenuation model.
 *
 * @returns The current distance model. Default is `inverseclamped`.
 * @link [love.audio.getDistanceModel](https://love2d.org/wiki/love.audio.getDistanceModel)
 * @since 0.8.0
 */
function getDistanceModel(): DistanceModel;
/**
 * Gets the current global scale factor for velocity-based doppler effects.
 * @returns The current doppler scale factor. Default is `1`.
 * @link [love.audio.getDopplerScale](https://love2d.org/wiki/love.audio.getDopplerScale)
 * @since 0.9.2
 */
function getDopplerScale(): number;
/**
 * Gets the settings associated with an effect.
 *
 * @param name The name of the effect.
 * @returns The settings associated with the effect, or `undefined` if no effect with that name exists.
 * @link [love.audio.getEffect](https://love2d.org/wiki/love.audio.getEffect)
 * @since 11.0
 */
function getEffect(name: string): EffectSettings | undefined;
/**
 * Gets the maximum number of active effects supported by the system.
 *
 * @returns The maximum number of active effects.
 * @link [love.audio.getMaxSceneEffects](https://love2d.org/wiki/love.audio.getMaxSceneEffects)
 * @since 11.0
 */
function getMaxSceneEffects(): number;
/**
 * Gets the maximum number of active Effects in a single Source object, that the system can support.
 *
 * @returns The maximum number of active Effects per Source.
 * @link [love.audio.getMaxSourceEffects](https://love2d.org/wiki/love.audio.getMaxSourceEffects)
 * @since 11.0
 */
function getMaxSourceEffects(): number;
/**
 * Returns the orientation of the listener.
 *
 * ```ts
 * const [fx, fy, fz, ux, uy, uz] = love.audio.getOrientation();
 * ```
 *
 * @returns The X, Y and Z components of the Forward vector of the listener orientation and the Up vector of the listener orientation.
 * @link [love.audio.getOrientation](https://love2d.org/wiki/love.audio.getOrientation)
 */
function getOrientation(): LuaMultiReturn<[fx: number, fy: number, fz: number, ux: number, uy: number, uz: number]>;
/**
 * Returns the position of the listener. Only works for mono sources.
 *
 * @returns The X, Y and Z position of the listener.
 * @link [love.audio.getPosition](https://love2d.org/wiki/love.audio.getPosition)
 */
function getPosition(): LuaMultiReturn<[x: number, y: number, z: number]>;
/**
 * Gets a list of {@link RecordingDevice RecordingDevices} on the system.
 *
 * @returns The list of connected recording devices.
 * @link [love.audio.getRecordingDevices](https://love2d.org/wiki/love.audio.getRecordingDevices)
 * @since 11.0
 */
function getRecordingDevices(): RecordingDevice[];
/**
 * Returns the velocity of the listener.
 *
 * @returns The X, Y and Z velocity of the listener.
 * @link [love.audio.getVelocity](https://love2d.org/wiki/love.audio.getVelocity)
 */
function getVelocity(): LuaMultiReturn<[x: number, y: number, z: number]>;
/**
 * Returns the master volume.
 *
 * @returns The current master volume, where `1.0` is max and `0.0` is off.
 * @link [love.audio.getVolume](https://love2d.org/wiki/love.audio.getVolume)
 */
function getVolume(): number;
/**
 * Gets whether audio effects are supported in the system.
 *
 * @returns `true` if effects are supported.
 * @link [love.audio.isEffectsSupported](https://love2d.org/wiki/love.audio.isEffectsSupported)
 * @since 11.0
 */
function isEffectsSupported(): boolean;
/**
 * Creates a new Source usable for real-time generated sound playback with {@link Source.queue}.
 *
 * @param samplerate Number of samples per second when playing.
 * @param bitdepth Bits per sample (`8` or `16`).
 * @param channels `1` for mono or `2` for stereo.
 * @param buffercount The number of buffers that can be queued up at any given time with Source:queue. Cannot be greater than `64`. (Default `0`, in which case a sensible default of around `8` is chosen automatically.)
 * @returns The new Source usable with {@link Source.queue}.
 * @link [love.audio.newQueueableSource](https://love2d.org/wiki/love.audio.newQueueableSource)
 * @since 11.0
 */
function newQueueableSource(samplerate: number, bitdepth: 8 | 16, channels: 1 | 2, buffercount?: number): Source;
/**
 * Creates a new Source.
 *
 * @param source filepath to audio file, File to audio file, Decoder or FileData from an audio file.
 * @param type Streaming or static source.
 * @returns A new Source that can play the specified audio.
 * @link [love.audio.newSource](https://love2d.org/wiki/love.audio.newSource)
 */
function newSource(source: string | File | Decoder | FileData, type: SourceType): Source;
/**
 * Creates a new Source from the given {@link SoundData}.
 *
 * @param data The SoundData to create the Source from.
 * @returns A new Source that can play the specified audio.
 * @link [love.audio.newSource](https://love2d.org/wiki/love.audio.newSource)
 */
function newSource(data: SoundData): Source;
/**
 * Pauses all currently active Sources and returns them.
 *
 * @returns A list of Sources that were paused by this call.
 * @link [love.audio.pause](https://love2d.org/wiki/love.audio.pause)
 */
function pause(): Source[];
/**
 * Pauses the given {@link Source Sources}.
 *
 * @param sources The sources on which to pause the playback.
 * @link [love.audio.pause](https://love2d.org/wiki/love.audio.pause)
 */
function pause(...sources: Source[]): void;
/**
 * Pauses the given list of {@link Source Sources}.
 *
 * @param sources The list of sources on which to pause the playback.
 * @link [love.audio.pause](https://love2d.org/wiki/love.audio.pause)
 */
function pause(sources: Source[]): void;
/**
 * Plays the specified {@link Source}.
 *
 * @param source The Source to play.
 * @link [love.audio.play](https://love2d.org/wiki/love.audio.play)
 */
function play(source: Source): void;
/**
 * Starts playing multiple {@link Source Sources} simultaneously.
 *
 * @param sources A list of sources to play.
 * @link [love.audio.play](https://love2d.org/wiki/love.audio.play)
 */
function play(...sources: Source[]): void;
/**
 * Starts playing the given list of {@link Source Sources} simultaneously.
 *
 * @param sources The list of sources to play.
 * @link [love.audio.play](https://love2d.org/wiki/love.audio.play)
 */
function play(sources: Source[]): void;
/**
 * Sets the distance attenuation model.
 *
 * @param model The new distance model.
 * @link [love.audio.setDistanceModel](https://love2d.org/wiki/love.audio.setDistanceModel)
 * @since 0.8.0
 */
function setDistanceModel(model: DistanceModel): void;
/**
 * Sets a global scale factor for velocity-based doppler effects.
 *
 * @param scale The new doppler scale factor. Must be greater than 0. (Default `1`)
 * @link [love.audio.setDopplerScale](https://love2d.org/wiki/love.audio.setDopplerScale)
 * @since 0.9.2
 */
function setDopplerScale(scale: number): void;
/**
 * Defines an effect that can be applied to a {@link Source}.
 *
 * @param name The name of the effect.
 * @param settings The settings to use for this effect.
 * @returns `true` if the effect was successfully created.
 * @link [love.audio.setEffect](https://love2d.org/wiki/love.audio.setEffect)
 * @since 11.0
 */
function setEffect(name: string, settings: EffectSettings): boolean;
/**
 * Enables or disables a previously defined effect.
 *
 * @param name The name of the effect.
 * @param enabled If `false` and the given effect name was previously set, disables the effect. (Default `true`)
 * @returns `true` if the effect was successfully disabled.
 * @link [love.audio.setEffect](https://love2d.org/wiki/love.audio.setEffect)
 * @since 11.0
 */
function setEffect(name: string, enabled?: boolean): boolean;
/**
 * Sets whether the system should mix the audio with the system's audio.
 *
 * @param mix `true` to enable mixing, `false` to disable it.
 * @returns `true` if the change succeeded.
 * @link [love.audio.setMixWithSystem](https://love2d.org/wiki/love.audio.setMixWithSystem)
 * @since 11.0
 */
function setMixWithSystem(mix: boolean): boolean;
/**
 * Sets the orientation of the listener.
 *
 * @param fx The X component of the forward vector of the listener orientation.
 * @param fy The Y component of the forward vector of the listener orientation.
 * @param fz The Z component of the forward vector of the listener orientation.
 * @param ux The X component of the up vector of the listener orientation.
 * @param uy The Y component of the up vector of the listener orientation.
 * @param uz The Z component of the up vector of the listener orientation.
 * @link [love.audio.setOrientation](https://love2d.org/wiki/love.audio.setOrientation)
 */
function setOrientation(fx: number, fy: number, fz: number, ux: number, uy: number, uz: number): void;
/**
 * Sets the position of the listener, which determines how sounds play.
 *
 * @param x The X position of the listener.
 * @param y The Y position of the listener.
 * @param z The Z position of the listener.
 * @link [love.audio.setPosition](https://love2d.org/wiki/love.audio.setPosition)
 */
function setPosition(x: number, y: number, z: number): void;
/**
 * Sets the velocity of the listener.
 *
 * @param x The X velocity of the listener.
 * @param y The Y velocity of the listener.
 * @param z The Z velocity of the listener.
 * @link [love.audio.setVelocity](https://love2d.org/wiki/love.audio.setVelocity)
 */
function setVelocity(x: number, y: number, z: number): void;
/**
 * Sets the master volume.
 *
 * @param volume `1.0` is max and `0.0` is off.
 * @link [love.audio.setVolume](https://love2d.org/wiki/love.audio.setVolume)
 */
function setVolume(volume: number): void;
/**
 * Stops currently played {@link Source Sources}.
 *
 * @link [love.audio.stop](https://love2d.org/wiki/love.audio.stop)
 */
function stop(): void;
/**
 * Simultaneously stops all given {@link Source Sources}.
 *
 * @param sources The list of sources to stop
 * @link [love.audio.stop](https://love2d.org/wiki/love.audio.stop)
 */
function stop(...sources: Source[]): void;
/**
 * Simultaneously stops the given list of {@link Source Sources}.
 *
 * @param sources The list of sources to stop
 * @link [love.audio.stop](https://love2d.org/wiki/love.audio.stop)
 */
function stop(sources: Source[]): void;
} | the_stack |
//@ts-check
///<reference path="devkit.d.ts" />
declare namespace DevKit {
namespace FormLetter {
/** Controls shown in the header area of the Letter form. */
interface Header extends DevKit.Controls.IHeader {
/** Enter the user or team who is assigned to manage the record. This field is updated every time the record is assigned to a different user. */
OwnerId: DevKit.Controls.Lookup;
/** Select the priority so that preferred customers or critical issues are handled quickly. */
PriorityCode: DevKit.Controls.OptionSet;
/** Enter the expected due date and time. */
ScheduledEnd: DevKit.Controls.DateTime;
/** Shows whether the letter is open, completed, or canceled. Completed and canceled letters are read-only and can't be edited. */
StateCode: DevKit.Controls.OptionSet;
}
/** Sections contained in the SUMMARY tab of the Letter form (names come from the form definition). */
interface tab_SUMMARY_TAB_Sections {
general_information: DevKit.Controls.Section;
Letter_description: DevKit.Controls.Section;
Letter_details: DevKit.Controls.Section;
tab_2_section_2: DevKit.Controls.Section;
}
/** The SUMMARY tab of the Letter form. */
interface tab_SUMMARY_TAB extends DevKit.Controls.ITab {
Section: tab_SUMMARY_TAB_Sections;
}
/** Tabs available on the Letter form. */
interface Tabs {
SUMMARY_TAB: tab_SUMMARY_TAB;
}
/** Controls shown in the body of the Letter form. */
interface Body {
/** The tabs of the form. */
Tab: Tabs;
/** Type the number of minutes spent creating and sending the letter. The duration is used in reporting. */
ActualDurationMinutes: DevKit.Controls.Integer;
/** Type the complete recipient address for the letter to ensure timely delivery. */
Address: DevKit.Controls.String;
/** Type the letter body or additional information to describe the letter, such as the primary message or the products and services described. */
Description: DevKit.Controls.String;
/** Select the direction of the letter as incoming or outbound. */
DirectionCode: DevKit.Controls.Boolean;
/** Enter the account, contact, lead, or user who sent the letter. */
from: DevKit.Controls.Lookup;
/** Unique identifier of the object with which the letter activity is associated. */
RegardingObjectId: DevKit.Controls.Lookup;
/** Type a short description about the objective or primary topic of the letter. */
Subject: DevKit.Controls.String;
/** Enter the account, contact, lead, or user recipients for the letter. */
to: DevKit.Controls.Lookup;
}
}
/** Typed client-side wrapper (generated by DynamicsCrm.DevKit) for the Dynamics 365 "Letter" main form. */
class FormLetter extends DevKit.IForm {
/**
 * DynamicsCrm.DevKit form Letter
 * @param executionContext the execution context
 * @param defaultWebResourceName default resource name. E.g.: "devkit_/resources/Resource"
 */
constructor(executionContext: any, defaultWebResourceName?: string);
/** Utility functions/methods/objects for Dynamics 365 form */
Utility: DevKit.Utility;
/** The Body section of form Letter */
Body: DevKit.FormLetter.Body;
/** The Header section of form Letter */
Header: DevKit.FormLetter.Header;
}
/** Typed Web API wrapper (generated by DynamicsCrm.DevKit) for the Dynamics 365 "letter" activity entity. */
class LetterApi {
/**
 * DynamicsCrm.DevKit LetterApi
 * @param entity The entity object
 */
constructor(entity?: any);
/**
 * Get the value of alias
 * @param alias the alias value
 * @param isMultiOptionSet true if the alias is multi OptionSet
 */
getAliasedValue(alias: string, isMultiOptionSet?: boolean): any;
/**
 * Get the formatted value of alias
 * @param alias the alias value
 * @param isMultiOptionSet true if the alias is multi OptionSet
 */
getAliasedFormattedValue(alias: string, isMultiOptionSet?: boolean): string;
/** The entity object */
Entity: any;
/** The entity name */
EntityName: string;
/** The entity collection name */
EntityCollectionName: string;
/** The @odata.etag is then used to build a cache of the response that is dependant on the fields that are retrieved */
"@odata.etag": string;
/** Unique identifier of the letter activity. */
ActivityId: DevKit.WebApi.GuidValue;
/** Type the number of minutes spent creating and sending the letter. The duration is used in reporting. */
ActualDurationMinutes: DevKit.WebApi.IntegerValue;
/** Enter the actual end date and time of the letter. By default, it displays the date and time when the activity was completed or canceled, but can be edited to capture the actual time to create and send the letter. */
ActualEnd_UtcDateOnly: DevKit.WebApi.UtcDateOnlyValue;
/** Enter the actual start date and time for the letter. By default, it displays the date and time when the activity was created, but can be edited to capture the actual time to create and send the letter. */
ActualStart_UtcDateOnly: DevKit.WebApi.UtcDateOnlyValue;
/** Type the complete recipient address for the letter to ensure timely delivery. */
Address: DevKit.WebApi.StringValue;
/** Type a category to identify the letter type, such as sales offer or past due notice, to tie the letter to a business group or function. */
Category: DevKit.WebApi.StringValue;
/** Shows who created the record. */
CreatedBy: DevKit.WebApi.LookupValueReadonly;
/** Shows the date and time when the record was created. The date and time are displayed in the time zone selected in Microsoft Dynamics 365 options. */
CreatedOn_UtcDateAndTime: DevKit.WebApi.UtcDateAndTimeValueReadonly;
/** Shows who created the record on behalf of another user. */
CreatedOnBehalfBy: DevKit.WebApi.LookupValueReadonly;
/** Type the letter body or additional information to describe the letter, such as the primary message or the products and services described. */
Description: DevKit.WebApi.StringValue;
/** Select the direction of the letter as incoming or outbound. */
DirectionCode: DevKit.WebApi.BooleanValue;
/** Shows the conversion rate of the record's currency. The exchange rate is used to convert all money fields in the record from the local currency to the system's default currency. */
ExchangeRate: DevKit.WebApi.DecimalValueReadonly;
/** Unique identifier of the data import or data migration that created this record. */
ImportSequenceNumber: DevKit.WebApi.IntegerValue;
/** Shows whether the letter activity was billed as part of resolving a case. */
IsBilled: DevKit.WebApi.BooleanValue;
/** Shows whether the activity is a regular activity type or event type. */
IsRegularActivity: DevKit.WebApi.BooleanValueReadonly;
/** Shows whether the letter activity was created by a workflow rule. */
IsWorkflowCreated: DevKit.WebApi.BooleanValue;
/** Contains the date and time stamp of the last on hold time. */
LastOnHoldTime_UtcDateAndTime: DevKit.WebApi.UtcDateAndTimeValue;
/** Shows who last updated the record. */
ModifiedBy: DevKit.WebApi.LookupValueReadonly;
/** Shows the date and time when the record was last updated. The date and time are displayed in the time zone selected in Microsoft Dynamics 365 options. */
ModifiedOn_UtcDateAndTime: DevKit.WebApi.UtcDateAndTimeValueReadonly;
/** Shows who last updated the record on behalf of another user. */
ModifiedOnBehalfBy: DevKit.WebApi.LookupValueReadonly;
/** Shows how long, in minutes, that the record was on hold. */
OnHoldTime: DevKit.WebApi.IntegerValueReadonly;
/** Date and time that the record was migrated. */
OverriddenCreatedOn_UtcDateOnly: DevKit.WebApi.UtcDateOnlyValue;
/** Enter the user who is assigned to manage the record. This field is updated every time the record is assigned to a different user */
OwnerId_systemuser: DevKit.WebApi.LookupValue;
/** Enter the team who is assigned to manage the record. This field is updated every time the record is assigned to a different team */
OwnerId_team: DevKit.WebApi.LookupValue;
/** Unique identifier of the business unit that owns the letter activity. */
OwningBusinessUnit: DevKit.WebApi.LookupValueReadonly;
/** Unique identifier of the team that owns the letter activity. */
OwningTeam: DevKit.WebApi.LookupValueReadonly;
/** Unique identifier of the user that owns the letter activity. */
OwningUser: DevKit.WebApi.LookupValueReadonly;
/** Select the priority so that preferred customers or critical issues are handled quickly. */
PriorityCode: DevKit.WebApi.OptionSetValue;
/** Shows the ID of the process. */
ProcessId: DevKit.WebApi.GuidValue;
// Polymorphic "regarding" lookup: one navigation property per entity type the letter can be associated with.
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_account_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_bookableresourcebooking_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_bookableresourcebookingheader_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_bulkoperation_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_campaign_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_campaignactivity_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_contact_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_contract_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_entitlement_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_entitlementtemplate_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_incident_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_invoice_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_knowledgearticle_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_knowledgebaserecord_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_lead_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_agreement_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_agreementbookingdate_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_agreementbookingincident_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_agreementbookingproduct_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_agreementbookingservice_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_agreementbookingservicetask_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_agreementbookingsetup_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_agreementinvoicedate_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_agreementinvoiceproduct_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_agreementinvoicesetup_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_bookingalertstatus_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_bookingrule_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_bookingtimestamp_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_customerasset_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_fieldservicesetting_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_incidenttypecharacteristic_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_incidenttypeproduct_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_incidenttypeservice_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_inventoryadjustment_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_inventoryadjustmentproduct_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_inventoryjournal_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_inventorytransfer_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_payment_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_paymentdetail_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_paymentmethod_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_paymentterm_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_playbookinstance_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_postalbum_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_postalcode_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_processnotes_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_productinventory_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_projectteam_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_purchaseorder_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_purchaseorderbill_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_purchaseorderproduct_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_purchaseorderreceipt_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_purchaseorderreceiptproduct_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_purchaseordersubstatus_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_quotebookingincident_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_quotebookingproduct_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_quotebookingservice_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_quotebookingservicetask_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_resourceterritory_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_rma_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_rmaproduct_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_rmareceipt_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_rmareceiptproduct_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_rmasubstatus_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_rtv_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_rtvproduct_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_rtvsubstatus_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_shipvia_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_systemuserschedulersetting_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_timegroup_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_timegroupdetail_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_timeoffrequest_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_warehouse_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_workorder_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_workordercharacteristic_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_workorderincident_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_workorderproduct_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_workorderresourcerestriction_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_workorderservice_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_msdyn_workorderservicetask_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_opportunity_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_quote_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_salesorder_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_site_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_uii_action_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_uii_hostedapplication_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_uii_nonhostedapplication_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_uii_option_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_uii_savedsession_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_uii_workflow_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_uii_workflowstep_letter: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the letter activity is associated. */
regardingobjectid_uii_workflow_workflowstep_mapping_letter: DevKit.WebApi.LookupValue;
/** Scheduled duration of the letter activity, specified in minutes. */
ScheduledDurationMinutes: DevKit.WebApi.IntegerValueReadonly;
/** Enter the expected due date and time. */
ScheduledEnd_UtcDateAndTime: DevKit.WebApi.UtcDateAndTimeValue;
/** Enter the expected due date and time. */
ScheduledStart_UtcDateAndTime: DevKit.WebApi.UtcDateAndTimeValue;
/** Unique identifier for an associated service. */
ServiceId: DevKit.WebApi.LookupValue;
/** Choose the service level agreement (SLA) that you want to apply to the Letter record. */
SLAId: DevKit.WebApi.LookupValue;
/** Last SLA that was applied to this Letter. This field is for internal use only. */
SLAInvokedId: DevKit.WebApi.LookupValueReadonly;
/** Display name of the SLA applied to this Letter. */
SLAName: DevKit.WebApi.StringValueReadonly;
/** Shows the date and time by which the activities are sorted. */
SortDate_UtcDateAndTime: DevKit.WebApi.UtcDateAndTimeValue;
/** Shows the ID of the stage. */
StageId: DevKit.WebApi.GuidValue;
/** Shows whether the letter is open, completed, or canceled. Completed and canceled letters are read-only and can't be edited. */
StateCode: DevKit.WebApi.OptionSetValue;
/** Select the letter's status. */
StatusCode: DevKit.WebApi.OptionSetValue;
/** Type a subcategory to identify the letter type and relate the activity to a specific product, sales region, business group, or other function. */
Subcategory: DevKit.WebApi.StringValue;
/** Type a short description about the objective or primary topic of the letter. */
Subject: DevKit.WebApi.StringValue;
/** For internal use only. */
SubscriptionId: DevKit.WebApi.GuidValue;
/** For internal use only. */
TimeZoneRuleVersionNumber: DevKit.WebApi.IntegerValue;
/** Choose the local currency for the record to make sure budgets are reported in the correct currency. */
TransactionCurrencyId: DevKit.WebApi.LookupValue;
/** For internal use only. */
TraversedPath: DevKit.WebApi.StringValue;
/** Time zone code that was in use when the record was created. */
UTCConversionTimeZoneCode: DevKit.WebApi.IntegerValue;
/** Version number of the letter. */
VersionNumber: DevKit.WebApi.BigIntValueReadonly;
/** The array of object that can cast object to ActivityPartyApi class */
ActivityParties: Array<any>;
}
}
declare namespace OptionSet {
/** Option set (picklist) values for the Letter activity entity, generated by DynamicsCrm.DevKit. The numeric comment above each member is its underlying option value. */
namespace Letter {
/** Priority of the letter activity. */
enum PriorityCode {
/** 2 */
High,
/** 0 */
Low,
/** 1 */
Normal
}
/** State (lifecycle) of the letter activity. */
enum StateCode {
/** 2 */
Canceled,
/** 1 */
Completed,
/** 0 */
Open
}
/** Status reason of the letter activity. */
enum StatusCode {
/** 5 */
Canceled,
/** 2 */
Draft,
/** 1 */
Open,
/** 3 */
Received,
/** 4 */
Sent
}
/** Calculation state of rollup attributes on the entity. */
enum RollupState {
/** 0 - Attribute value is yet to be calculated */
NotCalculated,
/** 1 - Attribute value has been calculated per the last update time in <AttributeSchemaName>_Date attribute */
Calculated,
/** 2 - Attribute value calculation lead to overflow error */
OverflowError,
/** 3 - Attribute value calculation failed due to an internal error, next run of calculation job will likely fix it */
OtherError,
/** 4 - Attribute value calculation failed because the maximum number of retry attempts to calculate the value were exceeded likely due to high number of concurrency and locking conflicts */
RetryLimitExceeded,
/** 5 - Attribute value calculation failed because maximum hierarchy depth limit for calculation was reached */
HierarchicalRecursionLimitReached,
/** 6 - Attribute value calculation failed because a recursive loop was detected in the hierarchy of the record */
LoopDetected
}
}
}
//{'JsForm':['Letter'],'JsWebApi':true,'IsDebugForm':true,'IsDebugWebApi':true,'Version':'2.12.31','JsFormVersion':'v2'} | the_stack |
import { BigNumber } from "bignumber.js";
import xor = require("bitwise-xor");
import promisify = require("es6-promisify");
import fs = require("fs");
import * as _ from "lodash";
import { Artifacts } from "../util/artifacts";
import { Order } from "../util/order";
import { TxParser } from "../util/parseTx";
import { ProtocolSimulator } from "../util/protocol_simulator";
import { Ring } from "../util/ring";
import { RingFactory } from "../util/ring_factory";
import { OrderParams, RingBalanceInfo, RingInfo, TransferItem } from "../util/types";
import { rawTxs } from "./rawTxs";
import { ringInfoList } from "./ringConfig";
// Resolve the deployed contract abstractions through the project's Artifacts
// helper, using Truffle's global `artifacts` loader.
const {
  LoopringProtocolImpl,
  TokenRegistry,
  TokenTransferDelegate,
  DummyToken,
} = new Artifacts(artifacts);
// Integration tests for LoopringProtocolImpl: ring submission (checked against
// a local ProtocolSimulator via emitted Transfer events), per-order
// cancellation, and whole-account / trading-pair cutoffs.
contract("LoopringProtocolImpl", (accounts: string[]) => {
  const owner = accounts[0];
  const order1Owner = accounts[1];
  const order2Owner = accounts[2];
  const order3Owner = accounts[3];
  const orderAuthAddr = accounts[7]; // should generate each time in front-end. we just mock it here.
  const ringOwner = accounts[6];
  const feeRecepient = ringOwner;
  const ringMiner = feeRecepient;
  const walletAddr = accounts[8];

  let loopringProtocolImpl: any;
  let tokenRegistry: any;
  let tokenTransferDelegate: any;
  let lrcAddress: string;
  let eosAddress: string;
  let gtoAddress: string;
  let rdnAddress: string;
  let delegateAddr: string;
  let lrc: any;
  let eos: any;
  let gto: any;
  let rdn: any;
  let allTokens: any[];
  let allAddresses: string[];
  // token address -> token contract instance, and token address -> symbol.
  const tokenMap = new Map();
  const tokenSymbolMap = new Map();
  let currBlockTimeStamp: number;
  let walletSplitPercentage: number;
  let ringFactory: RingFactory;

  // One-time setup: resolve deployed contracts, look up registered token
  // addresses, authorize the protocol on the delegate, build token handles,
  // and pre-approve delegate allowances for every (token, trader) pair.
  before( async () => {
    [loopringProtocolImpl, tokenRegistry, tokenTransferDelegate] = await Promise.all([
      LoopringProtocolImpl.deployed(),
      TokenRegistry.deployed(),
      TokenTransferDelegate.deployed(),
    ]);

    lrcAddress = await tokenRegistry.getAddressBySymbol("LRC");
    eosAddress = await tokenRegistry.getAddressBySymbol("EOS");
    gtoAddress = await tokenRegistry.getAddressBySymbol("GTO");
    rdnAddress = await tokenRegistry.getAddressBySymbol("RDN");
    delegateAddr = TokenTransferDelegate.address;

    const walletSplitPercentageBN = await loopringProtocolImpl.walletSplitPercentage();
    walletSplitPercentage = walletSplitPercentageBN.toNumber();

    // NOTE(review): this tx is not awaited — presumably it is mined before the
    // first test runs; confirm there is no race with submitRing.
    tokenTransferDelegate.authorizeAddress(LoopringProtocolImpl.address);

    [lrc, eos, gto, rdn] = await Promise.all([
      DummyToken.at(lrcAddress),
      DummyToken.at(eosAddress),
      DummyToken.at(gtoAddress),
      DummyToken.at(rdnAddress),
    ]);

    tokenMap.set(lrcAddress, lrc);
    tokenSymbolMap.set(lrcAddress, "LRC");
    tokenMap.set(eosAddress, eos);
    tokenSymbolMap.set(eosAddress, "EOS");
    tokenMap.set(gtoAddress, gto);
    tokenSymbolMap.set(gtoAddress, "GTO");
    tokenMap.set(rdnAddress, rdn);
    tokenSymbolMap.set(rdnAddress, "RDN");

    const currBlockNumber = web3.eth.blockNumber;
    currBlockTimeStamp = web3.eth.getBlock(currBlockNumber).timestamp;
    ringFactory = new RingFactory(TokenTransferDelegate.address,
                                  orderAuthAddr,
                                  currBlockTimeStamp);
    ringFactory.walletAddr = walletAddr;

    allTokens = [eos, gto, lrc, rdn];
    allAddresses = [order1Owner, order2Owner, order3Owner, feeRecepient];

    // approve only once for all test cases.
    for (const token of allTokens) {
      for (const address of allAddresses) {
        await token.approve(delegateAddr, web3.toWei(10000000000), {from: address});
      }
    }
  });

  // Balance of `addr` in `token`, converted to a JS number (precision loss is
  // possible above 2^53; comparisons below use limited precision anyway).
  const getTokenBalanceAsync = async (token: any, addr: string) => {
    const tokenBalanceStr = await token.balanceOf(addr);
    const balance = new BigNumber(tokenBalanceStr);
    return balance.toNumber();
  };

  // ETH balance of `addr`, kept as a BigNumber.
  const getEthBalanceAsync = async (addr: string) => {
    const balanceStr = await promisify(web3.eth.getBalance)(addr);
    const balance = new BigNumber(balanceStr);
    return balance;
  };

  // Assert n1 === n2 after rounding both to `precision` significant digits.
  const assertNumberEqualsWithPrecision = (n1: number, n2: number, precision: number = 8) => {
    const numStr1 = n1.toPrecision(precision);
    const numStr2 = n2.toPrecision(precision);
    return assert.equal(Number(numStr1), Number(numStr2));
  };

  // Zero out every (token, address) balance so cases don't leak state.
  const clear = async (tokens: any[], addresses: string[]) => {
    for (const token of tokens) {
      for (const address of addresses) {
        await token.setBalance(address, 0, {from: owner});
      }
    }
  };

  // Set delegate allowances: reset each to 0 first, then set the new amount.
  const approve = async (tokens: any[], addresses: string[], amounts: number[]) => {
    for (let i = 0; i < tokens.length; i++) {
      await tokens[i].approve(delegateAddr, 0, {from: addresses[i]});
      await tokens[i].approve(delegateAddr, amounts[i], {from: addresses[i]});
    }
  };

  // Fund each order owner with its tokenS balance and LRC fee (each
  // overridable per order via the spendable lists), and give the miner an LRC
  // balance. spendableLrcFeeAmountList[ringSize] (one past the last order),
  // when present, overrides the miner's LRC balance.
  const setBalanceBefore = async (ring: Ring,
                                 miner: string,
                                 spendableAmountSList: number[],
                                 spendableLrcFeeAmountList: number[]) => {
    const ringSize = ring.orders.length;
    let lrcRewardTotal = 0;
    for (let i = 0; i < ringSize; i++) {
      const order = ring.orders[i];
      const orderOwner = order.owner;
      let balance = order.params.amountS.toNumber();
      if (spendableAmountSList && spendableAmountSList[i]) {
        balance = spendableAmountSList[i];
      }
      const tokenSAddr = order.params.tokenS;
      const tokenInstance = tokenMap.get(tokenSAddr);
      await tokenInstance.setBalance(orderOwner, balance);

      let lrcFee = order.params.lrcFee.toNumber();
      lrcRewardTotal += lrcFee;
      if (spendableLrcFeeAmountList && spendableLrcFeeAmountList[i]) {
        lrcFee = spendableLrcFeeAmountList[i];
      }
      await lrc.addBalance(orderOwner, lrcFee);
    }
    if (spendableLrcFeeAmountList && spendableLrcFeeAmountList[ringSize]) {
      lrcRewardTotal = spendableLrcFeeAmountList[ringSize];
    }
    await lrc.setBalance(miner, lrcRewardTotal);
  };

  // Fill in defaults for a (possibly partial) RingInfo: token addresses,
  // order owners (the "##sameOwners##" sentinel makes every order use
  // order1Owner), miner, LRC fees, buyNoMoreThanAmountB flags, and margin
  // split percentages. Supports rings of up to 3 orders.
  const setDefaultValuesForRingInfo = (ringInfo: RingInfo) => {
    const ringSize = ringInfo.amountSList.length;
    assert(ringSize <= 3, "invalid orders size. amountSList:" + ringInfo.amountSList);
    const tokenAddresses = [eosAddress, gtoAddress, lrcAddress];
    const orderOwners = [order1Owner, order2Owner, order3Owner];
    ringInfo.tokenAddressList = tokenAddresses.slice(0, ringSize);
    if (ringInfo.orderOwners && ringInfo.orderOwners.length > 0 &&
        ringInfo.orderOwners[0] === "##sameOwners##") {
      ringInfo.orderOwners = Array.from({length: ringSize}, () => orderOwners[0]);
    } else {
      ringInfo.orderOwners = orderOwners.slice(0, ringSize);
    }
    ringInfo.miner = ringOwner;
    if (!ringInfo.lrcFeeAmountList || ringInfo.lrcFeeAmountList.length < ringSize) {
      ringInfo.lrcFeeAmountList = Array.from({length: ringSize}, () => 1e18);
    }
    if (!ringInfo.buyNoMoreThanAmountBList || ringInfo.buyNoMoreThanAmountBList.length < ringSize) {
      ringInfo.buyNoMoreThanAmountBList = Array.from({length: ringSize}, () => false);
    }
    if (!ringInfo.marginSplitPercentageList || ringInfo.marginSplitPercentageList.length < ringSize) {
      ringInfo.marginSplitPercentageList = Array.from({length: ringSize}, () => 50);
    }
  };

  // Collect all `eventName` logs emitted by `contract` from `fromBlock` to
  // the latest block.
  const getEventsFromContract = async (contract: any, eventName: string, fromBlock: number) => {
    return new Promise((resolve, reject) => {
      if (!contract[eventName]) {
        throw Error("TypeError: contract[eventName] is not a function: " + eventName);
      }
      const events = contract[eventName]({}, { fromBlock, toBlock: "latest" });
      events.watch();
      events.get((error: any, event: any) => {
        if (!error) {
          resolve(event);
        } else {
          throw Error("Failed to find filtered event: " + error);
        }
      });
      events.stopWatching();
    });
  };

  // Flatten the ERC20 Transfer events of the given tokens (since `fromBlock`)
  // into [from, to, value] triples.
  const getTransferEvents = async (tokens: string[], fromBlock: number) => {
    let transferItems: Array<[string, string, number]> = [];
    for (const tokenAddr of tokens) {
      const tokenContractInstance = tokenMap.get(tokenAddr);
      const eventArr: any = await getEventsFromContract(tokenContractInstance, "Transfer", fromBlock);
      const items = eventArr.map((eventObj: any) => {
        return [eventObj.args.from, eventObj.args.to, eventObj.args.value.toNumber()];
      });
      transferItems = transferItems.concat(items);
    }
    return transferItems;
  };

  // Assert that the on-chain Transfer events match the simulator's predicted
  // transfers: both lists are sorted canonically, then compared pairwise
  // (amounts with limited numeric precision).
  const assertTransfers = (tranferEvents: Array<[string, string, number]>, transferList: TransferItem[]) => {
    const transfersFromSimulator: Array<[string, string, number]> = [];
    transferList.forEach((item) => transfersFromSimulator.push([item.fromAddress, item.toAddress, item.amount]));
    // Order by (from, to, amount) so the two lists can be compared pairwise.
    const sorter = (a: [string, string, number], b: [string, string, number]) => {
      if (a[0] === b[0]) {
        if (a[1] === b[1]) {
          return a[2] - b[2];
        } else {
          return a[1] > b[1] ? 1 : -1;
        }
      } else {
        return a[0] > b[0] ? 1 : -1;
      }
    };
    transfersFromSimulator.sort(sorter);
    tranferEvents.sort(sorter);
    // console.log("transfersFromSimulator:", transfersFromSimulator);
    // console.log("tranferEvents from testrpc:", tranferEvents);
    assert.equal(tranferEvents.length, transfersFromSimulator.length, "transfer amounts not match");
    for (let i = 0; i < tranferEvents.length; i++) {
      const transferFromEvent = tranferEvents[i];
      const transferFromSimulator = transfersFromSimulator[i];
      assert.equal(transferFromEvent[0], transferFromSimulator[0]);
      assert.equal(transferFromEvent[1], transferFromSimulator[1]);
      assertNumberEqualsWithPrecision(transferFromEvent[2], transferFromSimulator[2]);
    }
  };

  // Data-driven ring-submission cases: configured rings plus rings parsed
  // from recorded raw transactions.
  describe("submitRing", () => {
    const protocolAbi = fs.readFileSync("ABI/version15/LoopringProtocolImpl.abi", "ascii");
    const txParser = new TxParser(protocolAbi);
    const ringInfoListFromRawTxs = rawTxs.map((tx) => txParser.parseSubmitRingTx(tx));
    // const ringInfoListForTest = ringInfoListFromRawTxs;
    const ringInfoListForTest = ringInfoList.concat(ringInfoListFromRawTxs);
    let eventFromBlock: number = 0;

    // Cancel `cancelAmount` of `order` on-chain, signed by its owner.
    const cancelOrderAmount = async (order: Order, cancelAmount: number) => {
      const addresses = [order.owner,
                         order.params.tokenS,
                         order.params.tokenB,
                         order.params.walletAddr,
                         order.params.authAddr];
      const orderValues = [order.params.amountS,
                           order.params.amountB,
                           order.params.validSince,
                           order.params.validUntil,
                           order.params.lrcFee,
                           cancelAmount];
      await loopringProtocolImpl.cancelOrder(addresses,
                                             orderValues,
                                             order.params.buyNoMoreThanAmountB,
                                             order.params.marginSplitPercentage,
                                             order.params.v,
                                             order.params.r,
                                             order.params.s,
                                             {from: order.owner});
    };

    for (const ringInfo of ringInfoListForTest) {
      it(ringInfo.description, async () => {
        setDefaultValuesForRingInfo(ringInfo);
        const ring = await ringFactory.generateRing(ringInfo);
        assert(ring.orders[0].isValidSignature(), "invalid signature");

        await setBalanceBefore(ring,
                               feeRecepient,
                               ringInfo.spendableAmountSList,
                               ringInfo.spendableLrcFeeAmountList);

        const p = ringFactory.ringToSubmitableParams(ring);
        const simulator = new ProtocolSimulator(ring,
                                                lrcAddress,
                                                tokenRegistry.address,
                                                walletSplitPercentage);

        // set order's cancelOrFilledAmount
        if (ringInfo.orderFilledOrCancelledAmountList) {
          // NOTE(review): forEach with an async callback is not awaited, so
          // these cancel txs may still be pending when submitRing executes —
          // confirm this ordering is intended.
          ringInfo.orderFilledOrCancelledAmountList.forEach(async (n, i) => {
            if (n > 0) {
              await cancelOrderAmount(ring.orders[i], n);
            }
          });
        }

        const simulatorReport = await simulator.simulateAndReport([],
                                                                  [],
                                                                  ringInfo.orderFilledOrCancelledAmountList,
                                                                  ringInfo.verbose);

        const tx = await loopringProtocolImpl.submitRing(p.data,
                                                         {from: feeRecepient});
        // console.log("tx.receipt.logs: ", tx.receipt.logs);
        console.log("gas used: ", tx.receipt.gasUsed);

        // Compare the actual Transfer events (LRC plus every tokenS in the
        // ring, deduplicated) against the simulator's prediction.
        const tokenSet = new Set([lrcAddress]);
        for (const order of ring.orders) {
          tokenSet.add(order.params.tokenS);
        }
        const tokensInRing = [...tokenSet];
        const transferEvents = await getTransferEvents(tokensInRing, eventFromBlock);
        assertTransfers(transferEvents, simulatorReport.transferList);

        // Reset balances and advance the event window for the next case.
        await clear([eos, gto, lrc], [order1Owner, order2Owner, feeRecepient]);
        eventFromBlock = web3.eth.blockNumber + 1;
      });
    }
  });

  describe("cancelOrder", () => {
    it("should be able to set order cancelled amount by order owner", async () => {
      const feeSelectionList = [0, 0];
      const orderPrams = {
        delegateContract: delegateAddr,
        tokenS: eosAddress,
        tokenB: gtoAddress,
        amountS: new BigNumber(1000e18),
        amountB: new BigNumber(100e18),
        validSince: new BigNumber(currBlockTimeStamp),
        validUntil: new BigNumber((currBlockTimeStamp + 360000) + 130),
        lrcFee: new BigNumber(1e18),
        buyNoMoreThanAmountB: false,
        marginSplitPercentage: 0,
        authAddr: orderAuthAddr,
        walletAddr,
      };
      const order = new Order(order1Owner, orderPrams);
      await order.signAsync();
      const cancelAmount = new BigNumber(100e18);
      const addresses = [order.owner,
                         order.params.tokenS,
                         order.params.tokenB,
                         order.params.walletAddr,
                         order.params.authAddr];
      const orderValues = [order.params.amountS,
                           order.params.amountB,
                           order.params.validSince,
                           order.params.validUntil,
                           order.params.lrcFee,
                           cancelAmount];
      // The delegate tracks cancelled-or-filled amounts per order hash;
      // verify the delta equals the cancelled amount.
      const cancelledOrFilledAmount0 = await tokenTransferDelegate.cancelledOrFilled(order.params.orderHashHex);
      const tx = await loopringProtocolImpl.cancelOrder(addresses,
                                                        orderValues,
                                                        order.params.buyNoMoreThanAmountB,
                                                        order.params.marginSplitPercentage,
                                                        order.params.v,
                                                        order.params.r,
                                                        order.params.s,
                                                        {from: order.owner});
      const cancelledOrFilledAmount1 = await tokenTransferDelegate.cancelledOrFilled(order.params.orderHashHex);
      assert.equal(cancelledOrFilledAmount1.minus(cancelledOrFilledAmount0).toNumber(),
                   cancelAmount.toNumber(), "cancelled amount not match");
    });

    it("should not be able to cancell order by other address", async () => {
      const feeSelectionList = [0, 0];
      const orderPrams = {
        delegateContract: delegateAddr,
        tokenS: eosAddress,
        tokenB: gtoAddress,
        amountS: new BigNumber(1000e18),
        amountB: new BigNumber(100e18),
        validSince: new BigNumber(currBlockTimeStamp),
        validUntil: new BigNumber((currBlockTimeStamp + 360000) + 130),
        lrcFee: new BigNumber(1e18),
        buyNoMoreThanAmountB: false,
        marginSplitPercentage: 0,
        authAddr: orderAuthAddr,
        walletAddr,
      };
      const order = new Order(order1Owner, orderPrams);
      await order.signAsync();
      const cancelAmount = new BigNumber(100e18);
      const addresses = [order.owner,
                         order.params.tokenS,
                         order.params.tokenB,
                         order.params.walletAddr,
                         order.params.authAddr];
      const orderValues = [order.params.amountS,
                           order.params.amountB,
                           order.params.validSince,
                           order.params.validUntil,
                           order.params.lrcFee,
                           cancelAmount];
      // Cancelling from a non-owner account must revert.
      try {
        const tx = await loopringProtocolImpl.cancelOrder(addresses,
                                                          orderValues,
                                                          order.params.buyNoMoreThanAmountB,
                                                          order.params.marginSplitPercentage,
                                                          order.params.v,
                                                          order.params.r,
                                                          order.params.s,
                                                          {from: order2Owner});
      } catch (err) {
        const errMsg = `${err}`;
        assert(_.includes(errMsg, "Error: VM Exception while processing transaction: revert"),
               `Expected contract to throw, got: ${err}`);
      }
    });
  });

  describe("cancelAllOrders", () => {
    it("should be able to set cutoffs", async () => {
      await loopringProtocolImpl.cancelAllOrders(new BigNumber(1508566125), {from: order2Owner});
      const cutoff = await tokenTransferDelegate.cutoffs(order2Owner);
      assert.equal(cutoff.toNumber(), 1508566125, "cutoff not set correctly");
    });
  });

  describe("cancelAllOrdersByTradingPair", () => {
    it("should be able to set trading pair cutoffs", async () => {
      await loopringProtocolImpl.cancelAllOrdersByTradingPair(eosAddress,
                                                              gtoAddress,
                                                              new BigNumber(1508566125),
                                                              {from: order2Owner});
      const cutoff = await loopringProtocolImpl.getTradingPairCutoffs(order2Owner,
                                                                      eosAddress,
                                                                      gtoAddress);
      assert.equal(cutoff.toNumber(), 1508566125, "trading pair cutoff not set correctly");
    });
  });
});
import {Request} from '../lib/request';
import {Response} from '../lib/response';
import {AWSError} from '../lib/error';
import {Service} from '../lib/service';
import {ServiceConfigurationOptions} from '../lib/service';
import {ConfigBase as Config} from '../lib/config-base';
interface Blob {}
declare class DevOpsGuru extends Service {
  /**
   * Constructs a service object. This object has one method for each API operation.
   */
  constructor(options?: DevOpsGuru.Types.ClientConfiguration)
  config: Config & DevOpsGuru.Types.ClientConfiguration;
  /**
   * Adds a notification channel to DevOps Guru. A notification channel is used to notify you about important DevOps Guru events, such as when an insight is generated. If you use an Amazon SNS topic in another account, you must attach a policy to it that grants DevOps Guru permission to send it notifications. DevOps Guru adds the required policy on your behalf to send notifications using Amazon SNS in your account. For more information, see Permissions for cross account Amazon SNS topics. If you use an Amazon SNS topic that is encrypted by an AWS Key Management Service customer-managed key (CMK), then you must add permissions to the CMK. For more information, see Permissions for AWS KMS–encrypted Amazon SNS topics.
   */
  addNotificationChannel(params: DevOpsGuru.Types.AddNotificationChannelRequest, callback?: (err: AWSError, data: DevOpsGuru.Types.AddNotificationChannelResponse) => void): Request<DevOpsGuru.Types.AddNotificationChannelResponse, AWSError>;
  /**
   * Adds a notification channel to DevOps Guru. A notification channel is used to notify you about important DevOps Guru events, such as when an insight is generated. If you use an Amazon SNS topic in another account, you must attach a policy to it that grants DevOps Guru permission to send it notifications. DevOps Guru adds the required policy on your behalf to send notifications using Amazon SNS in your account. For more information, see Permissions for cross account Amazon SNS topics. If you use an Amazon SNS topic that is encrypted by an AWS Key Management Service customer-managed key (CMK), then you must add permissions to the CMK. For more information, see Permissions for AWS KMS–encrypted Amazon SNS topics.
   */
  addNotificationChannel(callback?: (err: AWSError, data: DevOpsGuru.Types.AddNotificationChannelResponse) => void): Request<DevOpsGuru.Types.AddNotificationChannelResponse, AWSError>;
  /**
   * Returns the number of open reactive insights, the number of open proactive insights, and the number of metrics analyzed in your AWS account. Use these numbers to gauge the health of operations in your AWS account.
   */
  describeAccountHealth(params: DevOpsGuru.Types.DescribeAccountHealthRequest, callback?: (err: AWSError, data: DevOpsGuru.Types.DescribeAccountHealthResponse) => void): Request<DevOpsGuru.Types.DescribeAccountHealthResponse, AWSError>;
  /**
   * Returns the number of open reactive insights, the number of open proactive insights, and the number of metrics analyzed in your AWS account. Use these numbers to gauge the health of operations in your AWS account.
   */
  describeAccountHealth(callback?: (err: AWSError, data: DevOpsGuru.Types.DescribeAccountHealthResponse) => void): Request<DevOpsGuru.Types.DescribeAccountHealthResponse, AWSError>;
  /**
   * For the time range passed in, returns the number of open reactive insight that were created, the number of open proactive insights that were created, and the Mean Time to Recover (MTTR) for all closed reactive insights.
   */
  describeAccountOverview(params: DevOpsGuru.Types.DescribeAccountOverviewRequest, callback?: (err: AWSError, data: DevOpsGuru.Types.DescribeAccountOverviewResponse) => void): Request<DevOpsGuru.Types.DescribeAccountOverviewResponse, AWSError>;
  /**
   * For the time range passed in, returns the number of open reactive insight that were created, the number of open proactive insights that were created, and the Mean Time to Recover (MTTR) for all closed reactive insights.
   */
  describeAccountOverview(callback?: (err: AWSError, data: DevOpsGuru.Types.DescribeAccountOverviewResponse) => void): Request<DevOpsGuru.Types.DescribeAccountOverviewResponse, AWSError>;
  /**
   * Returns details about an anomaly that you specify using its ID.
   */
  describeAnomaly(params: DevOpsGuru.Types.DescribeAnomalyRequest, callback?: (err: AWSError, data: DevOpsGuru.Types.DescribeAnomalyResponse) => void): Request<DevOpsGuru.Types.DescribeAnomalyResponse, AWSError>;
  /**
   * Returns details about an anomaly that you specify using its ID.
   */
  describeAnomaly(callback?: (err: AWSError, data: DevOpsGuru.Types.DescribeAnomalyResponse) => void): Request<DevOpsGuru.Types.DescribeAnomalyResponse, AWSError>;
  /**
   * Returns details about an insight that you specify using its ID.
   */
  describeInsight(params: DevOpsGuru.Types.DescribeInsightRequest, callback?: (err: AWSError, data: DevOpsGuru.Types.DescribeInsightResponse) => void): Request<DevOpsGuru.Types.DescribeInsightResponse, AWSError>;
  /**
   * Returns details about an insight that you specify using its ID.
   */
  describeInsight(callback?: (err: AWSError, data: DevOpsGuru.Types.DescribeInsightResponse) => void): Request<DevOpsGuru.Types.DescribeInsightResponse, AWSError>;
  /**
   * Returns the number of open proactive insights, open reactive insights, and the Mean Time to Recover (MTTR) for all closed insights in resource collections in your account. You specify the type of AWS resources collection. The one type of AWS resource collection supported is AWS CloudFormation stacks. DevOps Guru can be configured to analyze only the AWS resources that are defined in the stacks.
   */
  describeResourceCollectionHealth(params: DevOpsGuru.Types.DescribeResourceCollectionHealthRequest, callback?: (err: AWSError, data: DevOpsGuru.Types.DescribeResourceCollectionHealthResponse) => void): Request<DevOpsGuru.Types.DescribeResourceCollectionHealthResponse, AWSError>;
  /**
   * Returns the number of open proactive insights, open reactive insights, and the Mean Time to Recover (MTTR) for all closed insights in resource collections in your account. You specify the type of AWS resources collection. The one type of AWS resource collection supported is AWS CloudFormation stacks. DevOps Guru can be configured to analyze only the AWS resources that are defined in the stacks.
   */
  describeResourceCollectionHealth(callback?: (err: AWSError, data: DevOpsGuru.Types.DescribeResourceCollectionHealthResponse) => void): Request<DevOpsGuru.Types.DescribeResourceCollectionHealthResponse, AWSError>;
  /**
   * Returns the integration status of services that are integrated with DevOps Guru. The one service that can be integrated with DevOps Guru is AWS Systems Manager, which can be used to create an OpsItem for each generated insight.
   */
  describeServiceIntegration(params: DevOpsGuru.Types.DescribeServiceIntegrationRequest, callback?: (err: AWSError, data: DevOpsGuru.Types.DescribeServiceIntegrationResponse) => void): Request<DevOpsGuru.Types.DescribeServiceIntegrationResponse, AWSError>;
  /**
   * Returns the integration status of services that are integrated with DevOps Guru. The one service that can be integrated with DevOps Guru is AWS Systems Manager, which can be used to create an OpsItem for each generated insight.
   */
  describeServiceIntegration(callback?: (err: AWSError, data: DevOpsGuru.Types.DescribeServiceIntegrationResponse) => void): Request<DevOpsGuru.Types.DescribeServiceIntegrationResponse, AWSError>;
  /**
   * Returns lists AWS resources that are of the specified resource collection type. The one type of AWS resource collection supported is AWS CloudFormation stacks. DevOps Guru can be configured to analyze only the AWS resources that are defined in the stacks.
   */
  getResourceCollection(params: DevOpsGuru.Types.GetResourceCollectionRequest, callback?: (err: AWSError, data: DevOpsGuru.Types.GetResourceCollectionResponse) => void): Request<DevOpsGuru.Types.GetResourceCollectionResponse, AWSError>;
  /**
   * Returns lists AWS resources that are of the specified resource collection type. The one type of AWS resource collection supported is AWS CloudFormation stacks. DevOps Guru can be configured to analyze only the AWS resources that are defined in the stacks.
   */
  getResourceCollection(callback?: (err: AWSError, data: DevOpsGuru.Types.GetResourceCollectionResponse) => void): Request<DevOpsGuru.Types.GetResourceCollectionResponse, AWSError>;
  /**
   * Returns a list of the anomalies that belong to an insight that you specify using its ID.
   */
  listAnomaliesForInsight(params: DevOpsGuru.Types.ListAnomaliesForInsightRequest, callback?: (err: AWSError, data: DevOpsGuru.Types.ListAnomaliesForInsightResponse) => void): Request<DevOpsGuru.Types.ListAnomaliesForInsightResponse, AWSError>;
  /**
   * Returns a list of the anomalies that belong to an insight that you specify using its ID.
   */
  listAnomaliesForInsight(callback?: (err: AWSError, data: DevOpsGuru.Types.ListAnomaliesForInsightResponse) => void): Request<DevOpsGuru.Types.ListAnomaliesForInsightResponse, AWSError>;
  /**
   * Returns a list of the events emitted by the resources that are evaluated by DevOps Guru. You can use filters to specify which events are returned.
   */
  listEvents(params: DevOpsGuru.Types.ListEventsRequest, callback?: (err: AWSError, data: DevOpsGuru.Types.ListEventsResponse) => void): Request<DevOpsGuru.Types.ListEventsResponse, AWSError>;
  /**
   * Returns a list of the events emitted by the resources that are evaluated by DevOps Guru. You can use filters to specify which events are returned.
   */
  listEvents(callback?: (err: AWSError, data: DevOpsGuru.Types.ListEventsResponse) => void): Request<DevOpsGuru.Types.ListEventsResponse, AWSError>;
  /**
   * Returns a list of insights in your AWS account. You can specify which insights are returned by their start time and status (ONGOING, CLOSED, or ANY).
   */
  listInsights(params: DevOpsGuru.Types.ListInsightsRequest, callback?: (err: AWSError, data: DevOpsGuru.Types.ListInsightsResponse) => void): Request<DevOpsGuru.Types.ListInsightsResponse, AWSError>;
  /**
   * Returns a list of insights in your AWS account. You can specify which insights are returned by their start time and status (ONGOING, CLOSED, or ANY).
   */
  listInsights(callback?: (err: AWSError, data: DevOpsGuru.Types.ListInsightsResponse) => void): Request<DevOpsGuru.Types.ListInsightsResponse, AWSError>;
  /**
   * Returns a list of notification channels configured for DevOps Guru. Each notification channel is used to notify you when DevOps Guru generates an insight that contains information about how to improve your operations. The one supported notification channel is Amazon Simple Notification Service (Amazon SNS).
   */
  listNotificationChannels(params: DevOpsGuru.Types.ListNotificationChannelsRequest, callback?: (err: AWSError, data: DevOpsGuru.Types.ListNotificationChannelsResponse) => void): Request<DevOpsGuru.Types.ListNotificationChannelsResponse, AWSError>;
  /**
   * Returns a list of notification channels configured for DevOps Guru. Each notification channel is used to notify you when DevOps Guru generates an insight that contains information about how to improve your operations. The one supported notification channel is Amazon Simple Notification Service (Amazon SNS).
   */
  listNotificationChannels(callback?: (err: AWSError, data: DevOpsGuru.Types.ListNotificationChannelsResponse) => void): Request<DevOpsGuru.Types.ListNotificationChannelsResponse, AWSError>;
  /**
   * Returns a list of a specified insight's recommendations. Each recommendation includes a list of related metrics and a list of related events.
   */
  listRecommendations(params: DevOpsGuru.Types.ListRecommendationsRequest, callback?: (err: AWSError, data: DevOpsGuru.Types.ListRecommendationsResponse) => void): Request<DevOpsGuru.Types.ListRecommendationsResponse, AWSError>;
  /**
   * Returns a list of a specified insight's recommendations. Each recommendation includes a list of related metrics and a list of related events.
   */
  listRecommendations(callback?: (err: AWSError, data: DevOpsGuru.Types.ListRecommendationsResponse) => void): Request<DevOpsGuru.Types.ListRecommendationsResponse, AWSError>;
  /**
   * Collects customer feedback about the specified insight.
   */
  putFeedback(params: DevOpsGuru.Types.PutFeedbackRequest, callback?: (err: AWSError, data: DevOpsGuru.Types.PutFeedbackResponse) => void): Request<DevOpsGuru.Types.PutFeedbackResponse, AWSError>;
  /**
   * Collects customer feedback about the specified insight.
   */
  putFeedback(callback?: (err: AWSError, data: DevOpsGuru.Types.PutFeedbackResponse) => void): Request<DevOpsGuru.Types.PutFeedbackResponse, AWSError>;
  /**
   * Removes a notification channel from DevOps Guru. A notification channel is used to notify you when DevOps Guru generates an insight that contains information about how to improve your operations.
   */
  removeNotificationChannel(params: DevOpsGuru.Types.RemoveNotificationChannelRequest, callback?: (err: AWSError, data: DevOpsGuru.Types.RemoveNotificationChannelResponse) => void): Request<DevOpsGuru.Types.RemoveNotificationChannelResponse, AWSError>;
  /**
   * Removes a notification channel from DevOps Guru. A notification channel is used to notify you when DevOps Guru generates an insight that contains information about how to improve your operations.
   */
  removeNotificationChannel(callback?: (err: AWSError, data: DevOpsGuru.Types.RemoveNotificationChannelResponse) => void): Request<DevOpsGuru.Types.RemoveNotificationChannelResponse, AWSError>;
  /**
   * Returns a list of insights in your AWS account. You can specify which insights are returned by their start time, one or more statuses (ONGOING and CLOSED), one or more severities (LOW, MEDIUM, and HIGH), and type (REACTIVE or PROACTIVE). Use the Filters parameter to specify status and severity search parameters. Use the Type parameter to specify REACTIVE or PROACTIVE in your search.
   */
  searchInsights(params: DevOpsGuru.Types.SearchInsightsRequest, callback?: (err: AWSError, data: DevOpsGuru.Types.SearchInsightsResponse) => void): Request<DevOpsGuru.Types.SearchInsightsResponse, AWSError>;
  /**
   * Returns a list of insights in your AWS account. You can specify which insights are returned by their start time, one or more statuses (ONGOING and CLOSED), one or more severities (LOW, MEDIUM, and HIGH), and type (REACTIVE or PROACTIVE). Use the Filters parameter to specify status and severity search parameters. Use the Type parameter to specify REACTIVE or PROACTIVE in your search.
   */
  searchInsights(callback?: (err: AWSError, data: DevOpsGuru.Types.SearchInsightsResponse) => void): Request<DevOpsGuru.Types.SearchInsightsResponse, AWSError>;
  /**
   * Updates the collection of resources that DevOps Guru analyzes. The one type of AWS resource collection supported is AWS CloudFormation stacks. DevOps Guru can be configured to analyze only the AWS resources that are defined in the stacks. This method also creates the IAM role required for you to use DevOps Guru.
   */
  updateResourceCollection(params: DevOpsGuru.Types.UpdateResourceCollectionRequest, callback?: (err: AWSError, data: DevOpsGuru.Types.UpdateResourceCollectionResponse) => void): Request<DevOpsGuru.Types.UpdateResourceCollectionResponse, AWSError>;
  /**
   * Updates the collection of resources that DevOps Guru analyzes. The one type of AWS resource collection supported is AWS CloudFormation stacks. DevOps Guru can be configured to analyze only the AWS resources that are defined in the stacks. This method also creates the IAM role required for you to use DevOps Guru.
   */
  updateResourceCollection(callback?: (err: AWSError, data: DevOpsGuru.Types.UpdateResourceCollectionResponse) => void): Request<DevOpsGuru.Types.UpdateResourceCollectionResponse, AWSError>;
  /**
   * Enables or disables integration with a service that can be integrated with DevOps Guru. The one service that can be integrated with DevOps Guru is AWS Systems Manager, which can be used to create an OpsItem for each generated insight.
   */
  updateServiceIntegration(params: DevOpsGuru.Types.UpdateServiceIntegrationRequest, callback?: (err: AWSError, data: DevOpsGuru.Types.UpdateServiceIntegrationResponse) => void): Request<DevOpsGuru.Types.UpdateServiceIntegrationResponse, AWSError>;
  /**
   * Enables or disables integration with a service that can be integrated with DevOps Guru. The one service that can be integrated with DevOps Guru is AWS Systems Manager, which can be used to create an OpsItem for each generated insight.
   */
  updateServiceIntegration(callback?: (err: AWSError, data: DevOpsGuru.Types.UpdateServiceIntegrationResponse) => void): Request<DevOpsGuru.Types.UpdateServiceIntegrationResponse, AWSError>;
}
declare namespace DevOpsGuru {
export interface AddNotificationChannelRequest {
/**
* A NotificationChannelConfig object that specifies what type of notification channel to add. The one supported notification channel is Amazon Simple Notification Service (Amazon SNS).
*/
Config: NotificationChannelConfig;
}
export interface AddNotificationChannelResponse {
/**
* The ID of the added notification channel.
*/
Id: NotificationChannelId;
}
export type AnomalyId = string;
export type AnomalyLimit = number;
export type AnomalySeverity = "LOW"|"MEDIUM"|"HIGH"|string;
export interface AnomalySourceDetails {
/**
* An array of CloudWatchMetricsDetail object that contains information about the analyzed metrics that displayed anomalous behavior.
*/
CloudWatchMetrics?: CloudWatchMetricsDetails;
}
export type AnomalyStatus = "ONGOING"|"CLOSED"|string;
export interface AnomalyTimeRange {
/**
* The time when the anomalous behavior started.
*/
StartTime: Timestamp;
/**
* The time when the anomalous behavior ended.
*/
EndTime?: Timestamp;
}
export type Channels = NotificationChannel[];
export interface CloudFormationCollection {
/**
* An array of CloudFormation stack names.
*/
StackNames?: StackNames;
}
export interface CloudFormationCollectionFilter {
/**
* An array of CloudFormation stack names.
*/
StackNames?: StackNames;
}
export interface CloudFormationHealth {
/**
* The name of the CloudFormation stack.
*/
StackName?: StackName;
/**
* Information about the health of the AWS resources in your account that are specified by an AWS CloudFormation stack, including the number of open proactive, open reactive insights, and the Mean Time to Recover (MTTR) of closed insights.
*/
Insight?: InsightHealth;
}
export type CloudFormationHealths = CloudFormationHealth[];
  export interface CloudWatchMetricsDetail {
    /**
     * The name of the CloudWatch metric.
     */
    MetricName?: CloudWatchMetricsMetricName;
    /**
     * The namespace of the CloudWatch metric. A namespace is a container for CloudWatch metrics.
     */
    Namespace?: CloudWatchMetricsNamespace;
    /**
     * An array of CloudWatch dimensions associated with the CloudWatch metric.
     */
    Dimensions?: CloudWatchMetricsDimensions;
    /**
     * The type of statistic associated with the CloudWatch metric. For more information, see Statistics in the Amazon CloudWatch User Guide.
     */
    Stat?: CloudWatchMetricsStat;
    /**
     * The unit of measure used for the CloudWatch metric. For example, Bytes, Seconds, Count, and Percent.
     */
    Unit?: CloudWatchMetricsUnit;
    /**
     * The length of time associated with the CloudWatch metric in number of seconds.
     */
    Period?: CloudWatchMetricsPeriod;
  }
export type CloudWatchMetricsDetails = CloudWatchMetricsDetail[];
export interface CloudWatchMetricsDimension {
/**
* The name of the CloudWatch dimension.
*/
Name?: CloudWatchMetricsDimensionName;
/**
* The value of the CloudWatch dimension.
*/
Value?: CloudWatchMetricsDimensionValue;
}
export type CloudWatchMetricsDimensionName = string;
export type CloudWatchMetricsDimensionValue = string;
export type CloudWatchMetricsDimensions = CloudWatchMetricsDimension[];
export type CloudWatchMetricsMetricName = string;
export type CloudWatchMetricsNamespace = string;
export type CloudWatchMetricsPeriod = number;
export type CloudWatchMetricsStat = "Sum"|"Average"|"SampleCount"|"Minimum"|"Maximum"|"p99"|"p90"|"p50"|string;
export type CloudWatchMetricsUnit = string;
export interface DescribeAccountHealthRequest {
}
export interface DescribeAccountHealthResponse {
/**
* An integer that specifies the number of open reactive insights in your AWS account.
*/
OpenReactiveInsights: NumOpenReactiveInsights;
/**
* An integer that specifies the number of open proactive insights in your AWS account.
*/
OpenProactiveInsights: NumOpenProactiveInsights;
/**
* An integer that specifies the number of metrics that have been analyzed in your AWS account.
*/
MetricsAnalyzed: NumMetricsAnalyzed;
/**
* The number of Amazon DevOps Guru resource analysis hours billed to the current AWS account in the last hour.
*/
ResourceHours: ResourceHours;
}
export interface DescribeAccountOverviewRequest {
/**
* The start of the time range passed in. The start time granularity is at the day level. The floor of the start time is used. Returned information occurred after this day.
*/
FromTime: Timestamp;
/**
* The end of the time range passed in. The start time granularity is at the day level. The floor of the start time is used. Returned information occurred before this day. If this is not specified, then the current day is used.
*/
ToTime?: Timestamp;
}
export interface DescribeAccountOverviewResponse {
/**
* An integer that specifies the number of open reactive insights in your AWS account that were created during the time range passed in.
*/
ReactiveInsights: NumReactiveInsights;
/**
* An integer that specifies the number of open proactive insights in your AWS account that were created during the time range passed in.
*/
ProactiveInsights: NumProactiveInsights;
/**
* The Mean Time to Recover (MTTR) for all closed insights that were created during the time range passed in.
*/
MeanTimeToRecoverInMilliseconds: MeanTimeToRecoverInMilliseconds;
}
export interface DescribeAnomalyRequest {
/**
* The ID of the anomaly.
*/
Id: AnomalyId;
}
  export interface DescribeAnomalyResponse {
    /**
     * A ProactiveAnomaly object that represents the requested anomaly.
     */
    ProactiveAnomaly?: ProactiveAnomaly;
    /**
     * A ReactiveAnomaly object that represents the requested anomaly.
     */
    ReactiveAnomaly?: ReactiveAnomaly;
  }
export interface DescribeInsightRequest {
/**
* The ID of the insight.
*/
Id: InsightId;
}
  export interface DescribeInsightResponse {
    /**
     * A ProactiveInsight object that represents the requested insight.
     */
    ProactiveInsight?: ProactiveInsight;
    /**
     * A ReactiveInsight object that represents the requested insight.
     */
    ReactiveInsight?: ReactiveInsight;
  }
export interface DescribeResourceCollectionHealthRequest {
/**
* An AWS resource collection type. This type specifies how analyzed AWS resources are defined. The one type of AWS resource collection supported is AWS CloudFormation stacks. DevOps Guru can be configured to analyze only the AWS resources that are defined in the stacks.
*/
ResourceCollectionType: ResourceCollectionType;
/**
* The pagination token to use to retrieve the next page of results for this operation. If this value is null, it retrieves the first page.
*/
NextToken?: UuidNextToken;
}
export interface DescribeResourceCollectionHealthResponse {
/**
* The returned CloudFormationHealthOverview object that contains an InsightHealthOverview object with the requested system health information.
*/
CloudFormation: CloudFormationHealths;
/**
* The pagination token to use to retrieve the next page of results for this operation. If there are no more pages, this value is null.
*/
NextToken?: UuidNextToken;
}
export interface DescribeServiceIntegrationRequest {
}
export interface DescribeServiceIntegrationResponse {
ServiceIntegration?: ServiceIntegrationConfig;
}
export interface EndTimeRange {
/**
* The earliest end time in the time range.
*/
FromTime?: Timestamp;
/**
* The latest end time in the time range.
*/
ToTime?: Timestamp;
}
export interface Event {
ResourceCollection?: ResourceCollection;
/**
* The ID of the event.
*/
Id?: EventId;
/**
* A Timestamp that specifies the time the event occurred.
*/
Time?: Timestamp;
/**
* The AWS source that emitted the event.
*/
EventSource?: EventSource;
/**
* The name of the event.
*/
Name?: EventName;
/**
* The source, AWS_CLOUD_TRAIL or AWS_CODE_DEPLOY, where DevOps Guru analysis found the event.
*/
DataSource?: EventDataSource;
/**
* The class of the event. The class specifies what the event is related to, such as an infrastructure change, a deployment, or a schema change.
*/
EventClass?: EventClass;
/**
* An EventResource object that contains information about the resource that emitted the event.
*/
Resources?: EventResources;
}
export type EventClass = "INFRASTRUCTURE"|"DEPLOYMENT"|"SECURITY_CHANGE"|"CONFIG_CHANGE"|"SCHEMA_CHANGE"|string;
export type EventDataSource = "AWS_CLOUD_TRAIL"|"AWS_CODE_DEPLOY"|string;
export type EventId = string;
export type EventName = string;
export interface EventResource {
/**
* The type of resource that emitted an event.
*/
Type?: EventResourceType;
/**
* The name of the resource that emitted an event.
*/
Name?: EventResourceName;
/**
* The Amazon Resource Name (ARN) of the resource that emitted an event.
*/
Arn?: EventResourceArn;
}
export type EventResourceArn = string;
export type EventResourceName = string;
export type EventResourceType = string;
export type EventResources = EventResource[];
export type EventSource = string;
export interface EventTimeRange {
/**
* The time when the event started.
*/
FromTime: Timestamp;
/**
* The time when the event ended.
*/
ToTime: Timestamp;
}
export type Events = Event[];
export interface GetResourceCollectionRequest {
/**
* The type of AWS resource collections to return. The one valid value is CLOUD_FORMATION for AWS CloudFormation stacks.
*/
ResourceCollectionType: ResourceCollectionType;
/**
* The pagination token to use to retrieve the next page of results for this operation. If this value is null, it retrieves the first page.
*/
NextToken?: UuidNextToken;
}
export interface GetResourceCollectionResponse {
/**
* The requested list of AWS resource collections. The one type of AWS resource collection supported is AWS CloudFormation stacks. DevOps Guru can be configured to analyze only the AWS resources that are defined in the stacks.
*/
ResourceCollection?: ResourceCollectionFilter;
/**
* The pagination token to use to retrieve the next page of results for this operation. If there are no more pages, this value is null.
*/
NextToken?: UuidNextToken;
}
export interface InsightFeedback {
/**
* The insight feedback ID.
*/
Id?: InsightId;
/**
* The feedback provided by the customer.
*/
Feedback?: InsightFeedbackOption;
}
export type InsightFeedbackOption = "VALID_COLLECTION"|"RECOMMENDATION_USEFUL"|"ALERT_TOO_SENSITIVE"|"DATA_NOISY_ANOMALY"|"DATA_INCORRECT"|string;
  export interface InsightHealth {
    /**
     * The number of open proactive insights.
     */
    OpenProactiveInsights?: NumOpenProactiveInsights;
    /**
     * The number of open reactive insights.
     */
    OpenReactiveInsights?: NumOpenReactiveInsights;
    /**
     * The Mean Time to Recover (MTTR) for the insight.
     */
    MeanTimeToRecoverInMilliseconds?: MeanTimeToRecoverInMilliseconds;
  }
export type InsightId = string;
export type InsightName = string;
export type InsightSeverities = InsightSeverity[];
export type InsightSeverity = "LOW"|"MEDIUM"|"HIGH"|string;
export type InsightStatus = "ONGOING"|"CLOSED"|string;
export type InsightStatuses = InsightStatus[];
export interface InsightTimeRange {
/**
* The time when the behavior described in an insight started.
*/
StartTime: Timestamp;
/**
* The time when the behavior described in an insight ended.
*/
EndTime?: Timestamp;
}
export type InsightType = "REACTIVE"|"PROACTIVE"|string;
export type ListAnomaliesForInsightMaxResults = number;
export interface ListAnomaliesForInsightRequest {
/**
* The ID of the insight. The returned anomalies belong to this insight.
*/
InsightId: InsightId;
/**
* A time range used to specify when the requested anomalies started. All returned anomalies started during this time range.
*/
StartTimeRange?: StartTimeRange;
/**
* The maximum number of results to return with a single call. To retrieve the remaining results, make another call with the returned nextToken value.
*/
MaxResults?: ListAnomaliesForInsightMaxResults;
/**
* The pagination token to use to retrieve the next page of results for this operation. If this value is null, it retrieves the first page.
*/
NextToken?: UuidNextToken;
}
export interface ListAnomaliesForInsightResponse {
/**
* An array of ProactiveAnomalySummary objects that represent the requested anomalies
*/
ProactiveAnomalies?: ProactiveAnomalies;
/**
* An array of ReactiveAnomalySummary objects that represent the requested anomalies
*/
ReactiveAnomalies?: ReactiveAnomalies;
/**
* The pagination token to use to retrieve the next page of results for this operation. If there are no more pages, this value is null.
*/
NextToken?: UuidNextToken;
}
export interface ListEventsFilters {
/**
* An ID of an insight that is related to the events you want to filter for.
*/
InsightId?: InsightId;
/**
* A time range during which you want the filtered events to have occurred.
*/
EventTimeRange?: EventTimeRange;
/**
* The class of the events you want to filter for, such as an infrastructure change, a deployment, or a schema change.
*/
EventClass?: EventClass;
/**
* The AWS source that emitted the events you want to filter for.
*/
EventSource?: EventSource;
/**
* The source, AWS_CLOUD_TRAIL or AWS_CODE_DEPLOY, of the events you want returned.
*/
DataSource?: EventDataSource;
ResourceCollection?: ResourceCollection;
}
export type ListEventsMaxResults = number;
export interface ListEventsRequest {
/**
* A ListEventsFilters object used to specify which events to return.
*/
Filters: ListEventsFilters;
/**
* The maximum number of results to return with a single call. To retrieve the remaining results, make another call with the returned nextToken value.
*/
MaxResults?: ListEventsMaxResults;
/**
* The pagination token to use to retrieve the next page of results for this operation. If this value is null, it retrieves the first page.
*/
NextToken?: UuidNextToken;
}
export interface ListEventsResponse {
/**
* A list of the requested events.
*/
Events: Events;
/**
* The pagination token to use to retrieve the next page of results for this operation. If there are no more pages, this value is null.
*/
NextToken?: UuidNextToken;
}
export interface ListInsightsAnyStatusFilter {
/**
* Use to filter for either REACTIVE or PROACTIVE insights.
*/
Type: InsightType;
/**
* A time range used to specify when the behavior of the filtered insights started.
*/
StartTimeRange: StartTimeRange;
}
export interface ListInsightsClosedStatusFilter {
/**
* Use to filter for either REACTIVE or PROACTIVE insights.
*/
Type: InsightType;
/**
* A time range used to specify when the behavior of the filtered insights ended.
*/
EndTimeRange: EndTimeRange;
}
export type ListInsightsMaxResults = number;
export interface ListInsightsOngoingStatusFilter {
/**
* Use to filter for either REACTIVE or PROACTIVE insights.
*/
Type: InsightType;
}
export interface ListInsightsRequest {
/**
* A filter used to filter the returned insights by their status. You can specify one status filter.
*/
StatusFilter: ListInsightsStatusFilter;
/**
* The maximum number of results to return with a single call. To retrieve the remaining results, make another call with the returned nextToken value.
*/
MaxResults?: ListInsightsMaxResults;
/**
* The pagination token to use to retrieve the next page of results for this operation. If this value is null, it retrieves the first page.
*/
NextToken?: UuidNextToken;
}
export interface ListInsightsResponse {
/**
* The returned list of proactive insights.
*/
ProactiveInsights?: ProactiveInsights;
/**
* The returned list of reactive insights.
*/
ReactiveInsights?: ReactiveInsights;
/**
* The pagination token to use to retrieve the next page of results for this operation. If there are no more pages, this value is null.
*/
NextToken?: UuidNextToken;
}
  export interface ListInsightsStatusFilter {
    /**
     * A ListInsightsOngoingStatusFilter that specifies ongoing insights that are either REACTIVE or PROACTIVE.
     */
    Ongoing?: ListInsightsOngoingStatusFilter;
    /**
     * A ListInsightsClosedStatusFilter that specifies closed insights that are either REACTIVE or PROACTIVE.
     */
    Closed?: ListInsightsClosedStatusFilter;
    /**
     * A ListInsightsAnyStatusFilter that specifies insights of any status that are either REACTIVE or PROACTIVE.
     */
    Any?: ListInsightsAnyStatusFilter;
  }
export interface ListNotificationChannelsRequest {
/**
* The pagination token to use to retrieve the next page of results for this operation. If this value is null, it retrieves the first page.
*/
NextToken?: UuidNextToken;
}
export interface ListNotificationChannelsResponse {
/**
* An array that contains the requested notification channels.
*/
Channels?: Channels;
/**
* The pagination token to use to retrieve the next page of results for this operation. If there are no more pages, this value is null.
*/
NextToken?: UuidNextToken;
}
export interface ListRecommendationsRequest {
/**
* The ID of the requested insight.
*/
InsightId: InsightId;
/**
* The pagination token to use to retrieve the next page of results for this operation. If this value is null, it retrieves the first page.
*/
NextToken?: UuidNextToken;
}
export interface ListRecommendationsResponse {
/**
* An array of the requested recommendations.
*/
Recommendations?: Recommendations;
/**
* The pagination token to use to retrieve the next page of results for this operation. If there are no more pages, this value is null.
*/
NextToken?: UuidNextToken;
}
export type MeanTimeToRecoverInMilliseconds = number;
export interface NotificationChannel {
/**
* The ID of a notification channel.
*/
Id?: NotificationChannelId;
/**
* A NotificationChannelConfig object that contains information about configured notification channels.
*/
Config?: NotificationChannelConfig;
}
  export interface NotificationChannelConfig {
    /**
     * Information about a notification channel configured in DevOps Guru to send notifications when insights are created. If you use an Amazon SNS topic in another account, you must attach a policy to it that grants DevOps Guru permission to send it notifications. DevOps Guru adds the required policy on your behalf to send notifications using Amazon SNS in your account. For more information, see Permissions for cross account Amazon SNS topics. If you use an Amazon SNS topic that is encrypted by an AWS Key Management Service customer-managed key (CMK), then you must add permissions to the CMK. For more information, see Permissions for AWS KMS–encrypted Amazon SNS topics.
     */
    Sns: SnsChannelConfig;
  }
export type NotificationChannelId = string;
export type NumMetricsAnalyzed = number;
export type NumOpenProactiveInsights = number;
export type NumOpenReactiveInsights = number;
export type NumProactiveInsights = number;
export type NumReactiveInsights = number;
export interface OpsCenterIntegration {
/**
* Specifies if DevOps Guru is enabled to create an AWS Systems Manager OpsItem for each created insight.
*/
OptInStatus?: OptInStatus;
}
export interface OpsCenterIntegrationConfig {
/**
* Specifies if DevOps Guru is enabled to create an AWS Systems Manager OpsItem for each created insight.
*/
OptInStatus?: OptInStatus;
}
export type OptInStatus = "ENABLED"|"DISABLED"|string;
export interface PredictionTimeRange {
/**
* The time range during which a metric limit is expected to be exceeded. This applies to proactive insights only.
*/
StartTime: Timestamp;
/**
* The time when the behavior in a proactive insight is expected to end.
*/
EndTime?: Timestamp;
}
export type ProactiveAnomalies = ProactiveAnomalySummary[];
export interface ProactiveAnomaly {
/**
* The ID of a proactive anomaly.
*/
Id?: AnomalyId;
/**
* The severity of a proactive anomaly.
*/
Severity?: AnomalySeverity;
/**
* The status of a proactive anomaly.
*/
Status?: AnomalyStatus;
/**
* The time of the anomaly's most recent update.
*/
UpdateTime?: Timestamp;
AnomalyTimeRange?: AnomalyTimeRange;
PredictionTimeRange?: PredictionTimeRange;
/**
* Details about the source of the analyzed operational data that triggered the anomaly. The one supported source is Amazon CloudWatch metrics.
*/
SourceDetails?: AnomalySourceDetails;
/**
* The ID of the insight that contains this anomaly. An insight is composed of related anomalies.
*/
AssociatedInsightId?: InsightId;
ResourceCollection?: ResourceCollection;
/**
* A threshold that was exceeded by behavior in analyzed resources. Exceeding this threshold is related to the anomalous behavior that generated this anomaly.
*/
Limit?: AnomalyLimit;
}
export interface ProactiveAnomalySummary {
/**
* The ID of the anomaly.
*/
Id?: AnomalyId;
/**
* The severity of the anomaly.
*/
Severity?: AnomalySeverity;
/**
* The status of the anomaly.
*/
Status?: AnomalyStatus;
/**
* The time of the anomaly's most recent update.
*/
UpdateTime?: Timestamp;
AnomalyTimeRange?: AnomalyTimeRange;
PredictionTimeRange?: PredictionTimeRange;
/**
* Details about the source of the analyzed operational data that triggered the anomaly. The one supported source is Amazon CloudWatch metrics.
*/
SourceDetails?: AnomalySourceDetails;
/**
* The ID of the insight that contains this anomaly. An insight is composed of related anomalies.
*/
AssociatedInsightId?: InsightId;
ResourceCollection?: ResourceCollection;
/**
* A threshold that was exceeded by behavior in analyzed resources. Exceeding this threshold is related to the anomalous behavior that generated this anomaly.
*/
Limit?: AnomalyLimit;
}
  export interface ProactiveInsight {
    /**
     * The ID of the proactive insight.
     */
    Id?: InsightId;
    /**
     * The name of the proactive insight.
     */
    Name?: InsightName;
    /**
     * The severity of the proactive insight.
     */
    Severity?: InsightSeverity;
    /**
     * The status of the proactive insight.
     */
    Status?: InsightStatus;
    InsightTimeRange?: InsightTimeRange;
    PredictionTimeRange?: PredictionTimeRange;
    ResourceCollection?: ResourceCollection;
    /**
     * The ID of the AWS Systems Manager OpsItem created for this insight. You must enable the creation of OpsItems to have an OpsItem created for each insight.
     */
    SsmOpsItemId?: SsmOpsItemId;
  }
export interface ProactiveInsightSummary {
/**
* The ID of the proactive insight.
*/
Id?: InsightId;
/**
* The name of the proactive insight.
*/
Name?: InsightName;
/**
* The severity of the proactive insight.
*/
Severity?: InsightSeverity;
/**
* The status of the proactive insight.
*/
Status?: InsightStatus;
InsightTimeRange?: InsightTimeRange;
PredictionTimeRange?: PredictionTimeRange;
ResourceCollection?: ResourceCollection;
}
export type ProactiveInsights = ProactiveInsightSummary[];
export interface PutFeedbackRequest {
/**
* The feedback from customers is about the recommendations in this insight.
*/
InsightFeedback?: InsightFeedback;
}
export interface PutFeedbackResponse {
}
export type ReactiveAnomalies = ReactiveAnomalySummary[];
export interface ReactiveAnomaly {
/**
* The ID of the reactive anomaly.
*/
Id?: AnomalyId;
/**
* The severity of the anomaly.
*/
Severity?: AnomalySeverity;
/**
* The status of the anomaly.
*/
Status?: AnomalyStatus;
AnomalyTimeRange?: AnomalyTimeRange;
/**
* Details about the source of the analyzed operational data that triggered the anomaly. The one supported source is Amazon CloudWatch metrics.
*/
SourceDetails?: AnomalySourceDetails;
/**
* The ID of the insight that contains this anomaly. An insight is composed of related anomalies.
*/
AssociatedInsightId?: InsightId;
ResourceCollection?: ResourceCollection;
}
export interface ReactiveAnomalySummary {
/**
* The ID of the reactive anomaly.
*/
Id?: AnomalyId;
/**
* The severity of the reactive anomaly.
*/
Severity?: AnomalySeverity;
/**
* The status of the reactive anomaly.
*/
Status?: AnomalyStatus;
AnomalyTimeRange?: AnomalyTimeRange;
/**
* Details about the source of the analyzed operational data that triggered the anomaly. The one supported source is Amazon CloudWatch metrics.
*/
SourceDetails?: AnomalySourceDetails;
/**
* The ID of the insight that contains this anomaly. An insight is composed of related anomalies.
*/
AssociatedInsightId?: InsightId;
ResourceCollection?: ResourceCollection;
}
  export interface ReactiveInsight {
    /**
     * The ID of a reactive insight.
     */
    Id?: InsightId;
    /**
     * The name of a reactive insight.
     */
    Name?: InsightName;
    /**
     * The severity of a reactive insight.
     */
    Severity?: InsightSeverity;
    /**
     * The status of a reactive insight.
     */
    Status?: InsightStatus;
    InsightTimeRange?: InsightTimeRange;
    ResourceCollection?: ResourceCollection;
    /**
     * The ID of the AWS Systems Manager OpsItem created for this insight. You must enable the creation of OpsItems to have an OpsItem created for each insight.
     */
    SsmOpsItemId?: SsmOpsItemId;
  }
export interface ReactiveInsightSummary {
/**
* The ID of a reactive summary.
*/
Id?: InsightId;
/**
* The name of a reactive insight.
*/
Name?: InsightName;
/**
* The severity of a reactive insight.
*/
Severity?: InsightSeverity;
/**
* The status of a reactive insight.
*/
Status?: InsightStatus;
InsightTimeRange?: InsightTimeRange;
ResourceCollection?: ResourceCollection;
}
export type ReactiveInsights = ReactiveInsightSummary[];
export interface Recommendation {
/**
* A description of the problem.
*/
Description?: RecommendationDescription;
/**
* A hyperlink to information to help you address the problem.
*/
Link?: RecommendationLink;
/**
* The name of the recommendation.
*/
Name?: RecommendationName;
/**
* The reason DevOps Guru flagged the anomalous behavior as a problem.
*/
Reason?: RecommendationReason;
/**
* Events that are related to the problem. Use these events to learn more about what's happening and to help address the issue.
*/
RelatedEvents?: RecommendationRelatedEvents;
/**
* Anomalies that are related to the problem. Use these Anomalies to learn more about what's happening and to help address the issue.
*/
RelatedAnomalies?: RecommendationRelatedAnomalies;
}
export type RecommendationDescription = string;
export type RecommendationLink = string;
export type RecommendationName = string;
export type RecommendationReason = string;
export type RecommendationRelatedAnomalies = RecommendationRelatedAnomaly[];
export interface RecommendationRelatedAnomaly {
/**
* An array of objects that represent resources in which DevOps Guru detected anomalous behavior. Each object contains the name and type of the resource.
*/
Resources?: RecommendationRelatedAnomalyResources;
/**
* Information about where the anomalous behavior related the recommendation was found. For example, details in Amazon CloudWatch metrics.
*/
SourceDetails?: RelatedAnomalySourceDetails;
}
export interface RecommendationRelatedAnomalyResource {
/**
* The name of the resource.
*/
Name?: RecommendationRelatedAnomalyResourceName;
/**
* The type of the resource.
*/
Type?: RecommendationRelatedAnomalyResourceType;
}
export type RecommendationRelatedAnomalyResourceName = string;
export type RecommendationRelatedAnomalyResourceType = string;
export type RecommendationRelatedAnomalyResources = RecommendationRelatedAnomalyResource[];
export interface RecommendationRelatedAnomalySourceDetail {
/**
* An array of CloudWatchMetricsDetail objects that contains information about the analyzed metrics that displayed anomalous behavior.
*/
CloudWatchMetrics?: RecommendationRelatedCloudWatchMetricsSourceDetails;
}
export interface RecommendationRelatedCloudWatchMetricsSourceDetail {
/**
* The name of the CloudWatch metric.
*/
MetricName?: RecommendationRelatedCloudWatchMetricsSourceMetricName;
/**
* The namespace of the CloudWatch metric. A namespace is a container for CloudWatch metrics.
*/
Namespace?: RecommendationRelatedCloudWatchMetricsSourceNamespace;
}
export type RecommendationRelatedCloudWatchMetricsSourceDetails = RecommendationRelatedCloudWatchMetricsSourceDetail[];
export type RecommendationRelatedCloudWatchMetricsSourceMetricName = string;
export type RecommendationRelatedCloudWatchMetricsSourceNamespace = string;
export interface RecommendationRelatedEvent {
/**
* The name of the event. This corresponds to the Name field in an Event object.
*/
Name?: RecommendationRelatedEventName;
/**
* A ResourceCollection object that contains arrays of the names of AWS CloudFormation stacks.
*/
Resources?: RecommendationRelatedEventResources;
}
export type RecommendationRelatedEventName = string;
export interface RecommendationRelatedEventResource {
/**
* The name of the resource that emitted the event. This corresponds to the Name field in an EventResource object.
*/
Name?: RecommendationRelatedEventResourceName;
/**
* The type of the resource that emitted the event. This corresponds to the Type field in an EventResource object.
*/
Type?: RecommendationRelatedEventResourceType;
}
export type RecommendationRelatedEventResourceName = string;
export type RecommendationRelatedEventResourceType = string;
export type RecommendationRelatedEventResources = RecommendationRelatedEventResource[];
export type RecommendationRelatedEvents = RecommendationRelatedEvent[];
export type Recommendations = Recommendation[];
export type RelatedAnomalySourceDetails = RecommendationRelatedAnomalySourceDetail[];
export interface RemoveNotificationChannelRequest {
/**
* The ID of the notification channel to be removed.
*/
Id: NotificationChannelId;
}
export interface RemoveNotificationChannelResponse {
}
export interface ResourceCollection {
/**
* An array of the names of AWS CloudFormation stacks. The stacks define AWS resources that DevOps Guru analyzes.
*/
CloudFormation?: CloudFormationCollection;
}
export interface ResourceCollectionFilter {
/**
* Information about AWS CloudFormation stacks. You can use stacks to specify which AWS resources in your account to analyze. For more information, see Stacks in the AWS CloudFormation User Guide.
*/
CloudFormation?: CloudFormationCollectionFilter;
}
export type ResourceCollectionType = "AWS_CLOUD_FORMATION"|string;
export type ResourceHours = number;
export interface SearchInsightsFilters {
/**
* An array of severity values used to search for insights.
*/
Severities?: InsightSeverities;
/**
* An array of status values used to search for insights.
*/
Statuses?: InsightStatuses;
ResourceCollection?: ResourceCollection;
}
export type SearchInsightsMaxResults = number;
export interface SearchInsightsRequest {
/**
* The start of the time range passed in. Returned insights occurred after this time.
*/
StartTimeRange: StartTimeRange;
/**
* A SearchInsightsFilters object that is used to set the severity and status filters on your insight search.
*/
Filters?: SearchInsightsFilters;
/**
* The maximum number of results to return with a single call. To retrieve the remaining results, make another call with the returned nextToken value.
*/
MaxResults?: SearchInsightsMaxResults;
/**
* The pagination token to use to retrieve the next page of results for this operation. If this value is null, it retrieves the first page.
*/
NextToken?: UuidNextToken;
/**
* The type of insights you are searching for (REACTIVE or PROACTIVE).
*/
Type: InsightType;
}
export interface SearchInsightsResponse {
/**
* The returned proactive insights.
*/
ProactiveInsights?: ProactiveInsights;
/**
* The returned reactive insights.
*/
ReactiveInsights?: ReactiveInsights;
/**
* The pagination token to use to retrieve the next page of results for this operation. If there are no more pages, this value is null.
*/
NextToken?: UuidNextToken;
}
export interface ServiceIntegrationConfig {
/**
* Information about whether DevOps Guru is configured to create an OpsItem in AWS Systems Manager OpsCenter for each created insight.
*/
OpsCenter?: OpsCenterIntegration;
}
export interface SnsChannelConfig {
/**
* The Amazon Resource Name (ARN) of an Amazon Simple Notification Service topic.
*/
TopicArn?: TopicArn;
}
export type SsmOpsItemId = string;
export type StackName = string;
export type StackNames = StackName[];
export interface StartTimeRange {
/**
* The start time of the time range.
*/
FromTime?: Timestamp;
/**
* The end time of the time range.
*/
ToTime?: Timestamp;
}
export type Timestamp = Date;
export type TopicArn = string;
export interface UpdateCloudFormationCollectionFilter {
/**
* An array of the name of stacks to update.
*/
StackNames?: UpdateStackNames;
}
export type UpdateResourceCollectionAction = "ADD"|"REMOVE"|string;
  export interface UpdateResourceCollectionFilter {
    /**
     * A collection of AWS CloudFormation stacks.
     */
    CloudFormation?: UpdateCloudFormationCollectionFilter;
  }
export interface UpdateResourceCollectionRequest {
/**
* Specifies if the resource collection in the request is added or deleted to the resource collection.
*/
Action: UpdateResourceCollectionAction;
ResourceCollection: UpdateResourceCollectionFilter;
}
export interface UpdateResourceCollectionResponse {
}
export interface UpdateServiceIntegrationConfig {
OpsCenter?: OpsCenterIntegrationConfig;
}
export interface UpdateServiceIntegrationRequest {
/**
* An IntegratedServiceConfig object used to specify the integrated service you want to update, and whether you want to update it to enabled or disabled.
*/
ServiceIntegration: UpdateServiceIntegrationConfig;
}
export interface UpdateServiceIntegrationResponse {
}
export type UpdateStackNames = StackName[];
export type UuidNextToken = string;
/**
* A string in YYYY-MM-DD format that represents the latest possible API version that can be used in this service. Specify 'latest' to use the latest possible version.
*/
export type apiVersion = "2020-12-01"|"latest"|string;
export interface ClientApiVersions {
/**
* A string in YYYY-MM-DD format that represents the latest possible API version that can be used in this service. Specify 'latest' to use the latest possible version.
*/
apiVersion?: apiVersion;
}
export type ClientConfiguration = ServiceConfigurationOptions & ClientApiVersions;
/**
* Contains interfaces for use with the DevOpsGuru client.
*/
export import Types = DevOpsGuru;
}
export = DevOpsGuru; | the_stack |
"use strict";
import path = require('path');
import { execSync, ExecSyncOptionsWithStringEncoding } from 'child_process';
import * as winston from 'winston';
import { PowerPlatformCommand } from './powerplatform';
import axios, { AxiosResponse, AxiosStatic } from 'axios';
import { Environment } from '../common/enviroment';
import { Prompt } from '../common/prompt';
/**
 * ALM Accelerator for Advanced Makers commands.
 *
 * Creates/queries the makers Azure Active Directory group and the service
 * principal that connects Azure DevOps with Power Platform environments.
 * All Azure interaction shells out to the `az` CLI via {@link runCommand}.
 */
class AADCommand {
    /** Runs a shell command; when displayOutput is true, output streams to the console. */
    runCommand: (command: string, displayOutput: boolean) => string
    /** Interactive yes/no prompt used while validating the az CLI session. */
    prompt: Prompt
    logger: winston.Logger
    /** Axios factory (injectable for tests). */
    getAxios: () => AxiosStatic
    constructor(logger: winston.Logger) {
        this.logger = logger
        this.runCommand = (command: string, displayOutput: boolean) => {
            if (displayOutput) {
                // stdio 'inherit' streams the child's output to the parent console
                return execSync(command, <ExecSyncOptionsWithStringEncoding>{ stdio: 'inherit', encoding: 'utf8' })
            }
            return execSync(command, <ExecSyncOptionsWithStringEncoding>{ encoding: 'utf8' })
        }
        this.getAxios = () => axios
        this.prompt = new Prompt()
    }
    /**
     * Look up the AAD makers group by display name.
     * @returns the group object id, or null when there is no unique match
     */
    getAADGroup(args: IMakerGroup): string {
        let json = this.runCommand(`az ad group list --display-name "${args.azureActiveDirectoryMakersGroup}"`, false)
        let groups = <any[]>JSON.parse(json)
        if (groups.length == 1) {
            return groups[0].objectId
        }
        return null
    }
    /**
     * Install AAD Group, creating it when it does not yet exist.
     * @param args install arguments naming the makers group
     * @returns the group object id, or null when creation was skipped
     */
    installAADGroup(args: AADAppInstallArguments): string {
        // Fix: also skip when the group name is undefined/null (previously only a
        // zero-length string was skipped and `undefined` leaked into the az command)
        if (!args.azureActiveDirectoryMakersGroup) {
            this.logger?.info('Skipping group create')
            return null;
        }
        let group: string = this.getAADGroup(args)
        if (group === null) {
            this.logger?.info(`Creating ${args.azureActiveDirectoryMakersGroup} group`)
            let createJson = this.runCommand(`az ad group create --display-name "${args.azureActiveDirectoryMakersGroup}" --description "Application Lifecycle Management Accelerator for Advanced Makers" --mail-nickname="null"`, false)
            group = JSON.parse(createJson).objectId
        } else {
            this.logger?.info(`Group ${args.azureActiveDirectoryMakersGroup} exists`)
        }
        return group
    }
    /**
     * Look up the AAD application by display name.
     * @returns the application (client) id, or null when there is no unique match
     */
    getAADApplication(args: IAADApplicationArguments): string {
        let app = <any[]>JSON.parse(this.runCommand(`az ad app list --filter "displayName eq '${args.azureActiveDirectoryServicePrincipal}'"`, false))
        if (app.length == 1) {
            return app[0].appId
        }
        return null
    }
    /**
     * Add user to group (no-op when the user is already a member).
     * @param user The object ID of the contact, group, user, or service principal.
     * @param azureActiveDirectoryMakersGroup Group's object id or display name (prefix also works if there is a unique match).
     */
    addUserToGroup(user: string, azureActiveDirectoryMakersGroup: string) {
        let userInfo = JSON.parse(this.runCommand(`az ad user show --id ${user}`, false))
        if (typeof userInfo === "object" && typeof userInfo.objectId === "string") {
            let result = JSON.parse(this.runCommand(`az ad group member check --group ${azureActiveDirectoryMakersGroup} --member-id ${userInfo.objectId}`, false))
            // Fix: primitive boolean instead of the boxed Boolean wrapper type
            let exists: boolean = (result.value == true)
            if (!exists) {
                this.logger?.info(`Add ${userInfo.objectId} to ${azureActiveDirectoryMakersGroup}`)
                this.runCommand(`az ad group member add --group ${azureActiveDirectoryMakersGroup} --member-id ${userInfo.objectId}`, false)
            } else {
                this.logger?.info(`User exists in ${azureActiveDirectoryMakersGroup}`)
            }
        } else {
            this.logger?.info(`Unable to add user to ${azureActiveDirectoryMakersGroup}`)
            this.logger?.debug(userInfo)
        }
    }
    /**
     * Create the service principal required to manage solutions between Azure DevOps and the Power Platform environments
     * @param args install arguments naming the service principal
     */
    async installAADApplication(args: AADAppInstallArguments): Promise<void> {
        if (!(await this.validateAzCliReady(args))) {
            return Promise.resolve()
        }
        let manifest = path.join(__dirname, '..', '..', '..', 'config', 'manifest.json')
        // Find if application has been created already
        let app = <any[]>JSON.parse(this.runCommand(`az ad app list --filter "displayName eq '${args.azureActiveDirectoryServicePrincipal}'"`, false))
        if (app.length == 0) {
            this.logger?.info(`Creating application ${args.azureActiveDirectoryServicePrincipal}`)
            let createCommand = `az ad app create --display-name "${args.azureActiveDirectoryServicePrincipal}" --available-to-other-tenants false --required-resource-accesses "${manifest}"`
            let appCreateText = this.runCommand(createCommand, false)
            let appCreate = JSON.parse(appCreateText)
            if (typeof appCreate.Error !== "undefined") {
                this.logger.error(appCreate.Error)
            }
            app = <any[]>JSON.parse(this.runCommand(`az ad app list --filter "displayName eq '${args.azureActiveDirectoryServicePrincipal}'"`, false))
            // Fix: guard against the new app not being listed yet (AAD propagation
            // delay) — previously app[0].appId crashed on an empty array
            if (app.length > 0) {
                this.logger?.info("Creating application service principal")
                this.runCommand(`az ad sp create --id ${app[0].appId}`, false)
            }
        }
        if (app.length != 1) {
            this.logger?.info(`Application ${args.azureActiveDirectoryServicePrincipal} not found`)
            return Promise.resolve()
        }
        this.logger.info("Application exists")
        // Poll until the permission grants become readable (they can lag behind
        // application creation); give up after ~60 seconds
        let permissions: any[]
        let attempt = 0
        while (attempt <= 60) {
            try {
                permissions = <any[]>JSON.parse(this.runCommand(`az ad app permission list-grants --id ${app[0].appId}`, false))
                if (permissions.length >= 0) {
                    break
                }
            } catch {
                // Transient CLI/JSON failures are expected while AAD propagates
            }
            this.logger?.debug(`Waiting attempt ${attempt}`)
            await this.sleep(1000)
            attempt++
        }
        if (permissions?.length < 3) {
            this.logger.info("Administration grant not set")
            this.logger?.info("Granting Azure DevOps delegated admin consent")
            this.grantAdminDelegatedPermissions(app[0].appId, '499b84ac-1321-427f-aa17-267ca6975798', 'ee69721e-6c3a-468f-a9ec-302d16a4c599')
            this.logger?.info("Granting PowerApps-Advisor delegated admin consent")
            this.grantAdminDelegatedPermissions(app[0].appId, 'c9299480-c13a-49db-a7ae-cdfe54fe0313', 'd533b86d-8f67-45f0-b8bb-c0cee8da0356')
            this.logger?.info("Granting Dynamics CRM delegated admin consent")
            this.grantAdminDelegatedPermissions(app[0].appId, '00000007-0000-0000-c000-000000000000', '78ce3f0f-a1ce-49c2-8cde-64b5c0896db4')
            try {
                permissions = <any[]>JSON.parse(this.runCommand(`az ad app permission list-grants --id ${app[0].appId}`, false))
            } catch {
                // Leave permissions as-is; verified just below
            }
        }
        if (permissions?.length == 3) {
            this.logger?.info("Admin permissions granted")
        } else {
            this.logger.info("Unable to verify that Administration permissions set")
        }
        // Power Platform connectors require this exact reply URL for consent redirects
        let match = 0
        app[0].replyUrls?.forEach((u: string) => {
            if (u == "https://global.consent.azure-apim.net/redirect") {
                match++
            }
        })
        // Fix: optional-chain replyUrls (previously crashed when the property was missing)
        if (app[0].replyUrls?.length == 0 || match == 0) {
            this.logger?.debug('Adding reply url https://global.consent.azure-apim.net/redirect')
            this.runCommand(`az ad app update --id ${app[0].appId} --reply-urls https://global.consent.azure-apim.net/redirect`, true)
        }
    }
    /**
     * Grant delegated admin consent for an API/scope pair on the application.
     * https://github.com/Azure/azure-cli/issues/12137#issuecomment-596567479
     */
    grantAdminDelegatedPermissions(appId: string, apiId: string, scope: string) {
        let result = this.runCommand(`az ad app permission grant --id ${appId} --api ${apiId} --scope ${scope}`, false)
        this.logger?.debug(result)
    }
    /** Promise-based delay helper. */
    sleep(ms: number) {
        return new Promise((resolve) => {
            setTimeout(resolve, ms);
        });
    }
    /**
     * Add a secret to an existing AAD application
     * @param args install arguments naming the service principal
     * @param name base name for the credential description (hyphens are stripped)
     * @returns tenant/client ids and, when args.createSecret is set, the new secret
     */
    async addSecret(args: AADAppInstallArguments, name: string): Promise<AADAppSecret> {
        if (await this.validateAzCliReady(args)) {
            let result = <AADAppSecret>{}
            // Fix: quote the JMESPath query so the shell cannot glob-expand it
            let accounts = <any[]>JSON.parse(this.runCommand(`az account list --query "[?isDefault]"`, false))
            result.tenantId = accounts[0].tenantId
            let apps = <any[]>JSON.parse(this.runCommand(`az ad app list --filter "displayName eq '${args.azureActiveDirectoryServicePrincipal}'"`, false))
            if (apps.length == 1) {
                result.clientId = apps[0].appId
                // Count credentials that already use this name so the new one gets a unique suffix
                let suffix = ''
                let match = 0;
                apps[0].passwordCredentials?.forEach((element: any) => {
                    if (element.customKeyIdentifier?.startsWith(name)) {
                        match++
                    }
                });
                if (match > 0) {
                    suffix = `-${match + 1}`
                }
                if (args.createSecret) {
                    this.logger?.info(`Creating AAD password for ${args.azureActiveDirectoryServicePrincipal}`)
                    // Fix: remove ALL hyphens (previously only the first occurrence was removed)
                    name = name.replace(/-/g, '')
                    // Fix: keep the suffix when no truncation is needed (it was
                    // previously dropped in the short-name branch)
                    let fullName = `${name}${suffix}`
                    let newName = fullName.length > 15 ? fullName.substring(0, 15) : fullName
                    this.logger?.info(`Creating secret for ${newName}`)
                    let creds = JSON.parse(this.runCommand(`az ad app credential reset --id ${apps[0].appId} --append --credential-description ${newName}`, false))
                    result.clientSecret = creds.password
                    result.tenantId = creds.tenant
                }
            }
            return Promise.resolve(result)
        }
    }
    /**
     * Ensure the az CLI is logged in and a single default subscription matches
     * args.subscription, prompting for login/selection when needed.
     * @returns true when the CLI session is ready, false when the user declined
     *          to log in or no usable subscription could be determined
     */
    async validateAzCliReady(args: AADAppInstallArguments): Promise<boolean> {
        // Fix: every iteration either performs a login (making progress) or
        // returns — the original looped forever when args.subscription was
        // preset but no account was logged in.
        while (true) {
            let accounts: any[]
            try {
                accounts = <any[]>JSON.parse(this.runCommand('az account list', false))
            } catch {
                accounts = []
            }
            if (accounts.length == 0) {
                // No accounts are available, probably not logged in ... prompt to login
                let ok = await this.prompt.yesno('You are not logged into an account. Try login now (y/n)?', true)
                if (!ok) {
                    return Promise.resolve(false)
                }
                this.runCommand('az login --use-device-code --allow-no-subscriptions', true)
                // Re-read the account list before validating
                continue
            }
            // Check if a subscription still needs to be assigned
            if (typeof (args.subscription) == "undefined" || (args.subscription.length == 0)) {
                let defaultAccount = accounts.filter((a: any) => (a.isDefault));
                if (accounts.length == 1) {
                    // Only one subscription assigned to the user account; use it
                    args.subscription = accounts[0].id
                }
                if (defaultAccount.length == 1 && accounts.length > 1) {
                    // More than one account; confirm use of the current default tenant
                    let ok = await this.prompt.yesno(`Use default tenant ${defaultAccount[0].tenantId} in account ${defaultAccount[0].name} (y/n)?`, true);
                    if (ok) {
                        // Use the default account
                        args.subscription = defaultAccount[0].id
                    }
                }
                if (typeof (args.subscription) == "undefined" || (args.subscription.length == 0)) {
                    this.logger?.info("Missing subscription. Run az account list and use the -a argument to assign the default subscription/account")
                    return Promise.resolve(false);
                }
            }
            let match = accounts.filter((a: any) => (a.id == args.subscription || a.name == args.subscription) && (a.isDefault));
            if (match.length != 1) {
                this.logger?.info(`${args.subscription} is not the default account. Check you have run az login and have selected the correct default account using az account set --subscription`)
                this.logger?.info('Read more https://docs.microsoft.com/en-us/cli/azure/account?view=azure-cli-latest#az_account_set')
                return Promise.resolve(false)
            }
            return Promise.resolve(true)
        }
    }
}
/**
 * Azure Active Directory User Arguments
 */
class AADAppInstallArguments {
    constructor() {
        this.accessTokens = {}
        this.settings = {}
    }
    /**
     * The power platform endpoint to interact with
     */
    endpoint: string;
    /**
     * Audience scoped access tokens
     */
    accessTokens: { [id: string] : string }
    /**
     * The name of the Azure subscription
     */
    subscription: string
    /**
     * Azure Active directory application name
     */
    azureActiveDirectoryServicePrincipal: string
    /**
     * Azure Active directory makers group
     */
    azureActiveDirectoryMakersGroup: string
    /**
     * Create secret for application
     */
    createSecret: boolean
    /**
     * Optional settings
     */
    settings: { [id: string] : string }
}
/** Credentials for an AAD application secret produced by AADCommand.addSecret. */
type AADAppSecret = {
    tenantId: string,
    clientId: string,
    clientSecret: string
}
/** Arguments naming the AAD makers group to look up. */
interface IMakerGroup {
    azureActiveDirectoryMakersGroup: string
}
/** Arguments naming the AAD application (service principal) to look up. */
interface IAADApplicationArguments {
    azureActiveDirectoryServicePrincipal: string
}
export {
    AADAppInstallArguments,
    AADAppSecret,
    AADCommand,
    IAADApplicationArguments
};
import type { ContentBlock } from '../index';
import { EditorState } from '../index';
import {
shiftRedoStack,
pushToRedoStack,
removeCompositionModeFromEditorState,
preserveSelection,
replaceComponentData,
removeFocus,
setLastChangeType,
getBlocksEntityTypeAndData,
didBlocksChange,
doesEntityExistInBoth,
} from './undoRedoDraftUtils';
import {
IMAGE_TYPE,
VIDEO_TYPE,
FILE_UPLOAD_TYPE,
GALLERY_TYPE,
COLLAPSIBLE_LIST_TYPE,
TABLE_TYPE,
POLL_TYPE,
} from 'wix-rich-content-common';
import { isEqual } from 'lodash';
/* Type declarations */
// Sentinel type for plugins whose data should be carried over untouched.
const IGNORE_TYPE = 'ignore';
// Plugins that embed nested RICOS/Draft editors and need recursive fixing.
const INNER_RICOS_TYPES = [COLLAPSIBLE_LIST_TYPE, TABLE_TYPE];
// new plugins should be added while they are not supported
const PLUGINS_TO_IGNORE: string[] = [POLL_TYPE];
// Result of inspecting one block's entity during an undo step.
type EntityToReplace = {
  // Repaired component data to write back into the block, when available.
  fixedData?: Record<string, unknown>;
  // Key of the block whose entity was inspected.
  blockKey?: string;
  // True when the undo produced a broken/no-op state and another undo is needed.
  shouldUndoAgain: boolean;
};
// Result of one fix pass over an editor state.
type UndoOperationResult = {
  fixedEditorState: EditorState;
  shouldUndoAgain: boolean;
};
// Maps unsupported plugin types to the IGNORE_TYPE sentinel; all others pass through.
function getType(type: string) {
  return PLUGINS_TO_IGNORE.includes(type) ? IGNORE_TYPE : type;
}
// Runs the inner-RICOS fixer for the block's entity; returns the fixer's result
// only when it represents an actual change, false otherwise.
function getFixedEntityFromInnerRicos(currentBlock, contentState, newContentState) {
  const blockKey = currentBlock.key;
  const { type, data: currentData } = getBlocksEntityTypeAndData(blockKey, contentState);
  const { data: newData } = getBlocksEntityTypeAndData(blockKey, newContentState);
  const entityToReplace = innerRicosDataFixers[type]?.(currentData, newData);
  const { shouldUndoAgain, fixedData } = entityToReplace;
  // "changed" means either a fix is pending (undo again, with data) or the fixer settled
  const didInnerRicosChange = shouldUndoAgain ? Boolean(fixedData) : true;
  return didInnerRicosChange && entityToReplace;
}
// Compares a block's entity data across undo states; returns false when nothing
// changed, otherwise an EntityToReplace (with fixedData when a fixer can repair it).
function getFixedEntity(currentBlock, contentState, newContentState) {
  const blockKey = currentBlock.key;
  const { type, data: currentData } = getBlocksEntityTypeAndData(blockKey, contentState);
  const { data: newData } = getBlocksEntityTypeAndData(blockKey, newContentState);
  if (isEqual(currentData, newData)) {
    return false;
  }
  const fixedData = entityDataFixers[getType(type)]?.(currentData, newData);
  const entityToReplace: EntityToReplace = fixedData
    ? { fixedData, shouldUndoAgain: true }
    : { shouldUndoAgain: false };
  return entityToReplace;
}
// Scans blocks whose entity exists in both states and returns the first entity
// that needs replacing (blockKey + fixedData), or { shouldUndoAgain: true }
// when nothing actionable changed.
function getEntityToReplace(newContentState, contentState): EntityToReplace {
  let entityToReplace;
  // .some() gives early exit; the assignment to entityToReplace is the side
  // effect that carries the result out of the callback.
  const didChange = contentState
    .getBlockMap()
    .filter((block: ContentBlock) => doesEntityExistInBoth(block, newContentState))
    .some(currentBlock => {
      const { key: blockKey } = currentBlock;
      const { type } = getBlocksEntityTypeAndData(blockKey, contentState);
      // Inner-RICOS plugins (tables, collapsible lists) need their nested
      // editor states repaired; other entities use the plain data fixers.
      const fixedEntityGetter = INNER_RICOS_TYPES.includes(type)
        ? getFixedEntityFromInnerRicos
        : getFixedEntity;
      const entity = fixedEntityGetter(currentBlock, contentState, newContentState);
      // entity may be false; spreading false adds no properties.
      entityToReplace = {
        blockKey,
        ...entity,
      };
      return !!entity;
    });
  return (didChange && entityToReplace) || { shouldUndoAgain: true };
}
// fixes entities in EditorStates
function fixBrokenRicosStates(
nextEditorState: EditorState,
editorState: EditorState
): UndoOperationResult {
const newContentState = nextEditorState.getCurrentContent();
const contentState = editorState.getCurrentContent();
const result: UndoOperationResult = {
fixedEditorState: preserveSelection(editorState, nextEditorState),
shouldUndoAgain: false,
};
if (!didBlocksChange(contentState, newContentState)) {
const { blockKey, fixedData, shouldUndoAgain } = getEntityToReplace(
newContentState,
contentState
);
blockKey && fixedData && replaceComponentData(result.fixedEditorState, blockKey, fixedData);
result.shouldUndoAgain = shouldUndoAgain;
}
return result;
}
/* Collapsible List Entity Handling */
// True when pairs were added/removed or the component config itself changed.
function didCollapsibleListConfigChange(currentData, newData): boolean {
  if (newData.pairs.length !== currentData.pairs.length) {
    return true;
  }
  return !isEqual(currentData.config, newData.config);
}
// Returns which part of a collapsible-list pair differs ('key' | 'title' |
// 'content'), or false when the pair is unchanged.
function checkCollapsibleListPair(currentPair, newPair): string | boolean {
  if (currentPair.key !== newPair.key) {
    return 'key';
  }
  if (currentPair.title.getCurrentContent() !== newPair.title.getCurrentContent()) {
    return 'title';
  }
  if (currentPair.content.getCurrentContent() !== newPair.content.getCurrentContent()) {
    return 'content';
  }
  return false;
}
// Checks whether a collapsible-list pair changed; if so, returns the changed
// pair index plus an indicator naming the changed part ('key'|'title'|'content').
function getChangedCollapsibleListPairIndex(currentPairs, newPairs) {
  let item;
  let changedPairIndex = -1;
  for (let index = 0; index < newPairs.length; index += 1) {
    item = checkCollapsibleListPair(currentPairs[index], newPairs[index]);
    if (item) {
      changedPairIndex = index;
      break;
    }
  }
  return { didChange: changedPairIndex > -1, changedPairIndex, item };
}
// Rebuilds the new pairs list, falling back to the current pair's key/title/
// content for any field the new pair is missing.
function getNewPairs(currentPairs, newPairs) {
  return newPairs.map((newPair, index) => {
    const fallback = currentPairs[index];
    return {
      key: newPair.key || fallback.key,
      title: newPair.title || fallback.title,
      content: newPair.content || fallback.content,
    };
  });
}
// Repairs the single changed pair inside a collapsible list by recursively
// fixing its title/content editor state, returning the patched component data.
function getFixedCollapsibleListEditorStates(currentData, newData) {
  const { pairs: currentPairs } = currentData;
  // Fill in any pair fields the undo state dropped, from the current pairs.
  const newPairs = getNewPairs(currentPairs, newData.pairs);
  const { didChange, changedPairIndex, item } = getChangedCollapsibleListPairIndex(
    currentPairs,
    newPairs
  );
  let entityToReplace: EntityToReplace = { shouldUndoAgain: !didChange };
  // Only title/content changes carry nested editor states that may need
  // fixing; a key change alone is left to the normal undo flow.
  if (item && item !== 'key') {
    const { fixedEditorState, shouldUndoAgain } = fixBrokenRicosStates(
      newPairs[changedPairIndex][item],
      currentPairs[changedPairIndex][item]
    );
    // Mark the nested state as an undo so the inner editor reacts correctly.
    newPairs[changedPairIndex][item] = setLastChangeType(removeFocus(fixedEditorState), 'undo');
    entityToReplace = {
      fixedData: {
        ...newData,
        pairs: newPairs,
      },
      shouldUndoAgain,
    };
  }
  return entityToReplace;
}
// Rebuilds every pair's title/content editor state (empty when absent) and
// stamps it with the given change type.
function setChangeTypeForCollapsibleListPairs(newPairs, lastChangeType) {
  const rebuild = (state) =>
    state ? EditorState.createWithContent(state.getCurrentContent()) : EditorState.createEmpty();
  return newPairs.map((pair) => ({
    key: pair.key,
    title: setLastChangeType(rebuild(pair.title), lastChangeType),
    content: setLastChangeType(rebuild(pair.content), lastChangeType),
  }));
}
// Entry point for collapsible-list entities: bail out on structural/config
// changes, otherwise try to repair the nested editor states.
function handleCollapsibleListEntity(currentData, newData): EntityToReplace {
  if (didCollapsibleListConfigChange(currentData, newData)) {
    return { shouldUndoAgain: false };
  }
  return getFixedCollapsibleListEditorStates(currentData, newData);
}
/* Table Entity Handling */
// Stamps one table cell's editor state with the given change type (mutates rows).
function setCellChangeType(rows, rowKey, columnKey, lastChangeType) {
  const cell = rows[rowKey].columns[columnKey];
  cell.content = setLastChangeType(cell.content, lastChangeType);
}
// Stamps every cell in every row with the given change type; returns the mutated rows.
function setChangeTypeForTableCells(newRows, lastChangeType) {
  for (const rowKey of Object.keys(newRows)) {
    for (const columnKey of Object.keys(newRows[rowKey].columns)) {
      setCellChangeType(newRows, rowKey, columnKey, lastChangeType);
    }
  }
  return newRows;
}
// Checks a table row's columns for a changed cell; returns the changed column
// key, or undefined when all cells match.
function checkColumnEditorStates(newRow, currentRow) {
  for (const columnKey of Object.keys(newRow)) {
    const changed =
      newRow[columnKey].content.getCurrentContent() !==
      currentRow[columnKey].content.getCurrentContent();
    if (changed) {
      return columnKey;
    }
  }
  return undefined;
}
// Looks for a changed cell in the new content; returns its row/column keys
// (both undefined when nothing changed).
function getChangedTableCellIndex(newRows, currentRows) {
  const changedCell: { row?: string; column?: string } = {};
  for (const rowKey of Object.keys(newRows)) {
    changedCell.column = checkColumnEditorStates(newRows[rowKey].columns, currentRows[rowKey].columns);
    if (changedCell.column) {
      changedCell.row = rowKey;
      break;
    }
  }
  return changedCell;
}
// Repairs the single changed cell inside a table by recursively fixing its
// editor state, returning the patched component data.
function getFixedTableData(currentData, newData): EntityToReplace {
  const { rows: newRows, ...newConfig } = newData.config;
  const { rows: currentRows } = currentData.config;
  const { row, column } = getChangedTableCellIndex(newRows, currentRows);
  const entityToReplace: EntityToReplace = { shouldUndoAgain: !(row && column) };
  if (row && column) {
    const { fixedEditorState, shouldUndoAgain } = fixBrokenRicosStates(
      newRows[row].columns[column].content,
      currentRows[row].columns[column].content
    );
    // Mark the nested state as an undo so the inner editor reacts correctly.
    newRows[row].columns[column].content = setLastChangeType(removeFocus(fixedEditorState), 'undo');
    entityToReplace.fixedData = {
      ...newData,
      config: {
        rows: newRows,
        ...newConfig,
      },
    };
    entityToReplace.shouldUndoAgain = shouldUndoAgain;
  }
  return entityToReplace;
}
// True when the table's non-row config changed or the row count changed.
function didTableConfigChange(currentData, newData) {
  const { rows: newRows, ...newConfig } = newData.config;
  const { rows: currentRows, ...currentConfig } = currentData.config;
  if (Object.keys(newRows).length !== Object.keys(currentRows).length) {
    return true;
  }
  return !isEqual(newConfig, currentConfig);
}
// Compares the style portion (everything except content) of each cell in a row.
function didCellStyleChange(newRow, currentRow) {
  for (const columnKey of Object.keys(newRow)) {
    const { content: _ignoredNew, ...newStyles } = newRow[columnKey];
    const { content: _ignoredCurrent, ...currentStyles } = currentRow[columnKey];
    if (JSON.stringify(newStyles) !== JSON.stringify(currentStyles)) {
      return true;
    }
  }
  return false;
}
// True when rows/columns were added or removed, or any cell's styling changed.
function didTableRowStyleChange(currentData, newData) {
  const newRows = newData.config.rows;
  const currentRows = currentData.config.rows;
  if (Object.keys(newRows).length !== Object.keys(currentRows).length) {
    return true;
  }
  return Object.keys(newRows).some((rowKey) => {
    const currentRow = currentRows[rowKey].columns;
    const newRow = newRows[rowKey].columns;
    if (!currentRow) {
      return true;
    }
    return (
      Object.keys(newRow).length !== Object.keys(currentRow).length ||
      didCellStyleChange(newRow, currentRow)
    );
  });
}
// Entry point for table entities: bail out on structural/style changes,
// otherwise try to repair the changed cell's editor state.
function handleTableEntity(currentData, newData): EntityToReplace {
  const structuralChange =
    didTableConfigChange(currentData, newData) || didTableRowStyleChange(currentData, newData);
  return structuralChange ? { shouldUndoAgain: false } : getFixedTableData(currentData, newData);
}
/* Data Fixers for breakable plugins */
// Applies the undo action to gallery items, restoring the resolved url and
// media metadata for any item whose source the undo broke back into temp data.
function applyActionForGalleryItems(currentItems, newItems) {
  let didChange = false;
  const fixedItems = newItems.map((newItem, index) => {
    const currentItem = currentItems[index];
    const sourceWasBroken = currentItem && !currentItem.tempData && newItem.tempData;
    if (!sourceWasBroken) {
      return newItem;
    }
    didChange = true;
    const { type, height, width } = currentItem.metadata;
    return {
      ...newItem,
      url: currentItem.url,
      metadata: { ...newItem.metadata, type, height, width },
      tempData: undefined,
    };
  });
  return { fixedItems, didChange };
}
// A video's data must be restored when undoing would reintroduce temp data or
// drop an already-resolved metadata/src.
function shouldReplaceVideoData(currentData, newData): boolean {
  const reintroducesTempData = !currentData.tempData && newData.tempData;
  const dropsMetadata = currentData.metadata && !newData.metadata;
  const dropsSrc = currentData.src && !newData.src;
  return reintroducesTempData || dropsMetadata || dropsSrc;
}
// Per-plugin fixers that restore entity data an undo step broke (e.g. an image
// whose src was captured mid-upload). Each fixer returns the repaired data, or
// undefined when there is nothing to fix.
const entityDataFixers = {
  [IMAGE_TYPE]: (currentData, newData) => {
    const { src, error } = currentData;
    // Restore a resolved src (and its error flag) that the undo state lost.
    if (src && !newData.src) {
      return { ...newData, src, error };
    }
  },
  [VIDEO_TYPE]: (currentData, newData) => {
    if (shouldReplaceVideoData(currentData, newData)) {
      // Keep the current media data, but honor the config from the undo state.
      const { config } = newData;
      return { ...currentData, config };
    }
  },
  [FILE_UPLOAD_TYPE]: (currentData, newData) => {
    // Files always keep the current upload result; only the config is undone.
    const { config } = newData;
    return { ...currentData, config };
  },
  [GALLERY_TYPE]: (currentData, newData) => {
    const { fixedItems, didChange } = applyActionForGalleryItems(currentData.items, newData.items);
    if (didChange) {
      return { ...newData, items: fixedItems };
    } else if (!currentData.loading && newData.loading) {
      // Never undo a finished gallery back into a loading state.
      return currentData;
    }
  },
  [IGNORE_TYPE]: currentData => {
    // Unsupported plugins: keep the current data untouched.
    return { ...currentData };
  },
};
// Fixers for plugins that embed nested RICOS editors.
const innerRicosDataFixers = {
  [COLLAPSIBLE_LIST_TYPE]: handleCollapsibleListEntity,
  [TABLE_TYPE]: handleTableEntity,
};
// Stamps nested editor states with a 'redo' change type before their component
// data is pushed onto the redo stack.
const innerRicosChangeTypeSetters = {
  [COLLAPSIBLE_LIST_TYPE]: prevData => {
    return {
      ...prevData,
      pairs: setChangeTypeForCollapsibleListPairs(prevData.pairs, 'redo'),
    };
  },
  [TABLE_TYPE]: prevData => {
    return {
      ...prevData,
      config: {
        ...prevData.config,
        rows: setChangeTypeForTableCells(prevData.config.rows, 'redo'),
      },
    };
  },
};
/* Handle undo-redo calls */
// Before pushing onto the redo stack, stamp inner-RICOS entities (tables,
// collapsible lists) with a 'redo' change type so nested editors replay correctly.
function getContentStateForRedoStack(prevEditorState: EditorState) {
  const prevContentState = prevEditorState.getCurrentContent();
  prevContentState
    .getBlockMap()
    .filter((block: ContentBlock) => Boolean(block.getEntityAt(0)))
    .forEach((block: ContentBlock) => {
      const blockKey = block.getKey();
      const { type, data: prevData } = getBlocksEntityTypeAndData(blockKey, prevContentState);
      const fixedData = innerRicosChangeTypeSetters[type]?.(prevData);
      if (fixedData) {
        replaceComponentData(prevEditorState, blockKey, fixedData);
      }
    });
  return prevEditorState.getCurrentContent();
}
// Applies one undo step: if the result still holds a broken entity, undo
// again; otherwise push the previous content onto the redo stack.
function updateUndoEditorState(editorState: EditorState, newEditorState: EditorState): EditorState {
  const { fixedEditorState, shouldUndoAgain } = fixBrokenRicosStates(newEditorState, editorState);
  if (shouldUndoAgain) {
    return undo(fixedEditorState);
  }
  return pushToRedoStack(
    removeCompositionModeFromEditorState(fixedEditorState),
    getContentStateForRedoStack(editorState)
  );
}
// Public undo: no-op on an empty undo stack; otherwise undo once and repair
// any entities the stock Draft.js undo broke.
export const undo = (editorState: EditorState): EditorState => {
  const hasHistory = !editorState.getUndoStack().isEmpty();
  if (!hasHistory) {
    return editorState;
  }
  const newEditorState = shiftRedoStack(EditorState.undo(editorState));
  return updateUndoEditorState(editorState, newEditorState);
};
export const redo = (editorState: EditorState): EditorState => {
if (editorState.getRedoStack().isEmpty()) {
return editorState;
}
return removeCompositionModeFromEditorState(EditorState.redo(editorState));
}; | the_stack |
import { h, Component, Fragment } from 'preact';
import { useEffect, useRef } from 'preact/hooks';
import { useSelector } from 'react-redux';
import b from 'bem-react-helper';
import { IntlShape, useIntl, FormattedMessage, defineMessages } from 'react-intl';
import clsx from 'clsx';
import 'styles/global.css';
import type { StoreState } from 'store';
import { COMMENT_NODE_CLASSNAME_PREFIX, MAX_SHOWN_ROOT_COMMENTS, THEMES, IS_MOBILE } from 'common/constants';
import { maxShownComments, url } from 'common/settings';
import { StaticStore } from 'common/static-store';
import {
setUser,
fetchUser,
blockUser,
unblockUser,
fetchBlockedUsers,
hideUser,
unhideUser,
signout,
} from 'store/user/actions';
import { fetchComments, addComment, updateComment, unsetCommentMode } from 'store/comments/actions';
import { setCommentsReadOnlyState } from 'store/post-info/actions';
import { setTheme } from 'store/theme/actions';
// TODO: make this button as default for all cases and replace current `components/Button`
import { Button } from 'components/auth/components/button';
import { Preloader } from 'components/preloader';
import { Settings } from 'components/settings';
import { AuthPanel } from 'components/auth-panel';
import { SortPicker } from 'components/sort-picker';
import { CommentForm } from 'components/comment-form';
import { Thread } from 'components/thread';
import { ConnectedComment as Comment } from 'components/comment/connected-comment';
import { uploadImage, getPreview } from 'common/api';
import { isUserAnonymous } from 'utils/isUserAnonymous';
import { bindActions } from 'utils/actionBinder';
import { postMessageToParent, parseMessage } from 'utils/post-message';
import { useActions } from 'hooks/useAction';
import { setCollapse } from 'store/thread/actions';
import styles from './root.module.css';
// Maps the redux store onto the Root widget's props.
const mapStateToProps = (state: StoreState) => {
  // Invert the child map: for every parent id, point each child id back at it.
  const childToParentComments: Record<string, string> = {};
  for (const [parentId, children] of Object.entries(state.comments.childComments)) {
    children.forEach((childId) => {
      childToParentComments[childId] = parentId;
    });
  }
  return {
    sort: state.comments.sort,
    isCommentsLoading: state.comments.isFetching,
    user: state.user,
    childToParentComments,
    collapsedThreads: state.collapsedThreads,
    topComments: state.comments.topComments,
    // Hidden pinned comments are filtered out up front.
    pinnedComments: state.comments.pinnedComments
      .map((id) => state.comments.allComments[id])
      .filter((c) => !c.hidden),
    theme: state.theme,
    info: state.info,
    hiddenUsers: state.hiddenUsers,
    blockedUsers: state.bannedUsers,
    getPreview,
    uploadImage,
  };
};
// Redux action creators bound for use as component props.
const boundActions = bindActions({
  fetchComments,
  setUser,
  fetchUser,
  fetchBlockedUsers,
  setTheme,
  setCommentsReadOnlyState,
  blockUser,
  unblockUser,
  hideUser,
  unhideUser,
  addComment,
  updateComment,
  setCollapse,
  unsetCommentMode,
  signout,
});
// Props = mapped store state + bound action creators + injected intl.
type Props = ReturnType<typeof mapStateToProps> & typeof boundActions & { intl: IntlShape };
interface State {
  // true until the initial fetchUser() request settles
  isUserLoading: boolean;
  // whether the settings (blocked users) panel is open
  isSettingsVisible: boolean;
  // number of root comments currently rendered (mobile pagination)
  commentsShown: number;
  // set when a user is unblocked while settings are open; triggers a reload on close
  wasSomeoneUnblocked: boolean;
}
// Localized strings used by this component.
const messages = defineMessages({
  pinnedComments: {
    id: 'root.pinned-comments',
    defaultMessage: 'Pinned comments',
  },
});
// Resolves a comment hash to its top-most ancestor id by walking the
// child -> parent map until a root comment is reached.
const getCollapsedParent = (hash: string, childToParentComments: Record<string, string>) => {
  let id = hash.replace(`#${COMMENT_NODE_CLASSNAME_PREFIX}`, '');
  for (let parent = childToParentComments[id]; parent; parent = childToParentComments[id]) {
    id = parent;
  }
  return id;
};
/** Main component for the main comments widget. */
export class Root extends Component<Props, State> {
state = {
  // true until the initial fetchUser() request settles
  isUserLoading: true,
  // number of root comments currently rendered (mobile pagination)
  commentsShown: maxShownComments,
  // set when a user is unblocked while settings are open
  wasSomeoneUnblocked: false,
  isSettingsVisible: false,
};
componentDidMount() {
  // Resolve the current user first so the preloader can be hidden even if comments fail.
  const userRequest = this.props.fetchUser().finally(() => this.setState({ isUserLoading: false }));
  Promise.all([userRequest, this.props.fetchComments()]).finally(() => {
    setTimeout(this.checkUrlHash);
    window.addEventListener('hashchange', this.checkUrlHash);
    // Tell the embedding page how tall the widget is so it can size the iframe.
    postMessageToParent({ height: document.body.offsetHeight });
  });
  window.addEventListener('message', this.onMessage);
}
// Handles a location hash pointing at a comment: if the comment is not yet in
// the DOM (collapsed ancestor / pagination), expand enough root comments first,
// then scroll to and highlight it via toMessage.
checkUrlHash = (e: Event & { newURL: string }) => {
  // On hashchange use the event's new URL; on initial load use location.hash.
  const hash = e ? `#${e.newURL.split('#')[1]}` : window.location.hash;
  if (hash.indexOf(`#${COMMENT_NODE_CLASSNAME_PREFIX}`) === 0) {
    if (e) e.preventDefault();
    if (!document.querySelector(hash)) {
      // Comment not rendered yet: find its root ancestor and raise the shown
      // count far enough that the page containing it gets rendered.
      const id = getCollapsedParent(hash, this.props.childToParentComments);
      const indexHash = this.props.topComments.findIndex((item) => item === id);
      const multiplierCollapsed = Math.ceil(indexHash / MAX_SHOWN_ROOT_COMMENTS);
      this.setState(
        {
          commentsShown: this.state.commentsShown + MAX_SHOWN_ROOT_COMMENTS * multiplierCollapsed,
        },
        // Give the newly shown comments a moment to render before scrolling.
        () => setTimeout(() => this.toMessage(hash), 500)
      );
    } else {
      this.toMessage(hash);
    }
  }
};
toMessage = (hash: string) => {
  const target = document.querySelector(hash);
  if (!target) {
    return;
  }
  // Ask the host page to scroll the iframe so the comment is visible,
  // then flash a temporary highlight on it.
  postMessageToParent({ scrollTo: target.getBoundingClientRect().top });
  target.classList.add('comment_highlighting');
  setTimeout(() => target.classList.remove('comment_highlighting'), 5e3);
};
// Handles commands posted by the embedding page (signout, theme switching).
onMessage = (event: MessageEvent) => {
  const data = parseMessage(event);
  if (data.signout === true) {
    this.props.signout(false);
  }
  // Only accept known theme values from the host page.
  if (data.theme && THEMES.includes(data.theme)) {
    this.props.setTheme(data.theme);
  }
};
onBlockedUsersShow = async () => {
  // Only admins can see the blocked users list, so fetch it just for them.
  const isAdmin = Boolean(this.props.user && this.props.user.admin);
  if (isAdmin) {
    await this.props.fetchBlockedUsers();
  }
  this.setState({ isSettingsVisible: true });
};
onBlockedUsersHide = async () => {
  // If someone was unblocked while the panel was open, reload the comments.
  if (this.state.wasSomeoneUnblocked) {
    this.props.fetchComments();
  }
  this.setState({
    isSettingsVisible: false,
    wasSomeoneUnblocked: false,
  });
};
// Remember the unblock so comments are reloaded when the settings panel closes.
onUnblockSomeone = () => {
  this.setState({ wasSomeoneUnblocked: true });
};
// Reveals the next page of root comments (mobile pagination).
showMore = () => {
  this.setState({
    commentsShown: this.state.commentsShown + MAX_SHOWN_ROOT_COMMENTS,
  });
};
/**
 * Renders the widget: the auth panel, then either the settings pane or the
 * main comment view (comment form, pinned comments, sort picker, threads).
 * Preact-style render: receives props and selected state fields as arguments.
 */
render(props: Props, { isUserLoading, commentsShown, isSettingsVisible }: State) {
  if (isUserLoading) {
    return <Preloader className="root__preloader" />;
  }

  const isCommentsDisabled = props.info.read_only!;
  // Anonymous users may not upload images.
  const imageUploadHandler = isUserAnonymous(this.props.user) ? undefined : this.props.uploadImage;

  return (
    <Fragment>
      <AuthPanel
        user={this.props.user}
        hiddenUsers={this.props.hiddenUsers}
        isCommentsDisabled={isCommentsDisabled}
        postInfo={this.props.info}
        signout={this.props.signout}
        onBlockedUsersShow={this.onBlockedUsersShow}
        onBlockedUsersHide={this.onBlockedUsersHide}
        onCommentsChangeReadOnlyMode={this.props.setCommentsReadOnlyState}
      />
      <div className="root__main">
        {isSettingsVisible ? (
          <Settings
            intl={this.props.intl}
            user={this.props.user}
            hiddenUsers={this.props.hiddenUsers}
            blockedUsers={this.props.blockedUsers}
            blockUser={this.props.blockUser}
            unblockUser={this.props.unblockUser}
            hideUser={this.props.hideUser}
            unhideUser={this.props.unhideUser}
            onUnblockSomeone={this.onUnblockSomeone}
          />
        ) : (
          <>
            {/* Top-level comment form, hidden when the post is read-only. */}
            {!isCommentsDisabled && (
              <CommentForm
                id={encodeURI(url || '')}
                intl={this.props.intl}
                theme={props.theme}
                mix="root__input"
                mode="main"
                user={props.user}
                onSubmit={(text: string, title: string) => this.props.addComment(text, title)}
                getPreview={this.props.getPreview}
                uploadImage={imageUploadHandler}
                simpleView={StaticStore.config.simple_view}
              />
            )}
            {/* Pinned comments are rendered read-only above the threads. */}
            {this.props.pinnedComments.length > 0 && (
              <div
                className="root__pinned-comments"
                role="region"
                aria-label={this.props.intl.formatMessage(messages.pinnedComments)}
              >
                {this.props.pinnedComments.map((comment) => (
                  <Comment
                    CommentForm={CommentForm}
                    intl={this.props.intl}
                    key={`pinned-comment-${comment.id}`}
                    view="pinned"
                    data={comment}
                    level={0}
                    disabled={true}
                    mix="root__pinned-comment"
                  />
                ))}
              </div>
            )}
            <div className={clsx('sort-picker', styles.sortPicker)}>
              <SortPicker />
            </div>
            {/* On mobile only the first `commentsShown` root threads render,
                with a "Show more" button to page through the rest. */}
            {!!this.props.topComments.length && !props.isCommentsLoading && (
              <div className="root__threads" role="list">
                {(IS_MOBILE && commentsShown < this.props.topComments.length
                  ? this.props.topComments.slice(0, commentsShown)
                  : this.props.topComments
                ).map((id) => (
                  <Thread
                    key={`thread-${id}`}
                    id={id}
                    mix="root__thread"
                    level={0}
                    getPreview={this.props.getPreview}
                  />
                ))}
                {commentsShown < this.props.topComments.length && IS_MOBILE && (
                  <Button className={clsx('more-comments', styles.moreComments)} onClick={this.showMore}>
                    <FormattedMessage id="root.show-more" defaultMessage="Show more" />
                  </Button>
                )}
              </div>
            )}
            {props.isCommentsLoading && (
              <div className="root__threads" role="list">
                <Preloader className="root__preloader" />
              </div>
            )}
          </>
        )}
      </div>
    </Fragment>
  );
}
}
/** Renders the Remark42 home-page link used inside the powered-by message. */
const CopyrightLink = (title: string) => (
  <a class="root__copyright-link" href="https://remark42.com/">
    {title}
  </a>
);
/** Root component connected to redux */
export function ConnectedRoot() {
  const intl = useIntl();
  const props = useSelector(mapStateToProps);
  const actions = useActions(boundActions);
  const rootRef = useRef<HTMLDivElement>(null);

  useEffect(() => {
    // Keep the embedding iframe's height in sync with our rendered content.
    // TODO: throttle updates
    const node = rootRef.current;
    // Guard: the ref is typed HTMLDivElement | null; observing null would
    // throw. (The original called observer.observe(rootRef.current) unguarded.)
    if (!node) return;
    const observer = new MutationObserver(() => {
      postMessageToParent({ height: document.body.offsetHeight });
    });
    observer.observe(node, { attributes: true, childList: true, subtree: true });
    return () => observer.disconnect();
  }, []);

  return (
    <div className={clsx(b('root', {}, { theme: props.theme }), props.theme)} ref={rootRef}>
      <Root {...props} {...actions} intl={intl} />
      <p className="root__copyright" role="contentinfo">
        <FormattedMessage
          id="root.powered-by"
          defaultMessage="Powered by <a>Remark42</a>"
          values={{ a: CopyrightLink }}
        />
      </p>
    </div>
  );
}
import { debugExec, ensureError, FIXME_NeedsProperType, InternalError, withTmpDir } from "@adpt/utils";
import db from "debug";
import { ExecaReturnValue, Options as ExecaOptions } from "execa";
import fs from "fs-extra";
import ld from "lodash";
import os from "os";
import * as path from "path";
import randomstring from "randomstring";
import shellwords from "shellwords-ts";
import { OmitT, WithPartialT } from "type-ops";
import { isExecaError } from "../common";
import { Config, ContainerStatus, RestartPolicy } from "../Container";
import { Environment, mergeEnvPairs, mergeEnvSimple } from "../env";
import { ImageRefDockerHost, isImageRefDockerhostWithId, mutableImageRef, WithId } from "./image-ref";
import { adaptDockerDeployIDKey } from "./labels";
import {
DockerBuildOptions,
DockerContainerProps,
DockerGlobalOptions,
File,
ImageIdString,
ImageNameString,
Mount,
NameTagString,
RepoDigestString,
} from "./types";
// Debug logger for this module. Enable with DEBUG=adapt:cloud:docker
export const debug = db("adapt:cloud:docker");
// Enable with DEBUG=adapt:cloud:docker:out*
export const debugOut = db("adapt:cloud:docker:out");
// Command runner that logs invocations and output to the two channels above.
export const exec = debugExec(debug, debugOut);

/** Extracts only the global Docker CLI options (currently just dockerHost). */
export const pickGlobals = (opts: DockerGlobalOptions): DockerGlobalOptions =>
    ld.pick(opts, "dockerHost");
/**
 * Default Docker daemon address: honor the DOCKER_HOST environment variable
 * when set, otherwise fall back to the platform default socket.
 *
 * Fix: the ternary must be parenthesized. `a || b ? c : d` parses as
 * `(a || b) ? c : d`, so previously any non-empty DOCKER_HOST made the whole
 * condition truthy and always selected the Windows npipe address instead of
 * using DOCKER_HOST itself.
 */
export const defaultDockerHost = process.env.DOCKER_HOST ||
    (os.platform() === "win32" ? "npipe:////./pipe/docker_engine" : "unix:///var/run/docker.sock");
/**
 * Common version of busybox to use internally.
 * Pinned to the major "1" tag so helper containers stay consistent.
 * @internal
 */
export const busyboxImage = "busybox:1";

/*
 * Staged build utilities
 */
export async function writeFiles(pwd: string, files: File[]) {
// Strip any leading slash
files = files.map((f) => {
return f.path.startsWith("/") ?
{ path: f.path.slice(1), contents: f.contents } :
f;
});
// Make any directories required
const dirs = ld.uniq(files
.map((f) => path.dirname(f.path))
.filter((d) => d !== "."));
await Promise.all(dirs.map(async (d) => fs.mkdirp(path.resolve(pwd, d))));
await Promise.all(files.map(async (f) => {
const contents = ld.isString(f.contents) ? Buffer.from(f.contents) : f.contents;
return fs.writeFile(path.resolve(pwd, f.path), contents);
}));
}
/**
 * Builds a minimal (FROM scratch) image whose filesystem contains exactly
 * `files`. Uses a temporary directory as the build context and sends the
 * Dockerfile on stdin; the result is uniquely tagged "adapt-tmp-files".
 */
export async function buildFilesImage(files: File[], opts: BuildFilesImageOptions) {
    const dockerfile = `
        FROM scratch
        COPY . /
        `;
    return withTmpDir(async (dir) => {
        await writeFiles(dir, files);
        return dockerBuild("-", dir, {
            ...pickGlobals(opts),
            forceRm: true,
            imageName: "adapt-tmp-files",
            uniqueTag: true,
            stdin: dockerfile,
            deployID: opts.deployID,
        });
    }, { prefix: "adapt-docker-build" });
}
/** Options for {@link buildFilesImage} / {@link withFilesImage}. */
export interface BuildFilesImageOptions extends DockerGlobalOptions {
    /**
     * If set, adds a Docker LABEL to the built image with the DeployID.
     */
    deployID?: string;
}
/**
 * Builds a temporary image containing `files`, runs `fn` with it, and always
 * attempts to clean the image up afterward. Calls `fn(undefined)` when there
 * are no files to build.
 */
export async function withFilesImage<T>(files: File[] | undefined,
    opts: BuildFilesImageOptions,
    fn: (img: WithId<ImageRefDockerHost> | undefined) => T | Promise<T>): Promise<T> {

    if (!files || files.length === 0) return fn(undefined);

    const image = await buildFilesImage(files, opts);
    try {
        return await fn(image);
    } finally {
        // deployID is a label option, not a docker global; strip it before rmi.
        const { deployID, ...rmOpts } = opts;
        // Only remove what we built. If we tagged the image, just remove
        // the tag and let Docker decide if the actual image is unused.
        // If there's no tag, try delete by ID, but don't warn if that ID
        // has been tagged by someone else.
        const nameOrId = image.nameTag || image.id;
        try {
            await dockerRemoveImage({ nameOrId, ...rmOpts });
        } catch (err) {
            err = ensureError(err);
            // "referenced in multiple repositories" means someone else tagged
            // the same ID — not our problem to clean up, so stay quiet.
            if (! /image is referenced in multiple repositories/.test(err.message)) {
                // tslint:disable-next-line: no-console
                console.warn(`Unable to delete temporary Docker image: `, err.message);
            }
        }
    }
}
// Remembers, per docker host, that the daemon rejected BuildKit so we don't
// keep requesting it on subsequent invocations.
const buildKitUnsupported = new Map<string | undefined, boolean>();

/** Options for {@link execDocker}. */
export interface ExecDockerOptions extends DockerGlobalOptions {
    /** Request BuildKit (DOCKER_BUILDKIT=1) unless the daemon is known not to support it. */
    requestBuildKit?: boolean;
    /** Data to feed the docker CLI on stdin (e.g. a Dockerfile for `build -f -`). */
    stdin?: string;
    /** Extra environment for the docker CLI process. */
    env?: Environment;
}
/** @internal */
// Runs the docker CLI with global flags (-H) and DOCKER_BUILDKIT injected.
// If the daemon rejects BuildKit, remembers that per dockerHost and retries
// the same command once without it.
export async function execDocker(argsIn: string[], options: ExecDockerOptions): Promise<ExecaReturnValue> {
    const globalArgs = [];
    if (options.dockerHost) globalArgs.push("-H", options.dockerHost);

    const env = mergeEnvSimple(options.env) || {};
    // Request BuildKit only if asked for AND this daemon hasn't rejected it before.
    env.DOCKER_BUILDKIT = options.requestBuildKit &&
        buildKitUnsupported.get(options.dockerHost) !== true ? "1" : "0";

    const args = globalArgs.concat(argsIn);
    const execaOpts: ExecaOptions = {
        all: true,  // interleave stdout+stderr into `all` for error matching below
        input: options.stdin,
        env,
    };
    try {
        return await exec("docker", args, execaOpts);
    } catch (e) {
        if (e.all) {
            if (e.all.includes("buildkit not supported by daemon")) {
                // Remember for this host, then retry without BuildKit. The
                // retry cannot loop: with the flag recorded, DOCKER_BUILDKIT=0.
                buildKitUnsupported.set(options.dockerHost, true);
                return execDocker(argsIn, options);
            }
        }
        throw e;
    }
}
/** Defaults applied by {@link dockerBuild}. */
export const defaultDockerBuildOptions = {
    requestBuildKit: true,
    forceRm: true,
    uniqueTag: false,
};
/** Translates opts.buildArgs into a flat list of --build-arg CLI arguments. */
function collectBuildArgs(opts: DockerBuildOptions): string[] {
    if (!opts.buildArgs) return [];
    const pairs = mergeEnvPairs(opts.buildArgs);
    if (!pairs) return [];

    const result: string[] = [];
    for (const { name, value } of pairs) {
        result.push("--build-arg", `${name}=${value}`);
    }
    return result;
}
/**
 * Runs `docker build` and returns a frozen image reference (with ID) for the
 * result. Handles tagging (including unique random tags), deployID labels,
 * build args, and a Dockerfile supplied on stdin (dockerfile === "-").
 */
export async function dockerBuild(
    dockerfile: string,
    contextPath: string,
    options: DockerBuildOptions = {}): Promise<WithId<ImageRefDockerHost>> {

    const opts = { ...defaultDockerBuildOptions, ...options };

    const mRef = mutableImageRef({
        dockerHost: options.dockerHost || "default",
    });

    const args = ["build", "-f", dockerfile];

    if (dockerfile === "-" && !opts.stdin) {
        throw new Error(`dockerBuild: stdin option must be set if dockerfile is "-"`);
    }
    if (opts.forceRm) args.push("--force-rm");
    if (opts.uniqueTag && !opts.imageName) {
        throw new Error(`dockerBuild: imageName must be set if uniqueTag is true`);
    }
    if (opts.imageName) {
        mRef.path = opts.imageName;
        const tag = createTag(opts.imageTag, opts.uniqueTag);
        if (tag) mRef.tag = tag;
        else if (!opts.uniqueTag && !opts.imageTag) mRef.tag = "latest";
        // With uniqueTag, tagging is deferred until we know the built ID (below),
        // so a rebuild producing the same ID can reuse the previous unique tag.
        if (!opts.uniqueTag && mRef.nameTag) args.push("-t", mRef.nameTag);
    }
    if (opts.deployID) {
        args.push("--label", `${adaptDockerDeployIDKey}=${opts.deployID}`);
    }
    const buildArgs = collectBuildArgs(opts);
    args.push(...buildArgs);
    args.push(contextPath);

    const cmdRet = await execDocker(args, opts);
    const { stdout, stderr } = cmdRet;
    // Works for both legacy builder (stdout) and BuildKit (stderr) output.
    const id = await idFromBuild(stdout, stderr, opts);
    if (!id) throw new Error("Could not extract image sha\n" + stdout + "\n\n" + stderr);
    mRef.id = id;

    if (opts.uniqueTag) {
        const prevId = opts.prevUniqueNameTag && await dockerImageId(opts.prevUniqueNameTag, opts);
        if (prevId === id) mRef.nameTag = opts.prevUniqueNameTag; // prev points to current id
        else {
            if (!mRef.nameTag) throw new InternalError(`nameTag not set`);
            await dockerTag({
                existing: id,
                newTag: mRef.nameTag,
                ...pickGlobals(opts),
            });
        }
    }

    const ret = mRef.freeze();
    if (!isImageRefDockerhostWithId(ret)) {
        throw new InternalError(`Built image reference '${mRef.ref}' is not ` +
            `a valid dockerhost reference with ID`);
    }
    return ret;
}
/**
 * Extracts the built image ID from `docker build` output. Handles both the
 * legacy builder (short ID on stdout) and BuildKit (sha256 on stderr).
 * Returns null when neither pattern matches.
 */
async function idFromBuild(stdout: string, stderr: string, opts: DockerGlobalOptions) {
    // Legacy docker build prints "Successfully built <short-id>" on stdout.
    const legacy = /^Successfully built ([0-9a-zA-Z]+)$/mg.exec(stdout);
    if (legacy && legacy[1]) {
        // Resolve the (possibly truncated) ID to the full sha via inspect.
        const id = await dockerImageId(legacy[1], opts);
        if (id == null) throw new Error(`Built image ID not found`);
        return id;
    }

    // BuildKit reports "writing image sha256:<hash>" on stderr.
    const buildKit = /writing image (sha256:[0-9a-f]+) /m.exec(stderr);
    return (buildKit && buildKit[1]) ? buildKit[1] : null;
}
/**
 * Fetch the image id for a Docker image.
 * Returns undefined when no image matches; throws (wrapped) on any other
 * inspect failure or when the name is ambiguous.
 *
 * @internal
 */
export async function dockerImageId(name: string, opts: DockerGlobalOptions = {}): Promise<ImageIdString | undefined> {
    try {
        const reports = await dockerInspect([name], { type: "image", ...opts });
        switch (reports.length) {
            case 0: return undefined;
            case 1: return reports[0].Id;
            default: throw new Error(`Multiple images found`);
        }
    } catch (err) {
        throw new Error(`Error getting image id for ${name}: ${err.message}`);
    }
}
/** Options for {@link dockerTag}. */
export interface DockerTagOptions extends DockerGlobalOptions {
    /** Existing image (name or ID) to tag. */
    existing: ImageNameString | ImageIdString;
    /** New name:tag to apply. */
    newTag: NameTagString;
}

/** Applies `newTag` to an existing image via `docker tag`. */
export async function dockerTag(options: DockerTagOptions) {
    const { existing, newTag } = options;
    await execDocker(["tag", existing, newTag], options);
}
/** Options for {@link dockerRemoveImage}. */
export interface DockerRemoveImageOptions extends DockerGlobalOptions {
    /** Image name, name:tag, or ID to remove. */
    nameOrId: ImageNameString | ImageIdString;
    /** Pass --force to `docker rmi`. */
    force?: boolean;
}

const dockerRemoveImageDefaults = {
    force: false,
};

/** Removes an image (or just its tag) via `docker rmi`. */
export async function dockerRemoveImage(options: DockerRemoveImageOptions) {
    const opts = { ...dockerRemoveImageDefaults, ...options };

    const args = ["rmi"];
    if (opts.force) args.push("--force");
    args.push(opts.nameOrId);

    await execDocker(args, opts);
}
/**
 * Builds an image tag from an optional base tag and an optional unique random
 * suffix (joined with "-"). Returns undefined when neither is requested.
 */
export function createTag(baseTag: string | undefined, appendUnique: boolean): string | undefined {
    if (!baseTag && !appendUnique) return undefined;

    const parts: string[] = [];
    if (baseTag) parts.push(baseTag);
    if (appendUnique) {
        // Eight readable lowercase letters: short but unlikely to collide.
        parts.push(randomstring.generate({
            length: 8,
            charset: "alphabetic",
            readable: true,
            capitalization: "lowercase",
        }));
    }
    return parts.join("-");
}
/** Shape of one entry from `docker inspect` on a container. */
export interface InspectReport extends ContainerStatus { }

/** Shape of one entry from `docker inspect` on a network. */
export interface NetworkInspectReport {
    Id: string;
    Created: string;
    Name: string;
    Driver: string;
    Scope: "local" | string;
    EnableIPv6: boolean;
    IPAM: {
        Driver: string,
        Options: null | FIXME_NeedsProperType;
        Config: FIXME_NeedsProperType[];
    };
    Internal: boolean;
    Attachable: boolean;
    Ingress: boolean;
    ConfigFrom: {
        Network: string;
    };
    ConfigOnly: boolean;
    // Keyed by container ID.
    Containers: {
        [id: string]: {
            Name: string;
            EndpointId: string;
            MacAddress: string; //In xx:yy:zz:aa:bb:cc form
            IPv4Address: string; //In x.y.z.a/n form
            IPv6Address: string; //Can be empty
        }
    };
    Options: {
        [name: string]: string; //Values here are always strings
    };
}

/** Shape of one entry from `docker inspect` on an image. */
export interface ImageInspectReport {
    Id: string;
    Config: Config;
    [key: string]: FIXME_NeedsProperType;
}

/** Options for {@link dockerInspect}; `type` selects the --type flag. */
export interface DockerInspectOptions extends DockerGlobalOptions {
    type?: "container" | "image" | "network";
}
/**
 * Run docker inspect and return the parsed output
 *
 * @internal
 */
export async function dockerInspect(namesOrIds: string[], opts: { type: "image" } & DockerInspectOptions):
    Promise<ImageInspectReport[]>;
export async function dockerInspect(namesOrIds: string[], opts: { type: "network" } & DockerInspectOptions):
    Promise<NetworkInspectReport[]>;
export async function dockerInspect(namesOrIds: string[], opts: { type: "container" } & DockerInspectOptions):
    Promise<InspectReport[]>;
export async function dockerInspect(namesOrIds: string[], opts?: DockerInspectOptions):
    Promise<InspectReport[] | NetworkInspectReport[] | ImageInspectReport[]>;
export async function dockerInspect(namesOrIds: string[], opts: DockerInspectOptions = {}):
    Promise<InspectReport[] | NetworkInspectReport[] | ImageInspectReport[]> {
    const execArgs = ["inspect"];
    if (opts.type) execArgs.push(`--type=${opts.type}`);

    let inspectRet: ExecaReturnValue<string>;
    try {
        inspectRet = await execDocker([...execArgs, ...namesOrIds], opts);
    } catch (e) {
        // "No such ..." exits non-zero but still prints a JSON array on stdout
        // containing the objects that *were* found, so use the failed result.
        if (isExecaError(e) && e.stderr.startsWith("Error: No such")) {
            inspectRet = e;
        } else throw e;
    }
    try {
        const inspect = JSON.parse(inspectRet.stdout);
        if (!Array.isArray(inspect)) throw new Error(`docker inspect result is not an array`);
        return inspect;
    } catch (err) {
        throw new Error(`Error inspecting docker objects ${namesOrIds}: ${err.message}`);
    }
}
export type NetworkReport = NetworkInspectReport[];

/**
 * Return a list of all network names
 *
 * Fix: skip blank lines before JSON.parse. With empty command output,
 * `"".split("\n")` yields [""] and `JSON.parse("")` throws.
 *
 * @internal
 */
export async function dockerNetworkLs(opts: DockerGlobalOptions): Promise<string[]> {
    const result = await execDocker(["network", "ls", "--format", "{{json .Name}}"], opts);
    const ret: string[] = [];
    for (const line of result.stdout.split("\n")) {
        if (!line.trim()) continue;  // blank line would make JSON.parse throw
        ret.push(JSON.parse(line));
    }
    return ret;
}
/**
 * Return all networks and their inspect reports
 * (lists network names, then inspects them all in one call).
 *
 * @internal
 */
export async function dockerNetworks(opts: DockerGlobalOptions): Promise<NetworkReport> {
    const networks = await dockerNetworkLs(opts);
    return dockerInspect(networks, { ...opts, type: "network" });
}
/**
 * Run docker stop
 *
 * @internal
 */
export async function dockerStop(namesOrIds: string[], opts: DockerGlobalOptions): Promise<void> {
    await execDocker(["stop", ...namesOrIds], opts);
}

/**
 * Run docker rm
 *
 * @internal
 */
export async function dockerRm(namesOrIds: string[], opts: DockerGlobalOptions): Promise<void> {
    await execDocker(["rm", ...namesOrIds], opts);
}
/**
 * Options for {@link docker.dockerRun}
 * (container props with dockerHost optional and a single `network` instead
 * of the multi-network `networks` prop).
 *
 * @internal
 */
export interface DockerRunOptions extends OmitT<WithPartialT<DockerContainerProps, "dockerHost">, "networks"> {
    /** Run detached (-d). Defaults to true. */
    background?: boolean;
    /** Container name (--name). */
    name?: string;
    /** Image to run. */
    image: ImageNameString;
    /** Single network to attach (--network). */
    network?: string;
}

const defaultDockerRunOptions = {
    background: true,
    privileged: false,
};
/**
 * Run a container via docker run
 * (translates DockerRunOptions into CLI flags, then execs `docker run`).
 *
 * @internal
 */
export async function dockerRun(options: DockerRunOptions) {
    const opts = { ...defaultDockerRunOptions, ...options };
    const {
        background, labels, mounts, name,
        portBindings, ports, privileged, restartPolicy,
    } = opts;
    const args: string[] = ["run"];

    if (privileged) args.push("--privileged");
    if (background) args.push("-d");
    if (name) args.push("--name", name);
    if (labels) {
        for (const l of Object.keys(labels)) {
            args.push("--label", `${l}=${labels[l]}`); //FIXME(manishv) better quoting/format checking here
        }
    }
    if (opts.autoRemove) args.push("--rm");
    if (portBindings) {
        // portBindings maps container port -> host port.
        const portArgs = Object.keys(portBindings).map((k) => `-p${portBindings[k]}:${k}`);
        args.push(...portArgs);
    }
    if (opts.stopSignal) args.push("--stop-signal", opts.stopSignal);
    if (opts.network !== undefined) {
        args.push("--network", opts.network);
    }
    if (opts.environment !== undefined) {
        const envPairs = mergeEnvPairs(opts.environment);
        if (envPairs) {
            for (const evar of envPairs) {
                args.push("-e", `${evar.name}=${evar.value}`);
            }
        }
    }
    if (ports) args.push(...ports.map((p) => `--expose=${p}`));
    args.push(...restartPolicyArgs(restartPolicy));
    if (mounts) args.push(...ld.flatten(mounts.map(mountArgs)));

    // Image, then command: a string is shell-split, an array passed as-is.
    args.push(opts.image);
    if (typeof opts.command === "string") args.push(...shellwords.split(opts.command));
    if (Array.isArray(opts.command)) args.push(...opts.command);

    return execDocker(args, opts);
}
/** Translates a RestartPolicy into docker run --restart arguments. */
function restartPolicyArgs(policy: RestartPolicy | undefined) {
    // No policy, empty name, or "Never" all mean: no --restart flag.
    if (!policy || !policy.name || policy.name === "Never") return [];
    if (policy.name === "Always") return ["--restart=always"];
    if (policy.name === "UnlessStopped") return ["--restart=unless-stopped"];
    if (policy.name === "OnFailure") {
        const max = policy.maximumRetryCount ? `:${policy.maximumRetryCount}` : "";
        return [`--restart=on-failure${max}`];
    }
    throw new Error(`Invalid RestartPolicy name '${policy.name}'`);
}
// Produces a "key=value" formatter for a fixed key.
const stringVal = (key: string) => (val: any) => `${key}=${val}`;

type MountArgTransform = {
    [K in keyof Mount]: (val: Mount[K]) => string;
};

// How each Mount property is rendered inside a --mount argument string.
const mountArgTransform: MountArgTransform = {
    type: stringVal("type"),
    source: stringVal("source"),
    destination: stringVal("destination"),
    readonly: (val) => val ? "readonly" : "",
    propagation: stringVal("propagation"),
};

/** Translates a Mount description into docker run `--mount` arguments. */
function mountArgs(mount: Mount): string[] {
    const rendered = Object.entries(mount).map(([key, value]) => {
        const xform = (mountArgTransform as any)[key];
        if (!xform) throw new Error(`Invalid mount property '${key}'`);
        return xform(value);
    });
    return ["--mount", rendered.join(",")];
}
/**
 * Attach containers to given networks
 * (one `docker network connect` per network).
 *
 * When `alreadyConnectedError` is false, an "endpoint already exists" error
 * is ignored and the remaining networks are still processed.
 *
 * @internal
 */
export async function dockerNetworkConnect(containerNameOrId: string, networks: string[],
    opts: { alreadyConnectedError?: boolean } & ExecDockerOptions) {
    const optsWithDefs = { alreadyConnectedError: true, ...opts };
    const { alreadyConnectedError, ...execOpts } = optsWithDefs;
    const alreadyConnectedRegex =
        new RegExp(`^Error response from daemon: endpoint with name ${containerNameOrId} already exists in network`);
    for (const net of networks) {
        try {
            await execDocker(["network", "connect", net, containerNameOrId], execOpts);
        } catch (e) {
            if (!alreadyConnectedError && isExecaError(e)) {
                // Tolerate "already connected" and continue with the next network.
                if (alreadyConnectedRegex.test(e.stderr)) continue;
            }
            throw e;
        }
    }
}
/**
 * Detach containers from given networks
 * (one `docker network disconnect` per network).
 *
 * When `alreadyDisconnectedError` is false, "not connected" and "network not
 * found" errors are ignored and the remaining networks are still processed.
 *
 * @internal
 */
export async function dockerNetworkDisconnect(containerNameOrId: string, networks: string[],
    opts: { alreadyDisconnectedError?: boolean } & ExecDockerOptions) {
    const optsWithDefs = { alreadyDisconnectedError: true, ...opts };
    const { alreadyDisconnectedError, ...execOpts } = optsWithDefs;
    for (const net of networks) {
        try {
            await execDocker(["network", "disconnect", net, containerNameOrId], execOpts);
        } catch (e) {
            if (!alreadyDisconnectedError && isExecaError(e)) {
                if (/^Error response from daemon: container [0-9a-fA-F]+ is not connected to network/.test(e.stderr) ||
                    (new RegExp(`^Error response from daemon: network ${net} not found`).test(e.stderr))) {
                    continue;
                }
            }
            throw e;
        }
    }
}
/**
 * Options for dockerPush.
 *
 * @internal
 */
export interface DockerPushOptions extends DockerGlobalOptions {
    /** name:tag of the image to push. */
    nameTag: NameTagString;
}

/**
 * Push an image to a registry
 *
 * @internal
 */
export async function dockerPush(opts: DockerPushOptions): Promise<void> {
    await execDocker(["push", opts.nameTag], opts);
}
/**
 * Options for dockerPull.
 *
 * @internal
 */
export interface DockerPullOptions extends DockerGlobalOptions {
    /**
     * Image to pull.
     * @remarks
     * See {@link docker.ImageNameString} for more details. If the registry
     * portion of imageName is absent, the official Docker registry is
     * assumed.
     */
    imageName: ImageNameString;
}

/**
 * Information about an image that has been successfully pulled from a
 * registry.
 *
 * @internal
 */
export interface DockerPullInfo {
    /** Local image ID of the pulled image. */
    id: ImageIdString;
    /** `repo@sha256:...` digest identifying the image in its registry. */
    registryDigest: RepoDigestString;
}
/**
 * Pull an image from a registry and return its local ID and registry digest.
 * (The original header said "Push"; this function pulls.)
 *
 * @internal
 */
export async function dockerPull(opts: DockerPullOptions): Promise<DockerPullInfo> {
    const args: string[] = ["pull", opts.imageName];
    const repo = removeTag(opts.imageName);

    const { stdout } = await execDocker(args, opts);
    // docker pull reports the content digest as "Digest: sha256:...".
    const m = stdout.match(/Digest:\s+(\S+)/);
    if (!m) throw new Error(`Output from docker pull did not contain Digest. Output:\n${stdout}`);
    const registryDigest = `${repo}@${m[1]}`;
    // Resolve the digest to the local image ID via inspect.
    const info = await dockerInspect([registryDigest], { type: "image", ...pickGlobals(opts) });
    if (info.length !== 1) {
        throw new Error(`Unexpected number of images (${info.length}) match ${registryDigest}`);
    }
    return {
        id: info[0].Id,
        registryDigest,
    };
}
/**
* Given a *valid* ImageNameString, removes the optional tag and returns only the
* `[registry/]repo` portion.
* NOTE(mark): This does not attempt to be a generic parser for all Docker
* image name strings because there's ambiguity in how to parse that requires
* context of where it came from or which argument of which CLI it is.
*/
function removeTag(imageName: ImageNameString): ImageNameString {
const parts = imageName.split(":");
switch (parts.length) {
case 1:
// 0 colons - no tag present
break;
case 2:
// 1 colon - Could be either from hostname:port or :tag
// If it's hostname:port, then parts[1] *must* include a slash
// else it's a tag, so dump it.
if (!parts[1].includes("/")) parts.pop();
break;
case 3:
// 2 colons - last part is the tag
parts.pop();
break;
default:
throw new Error(`Invalid docker image name '${imageName}'`);
}
return parts.join(":");
} | the_stack |
import * as fs from "fs";
import * as path from "path";
import * as vs from "vscode";
import { DartCapabilities } from "../../shared/capabilities/dart";
import { flutterPath } from "../../shared/constants";
import { LogCategory } from "../../shared/enums";
import { CustomScript, DartSdks, DartWorkspaceContext, IAmDisposable, Logger, SpawnedProcess } from "../../shared/interfaces";
import { logProcess } from "../../shared/logging";
import { getPubExecutionInfo } from "../../shared/processes";
import { disposeAll, nullToUndefined, PromiseCompleter, usingCustomScript } from "../../shared/utils";
import { fsPath } from "../../shared/utils/fs";
import { Context } from "../../shared/vscode/workspace";
import { config } from "../config";
import { DartSdkManager, FlutterSdkManager } from "../sdk/sdk_manager";
import * as util from "../utils";
import { getGlobalFlutterArgs, safeToolSpawn } from "../utils/processes";
import { getFolderToRunCommandIn } from "../utils/vscode/projects";
import * as channels from "./channels";
// Valid Dart package names: lowercase letter, then lowercase/digits/underscores.
export const packageNameRegex = new RegExp("^[a-z][a-z0-9_]*$");

// TODO: Find a better way/place for this.
// Mutable state shared across command implementations.
export const commandState = {
    // Incremented/decremented around project creation flows.
    numProjectCreationsInProgress: 0,
    // Debounce flag for the "SDK updated, reload?" prompt.
    promptToReloadOnVersionChanges: true,
};
/**
 * Shared plumbing for running Flutter/pub commands in a workspace folder,
 * with progress notifications, an output channel per command, and chaining
 * so a re-run of the same command cancels/queues behind the previous one.
 */
export class BaseSdkCommands implements IAmDisposable {
	protected readonly sdks: DartSdks;
	protected readonly disposables: vs.Disposable[] = [];
	// A map of any in-progress commands so we can terminate them if we want to run another.
	private runningCommands: { [workspaceUriAndCommand: string]: ChainedProcess | undefined } = {};

	constructor(protected readonly logger: Logger, protected readonly context: Context, protected readonly workspace: DartWorkspaceContext, protected readonly dartCapabilities: DartCapabilities) {
		this.sdks = workspace.sdks;
	}

	/**
	 * Prompts for (or resolves) the folder to run in, then delegates to
	 * `handler` with that folder, the args, and a display-friendly short path.
	 */
	protected async runCommandForWorkspace(
		handler: (folder: string, args: string[], shortPath: string, alwaysShowOutput: boolean) => Thenable<number | undefined>,
		placeHolder: string,
		args: string[],
		selection: vs.Uri | undefined,
		alwaysShowOutput = false,
	): Promise<number | undefined> {
		const folderToRunCommandIn = await getFolderToRunCommandIn(this.logger, placeHolder, selection);
		if (!folderToRunCommandIn)
			return;
		const containingWorkspace = vs.workspace.getWorkspaceFolder(vs.Uri.file(folderToRunCommandIn));
		if (!containingWorkspace) {
			this.logger.error(`Failed to get workspace folder for ${folderToRunCommandIn}`);
			throw new Error(`Failed to get workspace folder for ${folderToRunCommandIn}`);
		}
		const containingWorkspacePath = fsPath(containingWorkspace.uri);

		// Display the relative path from the workspace root to the folder we're running, or if they're
		// the same then the folder name we're running in.
		const shortPath = path.relative(containingWorkspacePath, folderToRunCommandIn)
			|| path.basename(folderToRunCommandIn);

		return handler(folderToRunCommandIn, args, shortPath, alwaysShowOutput);
	}

	/** Runs `flutter <args>` after prompting for the folder. */
	protected runFlutter(args: string[], selection: vs.Uri | undefined, alwaysShowOutput = false): Thenable<number | undefined> {
		return this.runCommandForWorkspace(this.runFlutterInFolder.bind(this), `Select the folder to run "flutter ${args.join(" ")}" in`, args, selection, alwaysShowOutput);
	}

	/** Runs the flutter binary (or a custom wrapper script) in `folder`. */
	protected runFlutterInFolder(folder: string, args: string[], shortPath: string | undefined, alwaysShowOutput = false, customScript?: CustomScript): Thenable<number | undefined> {
		if (!this.sdks.flutter)
			throw new Error("Flutter SDK not available");

		const execution = usingCustomScript(
			path.join(this.sdks.flutter, flutterPath),
			args,
			customScript,
		);

		// Global flags + per-folder user-configured extra args + the command args.
		const allArgs = getGlobalFlutterArgs()
			.concat(config.for(vs.Uri.file(folder)).flutterAdditionalArgs)
			.concat(execution.args);

		return this.runCommandInFolder(shortPath, folder, execution.executable, allArgs, alwaysShowOutput);
	}

	/** Runs `pub <args>` after prompting for the folder. */
	protected runPub(args: string[], selection: vs.Uri | undefined, alwaysShowOutput = false): Thenable<number | undefined> {
		return this.runCommandForWorkspace(this.runPubInFolder.bind(this), `Select the folder to run "pub ${args.join(" ")}" in`, args, selection, alwaysShowOutput);
	}

	/** Runs pub (standalone or via `dart pub`, per capabilities) in `folder`. */
	protected runPubInFolder(folder: string, args: string[], shortPath: string, alwaysShowOutput = false): Thenable<number | undefined> {
		if (!this.sdks.dart)
			throw new Error("Dart SDK not available");

		args = args.concat(...config.for(vs.Uri.file(folder)).pubAdditionalArgs);

		const pubExecution = getPubExecutionInfo(this.dartCapabilities, this.sdks.dart, args);

		return this.runCommandInFolder(shortPath, folder, pubExecution.executable, pubExecution.args, alwaysShowOutput);
	}

	/**
	 * Spawns the command with a progress notification and dedicated output
	 * channel. A repeat of the same command (same folder+binary+args) chains
	 * behind/cancels the previous run. Resolves with the exit code, or
	 * undefined if cancelled before starting.
	 */
	protected runCommandInFolder(shortPath: string | undefined, folder: string, binPath: string, args: string[], alwaysShowOutput: boolean): Thenable<number | undefined> {
		shortPath = shortPath || path.basename(folder);
		const commandName = path.basename(binPath).split(".")[0]; // Trim file extension.

		const channel = channels.getOutputChannel(`${commandName} (${shortPath})`, true);
		if (alwaysShowOutput)
			channel.show();

		// Figure out if there's already one of this command running, in which case we'll chain off the
		// end of it.
		const commandId = `${folder}|${commandName}|${args}`;
		const existingProcess = this.runningCommands[commandId];
		if (existingProcess && !existingProcess.hasStarted) {
			// We already have a queued version of this command so there's no value in queueing another
			// just bail.
			return Promise.resolve(undefined);
		}

		return vs.window.withProgress({
			cancellable: true,
			location: vs.ProgressLocation.Notification,
			title: `${commandName} ${args.join(" ")}`,
		}, (progress, token) => {
			if (existingProcess) {
				progress.report({ message: "terminating previous command..." });
				existingProcess.cancel();
			} else {
				channel.clear();
			}

			const process = new ChainedProcess(() => {
				channel.appendLine(`[${shortPath}] ${commandName} ${args.join(" ")}`);
				progress.report({ message: "running..." });
				const proc = safeToolSpawn(folder, binPath, args);
				channels.runProcessInOutputChannel(proc, channel);
				this.logger.info(`(PROC ${proc.pid}) Spawned ${binPath} ${args.join(" ")} in ${folder}`, LogCategory.CommandProcesses);
				logProcess(this.logger, LogCategory.CommandProcesses, proc);

				// If we complete with a non-zero code, or don't complete within 10s, we should show
				// the output pane.
				const completedWithErrorPromise = new Promise((resolve) => proc.on("close", resolve));
				const timedOutPromise = new Promise((resolve) => setTimeout(() => resolve(true), 10000));
				// tslint:disable-next-line: no-floating-promises
				Promise.race([completedWithErrorPromise, timedOutPromise]).then((showOutput) => {
					// Truthy either for a non-zero exit code or the timeout sentinel.
					if (showOutput)
						channel.show(true);
				});

				return proc;
			}, existingProcess);
			this.runningCommands[commandId] = process;
			token.onCancellationRequested(() => process.cancel());

			return process.completed;
		});
	}

	public dispose(): any {
		disposeAll(this.disposables);
	}
}
/**
 * Registers SDK-switching commands and watches the Dart SDK version file so
 * the user can be prompted to reload after an SDK upgrade.
 */
export class SdkCommands extends BaseSdkCommands {
	constructor(logger: Logger, context: Context, workspace: DartWorkspaceContext, dartCapabilities: DartCapabilities) {
		super(logger, context, workspace, dartCapabilities);
		const dartSdkManager = new DartSdkManager(this.logger, this.workspace.sdks);
		this.disposables.push(vs.commands.registerCommand("dart.changeSdk", () => dartSdkManager.changeSdk()));
		if (workspace.hasAnyFlutterProjects) {
			const flutterSdkManager = new FlutterSdkManager(this.logger, workspace.sdks);
			this.disposables.push(vs.commands.registerCommand("dart.changeFlutterSdk", () => flutterSdkManager.changeSdk()));
		}

		// Monitor version files for SDK upgrades.
		this.setupVersionWatcher();
	}

	private async setupVersionWatcher() {
		// On Windows, the watcher sometimes fires even if the file wasn't modified (could be when
		// accessed), so we need to filter those out. We can't just check the modified time is "recent"
		// because the unzip preserves the modification dates of the SDK. Instead, we'll capture the mtime
		// of the file at start, and then fire only if that time actually changes.
		const versionFile = path.join(this.sdks.dart, "version");
		const getModifiedTimeMs = async () => {
			try {
				return (await fs.promises.stat(versionFile)).mtime.getTime();
			} catch (error) {
				this.logger.warn(`Failed to check modification time on version file. ${error}`);
				return;
			}
		};
		let lastModifiedTime = await getModifiedTimeMs();
		// If we couldn't get the initial modified time, we can't track this.
		if (!lastModifiedTime)
			return;
		const watcher = fs.watch(versionFile, { persistent: false }, async (eventType: string) => {
			if (!commandState.promptToReloadOnVersionChanges)
				return;

			const newModifiedTime = await getModifiedTimeMs();

			// Bail if we couldn't get a new modified time, or it was the same as the last one.
			if (!newModifiedTime || newModifiedTime === lastModifiedTime)
				return;
			lastModifiedTime = newModifiedTime;

			// Ensure we don't fire too often as some OSes may generate multiple events.
			commandState.promptToReloadOnVersionChanges = false;
			// Allow it again in 60 seconds.
			setTimeout(() => commandState.promptToReloadOnVersionChanges = true, 60000);

			// Wait a short period before prompting.
			setTimeout(() => util.promptToReloadExtension("Your Dart SDK has been updated. Reload using the new SDK?", undefined, false), 1000);
		});
		// Ensure the fs watcher is released when the extension is disposed.
		this.disposables.push({ dispose() { watcher.close(); } });
	}
}
/** Records that a project-creation flow has begun (see commandState). */
export function markProjectCreationStarted(): void {
	commandState.numProjectCreationsInProgress += 1;
}

/** Records that a project-creation flow has finished. */
export function markProjectCreationEnded(): void {
	commandState.numProjectCreationsInProgress -= 1;
}
/**
 * A spawned process in a chain: starts immediately, or — when given a parent —
 * only once the parent's process completes. `completed` resolves with the exit
 * code, or undefined if cancelled before starting.
 */
class ChainedProcess {
	private static processNumber = 1;
	// Monotonic ID, used only for error messages.
	public processNumber = ChainedProcess.processNumber++;
	private completer: PromiseCompleter<number | undefined> = new PromiseCompleter<number | undefined>();
	public readonly completed = this.completer.promise;
	public process: SpawnedProcess | undefined;
	private isCancelled = false;
	public get hasStarted() {
		return this.process !== undefined;
	}

	constructor(private readonly spawn: () => SpawnedProcess, parent: ChainedProcess | undefined) {
		// We'll either start immediately, or if given a parent process only when it completes.
		if (parent) {
			// tslint:disable-next-line: no-floating-promises
			parent.completed.then(() => this.start());
		} else {
			this.start();
		}
	}

	public start(): void {
		if (this.process)
			throw new Error(`${this.processNumber} Can't start an already started process!`);
		// A cancel that happened before start: resolve without spawning.
		if (this.isCancelled) {
			this.completer.resolve(undefined);
			return;
		}
		this.process = this.spawn();
		this.process.on("close", (code) => this.completer.resolve(nullToUndefined(code)));
	}

	public cancel(): void {
		// NOTE(review): this only prevents a not-yet-started process from
		// spawning; it does not kill an already running child process —
		// confirm that is intended where callers "terminate" a previous run.
		this.isCancelled = true;
	}
}
import { Component, ElementRef, ViewChild } from '@angular/core';
import { GameService } from '../game.service';
import { DiscourseAuthModalState, DiscourseService, DiscourseUser } from '../discourse.service';
@Component({
selector: 'auth-modal',
templateUrl: './auth-modal.component.html',
styleUrls: ['./auth-modal.component.scss']
})
export class AuthModalComponent {
modalVisible: boolean;
accountInfoLoaded = false;
login = true;
register = false;
completeRegister = false;
completeAuth = false;
authOnWeb = false;
displayTOTP = false;
notification: string | null = null;
btnActive = false;
typedEmail = '';
typedPassword = '';
typedUserName = '';
typedTOTP = '';
emailHint = 'Never shown to the public';
usernameHint = 'Unique, No Spaces, Short';
passwordHint = 'At least 8 characters';
emailValid = false;
usernameValid = false;
passwordValid = false;
streamerMode: boolean = true;
showIgnoreButton: boolean = false;
@ViewChild('modal')
modal: ElementRef;
currentAccount: DiscourseUser | null = null;
get isLoginScreen(): boolean {
return this.accountInfoLoaded && !this.completeAuth && !this.completeRegister && !this.register && this.login;
}
get isRegisterScreen(): boolean {
return this.accountInfoLoaded && !this.completeAuth && !this.completeRegister && !this.login && this.register;
}
get isAuthCompleteScreen(): boolean {
return this.accountInfoLoaded && this.completeAuth;
}
get isRegistrationCompleteScreen(): boolean {
return this.accountInfoLoaded && this.completeRegister;
}
get canBrowser() {
// since we only have a `fivem://` protocol handler at this time, we can't handle browser
// sign-in on other games.
return this.gameService.gameName === 'gta5';
}
constructor(
private gameService: GameService,
private discourseService: DiscourseService,
) {
this.streamerMode = gameService.streamerMode;
this.showIgnoreButton = discourseService.authModalState.getValue() === DiscourseAuthModalState.SHOWN;
this.discourseService.authModalOpenChange.subscribe((authModalOpen) => this.modalVisible = authModalOpen);
this.gameService.streamerModeChange.subscribe((streamerMode) => this.streamerMode = streamerMode);
this.discourseService.authModalState.subscribe((state) => {
if (state === DiscourseAuthModalState.SHOWN) {
this.showIgnoreButton = true;
}
});
this.discourseService.signinChange.subscribe((user) => this.currentAccount = user);
this.discourseService.initialAuthComplete.subscribe((complete) => {
this.accountInfoLoaded = complete;
});
}
toggleModalVisiblityOuter(event: MouseEvent) {
if (event.target === this.modal?.nativeElement) {
this.toggleModalVisiblity();
}
}
toggleModalVisiblity(): void {
let analyticsName: string;
if (this.isLoginScreen) {
analyticsName = 'LoginScreen';
} else if (this.isRegisterScreen) {
analyticsName = 'RegistrationScreen';
}
this.discourseService.closeAuthModal(analyticsName);
}
dontAskAgain() {
this.discourseService.closeAuthModalAndIgnore();
}
setLoginView(): void {
this.login = true;
this.register = false;
this.completeRegister = false;
this.completeAuth = false;
this.typedEmail = '';
this.typedPassword = '';
this.typedUserName = '';
this.emailValid = false;
this.usernameValid = false;
this.passwordValid = false;
}
setRegisterView(): void {
this.register = true;
this.login = false;
this.completeRegister = false;
this.completeAuth = false;
this.typedEmail = '';
this.typedPassword = '';
this.typedUserName = '';
}
validateEmail(email: string) {
const re = /^[a-zA-Z0-9!#\$%&'*+\/=?\^_`{|}~\-]+(?:\.[a-zA-Z0-9!#\$%&'\*+\/=?\^_`{|}~\-]+)*@(?:[a-zA-Z0-9](?:[a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z0-9](?:[a-zA-Z0-9\-]*[a-zA-Z0-9])?$/;
return re.test(email);
}
async onEmailChange() {
if (this.typedEmail === '') {
this.emailValid = false;
this.emailHint = 'Never shown to the public';
} else if (this.validateEmail(this.typedEmail)) {
const emailAvailable = await this.discourseService.checkValidEmail(this.typedEmail);
if (emailAvailable.success) {
this.emailValid = true;
this.emailHint = 'Valid Email Address';
} else {
this.emailValid = false;
this.emailHint = emailAvailable.errors[0];
}
} else {
this.emailValid = false;
this.emailHint = 'Please enter a valid email';
}
}
async onUsernameChange() {
if (this.typedUserName === '') {
this.usernameValid = false;
this.usernameHint = 'Unique, No Spaces, Short';
} else if (this.typedUserName.length < 3) {
this.usernameValid = false;
this.usernameHint = 'Your username is too short';
} else {
const usernameAvailable = await this.discourseService.checkValidUsername(this.typedUserName);
if (usernameAvailable.available === false) {
this.usernameValid = false;
this.usernameHint = `Not available. Try ${usernameAvailable.suggestion}`;
} else {
this.usernameValid = true;
this.usernameHint = `Your username is available`;
}
}
}
onPasswordChange(): void {
if (this.typedPassword === '') {
this.passwordValid = false;
} else {
this.typedPassword.length >= 8 ? this.passwordValid = true : this.passwordValid = false;
}
}
async oneClickDiscourseLogin() {
this.preventBtnSpam(async () => {
const url = await this.discourseService.generateAuthURL();
this.gameService.openUrl(url);
this.authOnWeb = true;
this.completeAuth = true;
}, 3000);
}
async traditionalDiscourseLogin() {
this.preventBtnSpam(async () => {
try {
const user = await this.discourseService.login(this.typedEmail, this.typedPassword, this.typedTOTP);
if (user.error) {
if (user.reason === 'invalid_second_factor' && this.typedTOTP == '') {
if (this.handleTFA(user)) {
return;
}
}
this.setNotification(user.error, 7000);
return;
}
this.completeRegister = false;
this.completeAuth = true;
} catch {
this.setNotification('Error signing In - Try again later', 7000);
}
}, 2000);
}
handleTFA(error: any) {
if (error.totp_enabled) {
this.displayTOTP = true;
return true;
}
return false;
}
forgotDiscoursePassword(): void {
this.preventBtnSpam(async () => {
if (this.typedEmail === '' || !this.validateEmail(this.typedEmail)) {
this.setNotification('Please enter a valid email to reset password', 7000);
return;
}
try {
await this.discourseService.resetPassword(this.typedEmail);
this.setNotification('Password reset email sent', 7000);
} catch {
this.setNotification('Failed to send reset email - Try reset on forum.cfx.re', 7000);
}
}, 2000);
}
async registerDiscourseAccount() {
this.preventBtnSpam(async () => {
const newUserInfo = {
'email': this.typedEmail,
'username': this.typedUserName,
'password': this.typedPassword
};
const newUser = await this.discourseService.registerNewUser(newUserInfo);
if (newUser.success) {
this.completeRegister = true;
} else if (newUser.success === false) {
const errs = newUser.errors;
const allErrsArray = [];
for (const err in errs) {
errs[err].forEach(e => {
if (typeof e === 'string') {
allErrsArray.push(e);
}
});
}
const notificationMsg = allErrsArray[0] === undefined ? 'Account Creation Failed. Try Again Later' : allErrsArray.join(' ');
this.setNotification(notificationMsg, 5000);
} else {
this.setNotification('Account Creation Failed. Try Again Later', 5000);
}
}, 2000);
}
async resendDiscourseActivationEmail() {
this.preventBtnSpam(async () => {
try {
await this.discourseService.resendActivationEmail(this.typedUserName);
this.setNotification('Activation Email Sent', 5000);
} catch {
this.setNotification('Failed to resend Activation Email Sent', 5000);
}
}, 2000);
}
setNotification(notiMsg: string, notiDisplayTime: number): void {
this.notification = notiMsg;
setTimeout(() => {
this.notification = null;
}, notiDisplayTime);
}
preventBtnSpam(fn, time: number): void {
if (!this.btnActive) {
this.btnActive = true;
fn();
setTimeout(() => {
this.btnActive = false;
}, time);
} else {
return;
}
}
} | the_stack |
import { uuid } from 'uuidv4'
import { Inject, Injectable, CACHE_MANAGER } from '@nestjs/common'
import { Discount } from './discount.model'
import { Flight } from '../flight'
import {
CONNECTING_FLIGHT_GRACE_PERIOD,
REYKJAVIK_FLIGHT_CODES,
} from '../flight/flight.service'
import { ConnectionDiscountCode } from '@island.is/air-discount-scheme/types'
/** Shape of a discount entry as stored in the cache. */
interface CachedDiscount {
  discountCode: string
  connectionDiscountCodes: ConnectionDiscountCode[]
  nationalId: string
}
/** Number of characters in a generated discount code. */
export const DISCOUNT_CODE_LENGTH = 8
// 24 hours expressed in seconds — presumably the cache manager's ttl unit; confirm.
const ONE_DAY = 24 * 60 * 60
// Builders for the cache keys used to look a discount up from different directions
// (by user, by cache id, by code, by connection code, by flight).
const CACHE_KEYS = {
  user: (nationalId: string) => `discount_user_lookup_${nationalId}`,
  discount: (id: string) => `discount_id_${id}`,
  discountCode: (discountCode: string) =>
    `discount_code_lookup_${discountCode}`,
  connectionDiscountCode: (code: string) => `connection_discount_${code}`,
  flight: (flightId: string) => `discount_flight_lookup_${flightId}`,
}
@Injectable()
export class DiscountService {
constructor(
@Inject(CACHE_MANAGER) private readonly cacheManager: CacheManager,
) {}
private getRandomRange(min: number, max: number): number {
return Math.random() * (max + 1 - min) + min
}
private generateDiscountCode(): string {
return [...Array(DISCOUNT_CODE_LENGTH)]
.map(() => {
// We are excluding 0 and O because users mix them up
const charCodes = [
this.getRandomRange(49, 57), // 1 - 9
this.getRandomRange(65, 78), // A - N
this.getRandomRange(80, 90), // P - Z
]
const randomCharCode =
charCodes[Math.floor(Math.random() * charCodes.length)]
return String.fromCharCode(randomCharCode)
})
.join('')
}
private async setCache<T>(
key: string,
value: T,
ttl: number = ONE_DAY,
): Promise<void> {
return this.cacheManager.set(key, value, { ttl })
}
private async getCache<T>(cacheKey: string): Promise<T | null> {
const cacheId = await this.cacheManager.get(cacheKey)
if (!cacheId) {
return null
}
return this.cacheManager.get(cacheId)
}
async createDiscountCode(
nationalId: string,
connectableFlights: Flight[],
): Promise<Discount> {
const discountCode = this.generateDiscountCode()
const cacheId = CACHE_KEYS.discount(uuid())
let connectionDiscountCodes: ConnectionDiscountCode[] = []
const previousCacheId = CACHE_KEYS.user(nationalId)
const previousCache = await this.getCache<CachedDiscount>(previousCacheId)
let previousFlightIds: string[] = []
if (previousCache?.connectionDiscountCodes) {
connectionDiscountCodes = previousCache.connectionDiscountCodes
previousFlightIds = connectionDiscountCodes.map((cdc) => cdc.flightId)
}
const connectableFlightCounts = connectableFlights.length
// Create a discount code per every new flightId
for (let i = 0; i < connectableFlightCounts; i++) {
const flight = connectableFlights.pop()
if (flight && !previousFlightIds.includes(flight.id)) {
const flightLegsCount = flight.flightLegs.length
let validUntil = new Date(
Date.parse(flight.flightLegs[0].date.toString()),
)
if (REYKJAVIK_FLIGHT_CODES.includes(flight.flightLegs[0].origin)) {
validUntil = new Date(
validUntil.getTime() + CONNECTING_FLIGHT_GRACE_PERIOD,
)
} else if (
REYKJAVIK_FLIGHT_CODES.includes(
flight.flightLegs[flightLegsCount - 1].destination,
)
) {
validUntil = new Date(validUntil.getTime())
}
const flightId = flight.id
const flightDesc = `${flight.flightLegs[0].origin}-${
flight.flightLegs[flightLegsCount - 1].destination
}`
const connectionDiscountCode = this.generateDiscountCode()
// Point every connection discount code to the cache id for lookup later
await this.setCache<string>(
CACHE_KEYS.connectionDiscountCode(connectionDiscountCode),
cacheId,
)
connectionDiscountCodes.push({
code: connectionDiscountCode,
flightId,
flightDesc,
validUntil: validUntil.toISOString(),
})
}
}
// Filter out connectionCodes that are expired in their validity
const now = new Date(Date.now())
connectionDiscountCodes = connectionDiscountCodes.filter((cdc) => {
const validUntil = new Date(Date.parse(cdc.validUntil))
return now < validUntil
})
await this.setCache<CachedDiscount>(cacheId, {
nationalId,
discountCode,
connectionDiscountCodes,
})
await this.setCache<string>(CACHE_KEYS.discountCode(discountCode), cacheId)
await this.setCache<string>(CACHE_KEYS.user(nationalId), cacheId)
return new Discount(
discountCode,
connectionDiscountCodes,
nationalId,
ONE_DAY,
)
}
async getDiscountByNationalId(nationalId: string): Promise<Discount | null> {
const cacheKey = CACHE_KEYS.user(nationalId)
const cacheValue = await this.getCache<CachedDiscount>(cacheKey)
if (!cacheValue) {
return null
}
// If the cached discountCode has been used then the nationalId cache
// is invalid and needs to be regenerated by the resolver
const discountCodeCacheKey = CACHE_KEYS.discountCode(
cacheValue.discountCode,
)
if (!(await this.getCache<CachedDiscount>(discountCodeCacheKey))) {
return null
}
const ttl = await this.cacheManager.ttl(cacheKey)
return new Discount(
cacheValue.discountCode,
cacheValue.connectionDiscountCodes ?? [],
nationalId,
ttl,
)
}
async getDiscountByDiscountCode(
discountCode: string,
): Promise<Discount | null> {
const cacheKey = CACHE_KEYS.discountCode(discountCode)
const cacheValue = await this.getCache<CachedDiscount>(cacheKey)
if (!cacheValue) {
return await this.getDiscountByConnectionDiscountCode(discountCode)
}
const ttl = await this.cacheManager.ttl(cacheKey)
return new Discount(
discountCode,
cacheValue.connectionDiscountCodes ?? [],
cacheValue.nationalId,
ttl,
)
}
async getDiscountByConnectionDiscountCode(
discountCode: string,
): Promise<Discount | null> {
const cacheKey = CACHE_KEYS.connectionDiscountCode(discountCode)
const cacheValue = await this.getCache<CachedDiscount>(cacheKey)
if (!cacheValue) {
return null
}
const connectionDiscountCode = this.filterConnectionDiscountCodes(
cacheValue.connectionDiscountCodes,
discountCode,
)
if (!connectionDiscountCode) {
return null
}
const ttl = await this.cacheManager.ttl(cacheKey)
return new Discount(
cacheValue.discountCode,
cacheValue.connectionDiscountCodes ?? [],
cacheValue.nationalId,
ttl,
)
}
filterConnectionDiscountCodes(
connectionDiscountCodes: ConnectionDiscountCode[],
discountCode: string,
): ConnectionDiscountCode | null {
// Return nothing if the connection discount code is expired
const now = new Date(Date.now())
connectionDiscountCodes = connectionDiscountCodes.filter((cdc) => {
if (cdc.code === discountCode) {
const validUntil = new Date(Date.parse(cdc.validUntil))
return now < validUntil
} else {
return false
}
})
if (connectionDiscountCodes.length === 0) {
return null
}
return connectionDiscountCodes[0]
}
async useDiscount(
discountCode: string,
nationalId: string,
flightId: string,
isConnectionDiscount: boolean,
): Promise<void> {
const cacheKey = CACHE_KEYS.user(nationalId)
const cacheValue = await this.getCache<CachedDiscount>(cacheKey)
if (isConnectionDiscount) {
if (cacheValue?.connectionDiscountCodes) {
let markedDiscountCode = null
for (const connectionDiscountCode of cacheValue.connectionDiscountCodes) {
if (connectionDiscountCode.code === discountCode) {
markedDiscountCode = connectionDiscountCode
}
}
// Remove connection discount code from cache if found
if (markedDiscountCode) {
cacheValue.connectionDiscountCodes.splice(
cacheValue.connectionDiscountCodes.indexOf(markedDiscountCode),
1,
)
await this.cacheManager.del(
CACHE_KEYS.connectionDiscountCode(markedDiscountCode.code),
)
}
await this.setCache<CachedDiscount>(cacheKey, cacheValue)
}
} else {
const discountCacheKey = CACHE_KEYS.discountCode(discountCode)
const cacheId = await this.cacheManager.get(discountCacheKey)
await this.cacheManager.del(discountCacheKey)
const ttl = await this.cacheManager.ttl(cacheId)
await this.setCache<string>(CACHE_KEYS.flight(flightId), cacheId, ttl)
}
}
// When an airline booking has a payment failure, they have already registered
// the flight with us and used the discount. To avoid making the user get a
// new discount, we reactivate the discount here only if the flight is
// cancelled before the ttl on the discount code expires.
async reactivateDiscount(flightId: string): Promise<void> {
const usedDiscountCacheKey = CACHE_KEYS.flight(flightId)
const cacheId = await this.cacheManager.get(usedDiscountCacheKey)
if (!cacheId) {
return
}
await this.cacheManager.del(usedDiscountCacheKey)
const cacheValue: CachedDiscount = await this.cacheManager.get(cacheId)
if (!cacheValue) {
return
}
const ttl = await this.cacheManager.ttl(cacheId)
// Point the discount code back to the old cache
await this.setCache<CachedDiscount>(cacheId, cacheValue)
await this.setCache<string>(
CACHE_KEYS.discountCode(cacheValue.discountCode),
cacheId,
ttl,
)
// Move the user lookup pointer to the old cache too
// TODO: cache is not invalidated entirely on this
// deletion endpoint
await this.setCache<string>(
CACHE_KEYS.user(cacheValue.nationalId),
cacheId,
ttl,
)
}
} | the_stack |
import minimatch from 'minimatch';
import path from 'path';
import _ from 'lodash';
import R from 'ramda';
import { BitId, BitIds } from '../../../../bit-id';
import {
COMPONENT_ORIGINS,
DEPENDENCIES_FIELDS,
MANUALLY_ADD_DEPENDENCY,
MANUALLY_REMOVE_DEPENDENCY,
OVERRIDE_COMPONENT_PREFIX,
} from '../../../../constants';
import Consumer from '../../../../consumer/consumer';
import logger from '../../../../logger/logger';
import { ResolvedPackageData, resolvePackageData, resolvePackagePath } from '../../../../utils/packages';
import { PathLinux } from '../../../../utils/path';
import ComponentMap from '../../../bit-map/component-map';
import Component from '../../../component/consumer-component';
import { AllDependencies, FileType } from './dependencies-resolver';
/**
 * Dependencies explicitly added or removed through component overrides,
 * grouped by package.json-style dependency field.
 */
export type ManuallyChangedDependencies = {
  dependencies?: string[];
  devDependencies?: string[];
  peerDependencies?: string[];
};
export default class OverridesDependencies {
component: Component;
consumer: Consumer;
componentMap: ComponentMap;
componentFromModel: Component | null | undefined;
manuallyRemovedDependencies: ManuallyChangedDependencies;
manuallyAddedDependencies: ManuallyChangedDependencies;
missingPackageDependencies: string[];
constructor(component: Component, consumer: Consumer) {
this.component = component;
this.consumer = consumer; // $FlowFixMe
// @ts-ignore AUTO-ADDED-AFTER-MIGRATION-PLEASE-FIX!
this.componentMap = this.component.componentMap;
this.componentFromModel = this.component.componentFromModel;
this.manuallyRemovedDependencies = {};
this.manuallyAddedDependencies = {};
this.missingPackageDependencies = [];
}
shouldIgnoreFile(file: string, fileType: FileType): boolean {
const shouldIgnoreByGlobMatch = (patterns: string[]) => {
return patterns.some((pattern) => minimatch(file, pattern));
};
const field = fileType.isTestFile ? 'devDependencies' : 'dependencies';
const ignoreField = this.component.overrides.getIgnoredFiles(field);
const ignore = shouldIgnoreByGlobMatch(ignoreField);
if (ignore) {
this._addManuallyRemovedDep(field, file);
}
return ignore;
}
shouldIgnorePackage(packageName: string, fileType: FileType): boolean {
const field = fileType.isTestFile ? 'devDependencies' : 'dependencies';
return this.shouldIgnorePackageByType(packageName, field);
}
shouldIgnorePackageByType(packageName: string, field: string): boolean {
const shouldIgnorePackage = (packages: string[]) => {
return packages.some((pkg) => pkg === packageName);
};
const ignoreField = this.component.overrides.getIgnoredPackages(field);
const ignore = shouldIgnorePackage(ignoreField);
if (ignore) {
this._addManuallyRemovedDep(field, packageName);
}
return ignore;
}
shouldIgnorePeerPackage(packageName: string): boolean {
const shouldIgnorePackage = (packages: string[]) => {
return packages.some((pkg) => pkg === packageName);
};
const field = 'peerDependencies';
const ignorePeer = this.component.overrides.getIgnoredPackages(field);
const ignore = shouldIgnorePackage(ignorePeer);
if (ignore) {
this._addManuallyRemovedDep(field, packageName);
}
return ignore;
}
shouldIgnoreComponent(componentId: BitId, fileType: FileType): boolean {
const componentIdStr = componentId.toStringWithoutVersion();
const shouldIgnore = (ids: BitId[]) => {
return ids.some((id) => {
return componentId.isEqualWithoutVersion(id) || componentId.isEqualWithoutScopeAndVersion(id);
});
};
const field = fileType.isTestFile ? 'devDependencies' : 'dependencies';
const ignoredComponents = this._getIgnoredComponentsByField(field);
const ignore = shouldIgnore(ignoredComponents);
if (ignore) {
this._addManuallyRemovedDep(field, componentIdStr);
}
return ignore;
}
getDependenciesToAddManually(
packageJson: Record<string, any> | null | undefined,
existingDependencies: AllDependencies
): { components: Record<string, any>; packages: Record<string, any> } | null | undefined {
const overrides = this.component.overrides.componentOverridesData;
if (!overrides) return null;
const idsFromBitmap = this.consumer.bitMap.getAllBitIdsFromAllLanes([
COMPONENT_ORIGINS.AUTHORED,
COMPONENT_ORIGINS.IMPORTED,
]);
const idsFromModel = this.componentFromModel ? this.componentFromModel.dependencies.getAllIds() : new BitIds();
const components = {};
const packages = {};
DEPENDENCIES_FIELDS.forEach((depField) => {
if (!overrides[depField]) return;
Object.keys(overrides[depField]).forEach((dependency) => {
const dependencyValue = overrides[depField][dependency];
if (dependencyValue === MANUALLY_REMOVE_DEPENDENCY) return;
const componentData = this._getComponentIdToAdd(
depField,
dependency,
dependencyValue,
idsFromBitmap,
idsFromModel
);
if (componentData && componentData.componentId) {
const dependencyExist = existingDependencies[depField].find((d) =>
d.id.isEqualWithoutScopeAndVersion(componentData.componentId)
);
if (!dependencyExist) {
this._addManuallyAddedDep(depField, componentData.componentId.toString());
components[depField] ? components[depField].push(componentData) : (components[depField] = [componentData]);
}
return;
}
const addedPkg = this._manuallyAddPackage(depField, dependency, dependencyValue, packageJson);
if (addedPkg) {
packages[depField] = Object.assign(packages[depField] || {}, addedPkg);
}
});
});
return { components, packages };
}
_getIgnoredComponentsByField(field: 'devDependencies' | 'dependencies' | 'peerDependencies'): BitId[] {
const ignoredPackages = this.component.overrides.getIgnoredPackages(field);
const idsFromBitmap = this.consumer.bitMap.getAllBitIdsFromAllLanes([
COMPONENT_ORIGINS.AUTHORED,
COMPONENT_ORIGINS.IMPORTED,
]);
const idsFromModel = this.componentFromModel ? this.componentFromModel.dependencies.getAllIds() : new BitIds();
const ignoredComponents = ignoredPackages.map((packageName) =>
this._getComponentIdFromPackage(packageName, idsFromBitmap, idsFromModel)
);
return _.compact(ignoredComponents);
}
_getComponentIdToAdd(
field: string,
dependency: string,
dependencyValue: string,
idsFromBitmap: BitIds,
idsFromModel: BitIds
): { componentId?: BitId; packageName?: string } | undefined {
if (field === 'peerDependencies') return undefined;
if (this.consumer.isLegacy && dependency.startsWith(OVERRIDE_COMPONENT_PREFIX)) {
const componentId = this._getComponentIdToAddForLegacyWs(
field,
dependency,
dependencyValue,
idsFromBitmap,
idsFromModel
);
return {
componentId,
};
}
const packageData = this._resolvePackageData(dependency);
return { componentId: packageData?.componentId, packageName: packageData?.name };
}
_getComponentIdFromPackage(packageName: string, idsFromBitmap: BitIds, idsFromModel: BitIds): BitId | undefined {
// backward compatibility
if (this.consumer.isLegacy && packageName.startsWith(OVERRIDE_COMPONENT_PREFIX)) {
const existing = this._getExistingComponentIdFromLegacyPackageName(packageName, idsFromBitmap, idsFromModel);
return existing;
}
const packageData = this._resolvePackageData(packageName);
return packageData?.componentId;
}
/**
* This is used to support legacy projects (before harmony were components used @bit as prefix)
* @param field
* @param dependency
* @param dependencyValue
* @param idsFromBitmap
* @param idsFromModel
*/
_getComponentIdToAddForLegacyWs(
field: string,
dependency: string,
dependencyValue: string,
idsFromBitmap: BitIds,
idsFromModel: BitIds
): BitId | undefined {
if (field === 'peerDependencies') return undefined;
if (!dependency.startsWith(OVERRIDE_COMPONENT_PREFIX)) return undefined;
const id = this._getExistingComponentIdFromLegacyPackageName(dependency, idsFromBitmap, idsFromModel);
if (id) {
return dependencyValue === MANUALLY_ADD_DEPENDENCY ? id : id.changeVersion(dependencyValue);
}
return undefined;
}
/**
* For legacy use only, do not use it on harmony projects
* This will parse the package name and look for the parsed result in the bitmap / ids from model and return the matched id if exist
* @param dependency
* @param idsFromBitmap
* @param idsFromModel
*/
_getExistingComponentIdFromLegacyPackageName(
dependency: string,
idsFromBitmap: BitIds,
idsFromModel: BitIds
): BitId | undefined {
let result: BitId | undefined;
if (!dependency.startsWith(OVERRIDE_COMPONENT_PREFIX)) return undefined;
const compIds = this._getComponentIdFromLegacyPackageName(dependency);
for (const compId of [...compIds, dependency]) {
const bitId = compId.replace(OVERRIDE_COMPONENT_PREFIX, '');
const idFromBitMap =
idsFromBitmap.searchStrWithoutVersion(bitId) || idsFromBitmap.searchStrWithoutScopeAndVersion(bitId);
const idFromModel =
idsFromModel.searchStrWithoutVersion(bitId) || idsFromModel.searchStrWithoutScopeAndVersion(bitId);
// $FlowFixMe one of them must be set (see one line above)
// @ts-ignore AUTO-ADDED-AFTER-MIGRATION-PLEASE-FIX!
const id: BitId = idFromModel || idFromBitMap;
if (id) {
result = id;
}
}
return result;
}
// This strategy is used only for legacy projects (backward computability)
// This strategy should be stopped using since harmony, because a package name
// might be completely different than a component id
// instead we should go to the package.json and check the component name there
// Like we do in resolvePackageData()
/**
* it is possible that a user added the component into the overrides as a package.
* e.g. `@bit/david.utils.is-string` instead of `@bit/david.utils/is-string`
* or, if not using bit.dev, `@bit/utils.is-string` instead of `@bit/utils/is-string`
*/
_getComponentIdFromLegacyPackageName(idStr: string): string[] {
const idSplitByDot = idStr.split('.');
const numberOfDots = idSplitByDot.length - 1;
if (numberOfDots === 0) return []; // nothing to do. it wasn't entered as a package
const localScopeComponent = idSplitByDot.join('/'); // convert all dots to slashes
if (numberOfDots === 1) {
// it can't be from bit.dev, it must be locally
return [localScopeComponent];
}
// there are two dots or more. it can be from bit.dev and it can be locally
// for a remoteScopeComponent, leave the first dot and convert only the rest to a slash
const remoteScopeComponent = `${R.head(idSplitByDot)}.${R.tail(idSplitByDot).join('/')}`;
return [localScopeComponent, remoteScopeComponent];
}
_manuallyAddPackage(
field: string,
dependency: string,
dependencyValue: string,
packageJson: Record<string, any> | null | undefined
): Record<string, any> | null | undefined {
const packageVersionToAdd = (): string | null | undefined => {
if (dependencyValue !== MANUALLY_ADD_DEPENDENCY) {
return dependencyValue;
}
if (!packageJson) return null;
for (const depField of DEPENDENCIES_FIELDS) {
if (packageJson[depField]) {
const found = Object.keys(packageJson[depField]).find((pkg) => pkg === dependency);
if (found) return packageJson[depField][dependency];
}
}
return null;
};
const versionToAdd = packageVersionToAdd();
if (!versionToAdd) {
logger.debug(`unable to manually add the dependency "${dependency}" into "${this.component.id.toString()}".
it's not an existing component, nor existing package (in a package.json)`);
this.missingPackageDependencies.push(dependency);
return undefined;
}
const packageStr = `${dependency}@${versionToAdd}`;
this._addManuallyAddedDep(field, packageStr);
return { [dependency]: versionToAdd };
}
_addManuallyRemovedDep(field: string, value: string) {
this.manuallyRemovedDependencies[field]
? this.manuallyRemovedDependencies[field].push(value)
: (this.manuallyRemovedDependencies[field] = [value]);
}
_addManuallyAddedDep(field: string, value: string) {
this.manuallyAddedDependencies[field]
? this.manuallyAddedDependencies[field].push(value)
: (this.manuallyAddedDependencies[field] = [value]);
}
// TODO: maybe cache those results??
_resolvePackageData(packageName: string): ResolvedPackageData | undefined {
const rootDir: PathLinux | null | undefined = this.componentMap.rootDir;
const consumerPath = this.consumer.getPath();
const basePath = rootDir ? path.join(consumerPath, rootDir) : consumerPath;
// @ts-ignore AUTO-ADDED-AFTER-MIGRATION-PLEASE-FIX!
const modulePath = resolvePackagePath(packageName, basePath, consumerPath);
if (!modulePath) return undefined; // e.g. it's author and wasn't exported yet, so there's no node_modules of that component
const packageObject = resolvePackageData(basePath, modulePath);
return packageObject;
}
} | the_stack |
import * as fs from 'fs';
import {RemoteBase, RemoteBreakpoint, BREAK_REASON_NUMBER, Remote} from '../remotebase';
import {GenericWatchpoint, GenericBreakpoint} from '../../genericwatchpoint';
import {Z80RegistersClass, Z80_REG, Z80Registers} from '../z80registers';
import {MemBank16k} from './membank16k';
import {SnaFile} from './snafile';
import {NexFile} from './nexfile';
import {Settings} from '../../settings';
import {Utility} from '../../misc/utility';
import * as path from 'path';
import {Labels} from '../../labels/labels';
import {gzip, ungzip} from 'node-gzip';
import {TimeWait} from '../../misc/timewait';
import {Log} from '../../log';
import {Zx128MemoryModel, Zx48MemoryModel, ZxNextMemoryModel} from '../Paging/memorymodel';
import {Z80RegistersStandardDecoder} from '../z80registersstandarddecoder';
import {PromiseCallbacks} from '../../misc/promisecallbacks';
// The current implemented version of the protocol.
export const DZRP_VERSION = [2, 0, 0];
// The program name and version transmitted during CMD_INIT.
// NOTE(review): process.version is the Node.js runtime version, not DeZog's
// own version — confirm whether the extension's package version was intended.
export const DZRP_PROGRAM_NAME = "DeZog v" + process.version;
/**
* The DZP commands and responses.
* The response contains the command with the bit 7 set.
*/
/**
 * DZRP command ids as transmitted on the wire. Within each group the members
 * auto-increment from the group's anchor value, which keeps them consecutive
 * by construction; the numeric values are identical to before.
 */
export enum DZRP {
	// ZXNext: All Commands available in ZXNext (need to be consecutive)
	CMD_INIT = 1,
	CMD_CLOSE,
	CMD_GET_REGISTERS,
	CMD_SET_REGISTER,
	CMD_WRITE_BANK,
	CMD_CONTINUE,
	CMD_PAUSE,
	CMD_READ_MEM,
	CMD_WRITE_MEM,
	CMD_SET_SLOT,	// 10
	CMD_GET_TBBLUE_REG,
	CMD_SET_BORDER,
	CMD_SET_BREAKPOINTS,
	CMD_RESTORE_MEM,
	CMD_LOOPBACK,
	CMD_GET_SPRITES_PALETTE,
	CMD_GET_SPRITES_CLIP_WINDOW_AND_CONTROL,	// 17

	// Sprites
	CMD_GET_SPRITES,	// 18
	CMD_GET_SPRITE_PATTERNS,	// 19

	// Breakpoint
	CMD_ADD_BREAKPOINT = 40,
	CMD_REMOVE_BREAKPOINT,
	CMD_ADD_WATCHPOINT,
	CMD_REMOVE_WATCHPOINT,	// 43

	// State
	CMD_READ_STATE = 50,
	CMD_WRITE_STATE,	// 51
}
/**
* DZRP notifications.
*/
export enum DZRP_NTF {
	// Asynchronous notification that the remote target paused.
	NTF_PAUSE = 1
}
/**
* Used for the DZRP CMD_CONTINUE alternate command for performance
* improvement.
* Is not implemented yet in DeZog but the DZRP already defines it.
*/
/** Alternate command modes for CMD_CONTINUE; values auto-increment from 0 and are unchanged. */
export enum AlternateCommand {
	CONTINUE = 0, // I.e. no alternate command
	STEP_OVER,    // 1
	STEP_OUT,     // 2
}
/**
 * Defines the machine type that is returned in CMD_INIT.
 * It is required to determine the memory model.
 * NOTE(review): ZX16K has no corresponding case in onConnect's memory
 * model switch — verify whether it should be handled there.
 */
export enum DzrpMachineType {
ZX16K = 1,
ZX48K = 2,
ZX128K = 3,
ZXNEXT = 4
}
/**
 * A class that communicates with the remote via the DZRP protocol.
 * It is base class for all DZRP remote classes that implement
 * special transports like serial connection or socket.
 *
 * All sendDzrpCmd... methods are empty stubs which need to be filled
 * by the specialized implementation.
 *
 * The class also implements flow/state handling for complex tasks
 * like 'continue'.
 */
export class DzrpRemote extends RemoteBase {
// The function to hold the Promise's resolve function for a continue request.
// Set before each sendDzrpCmdContinue and invoked by the PAUSE notification.
// NOTE(review): the destructured parameter is untyped (implicit 'any' under strict mode).
protected funcContinueResolve?: ({breakNumber, breakAddress, breakReasonString}) => void;
// The associated Promise resolve. Stored here to be called at dispose.
protected continueResolve?: PromiseCallbacks<string>;
// This flag is used to pause a step-out. Set by pause(), checked in evalBpConditionAndLog.
protected pauseStep = false;
// Object to allow to give time to vscode during long running 'steps'.
protected timeWait: TimeWait;
// A temporary map with the set breakpoints and conditions.
// The tmpBreakpoints are created out of the other breakpoints, assertionBreakpoints and logpoints
// as soon as the z80CpuContinue is called.
// It allows access of the breakpoint by a simple call to one map only.
// It may happen seldom, but it can happen that 2 breakpoints share
// the same address. Therefore the array contains an Array of GenericBreakpoints.
// normally the inner array contains only 1 element.
// The tmpBreakpoints are created when a Continue, StepOver, Stepinto
// or StepOut starts.
// It is used mainly in 'evalBpConditionAndLog()'.
// If a breakpoint is set during the debugged program being run
// the tmpBreakpoints are updated.
protected tmpBreakpoints = new Map<number, Array<GenericBreakpoint>>();
// The watchpoints are collected here. These are all watchpoint set through setWatchpoint.
// If the user could set watchpoints also manually this would include all WPMEM watchpoints plus
// the user set watchpoints.
// Note: it could happen that several watchpoints are defined for the same
// address or that they overlap (they have size).
protected addedWatchpoints = new Set<GenericWatchpoint>();
/// Constructor.
/// Override this in the specialized transport implementation.
/// Only delegates to the base class here.
constructor() {
super();
}
/**
 * Disposes the remote.
 * If a 'continue' promise is still pending it is resolved with an empty
 * reason string so that no caller is left waiting, then the base class
 * is disposed.
 */
public dispose() {
	const pending = this.continueResolve;
	if (pending) {
		// Resolve the still-open promise so the caller returns.
		pending.resolve('');
		this.continueResolve = undefined;
		this.funcContinueResolve = undefined;
	}
	// Dispose the base class at the very end.
	super.dispose();
}
/// Override.
/// Initializes the machine.
/// When ready it emits this.emit('initialized') or this.emit('error', Error(...));
/// The successful emit takes place in 'onConnect' which should be called
/// by 'doInitialization' after a successful connect.
/// Base implementation: intentionally empty.
public async doInitialization(): Promise<void> {
}
/**
 * Call this from 'doInitialization' when a successful connection
 * has been opened to the Remote.
 * Sends CMD_INIT, loads the executable, selects the memory model from
 * the reported machine type, sets the launch address and fetches the
 * initial registers.
 * @emits this.emit('initialized') or this.emit('error', Error(...))
 */
protected async onConnect(): Promise<void> {
	try {
		// Get configuration
		const resp = await this.sendDzrpCmdInit();
		if (resp.error)
			throw Error(resp.error);
		// Load executable
		await this.loadExecutable();
		Z80Registers.decoder = new Z80RegistersStandardDecoder();
		// Set memory model according to the reported machine type
		switch (resp.machineType) {
			case DzrpMachineType.ZX48K:
				// ZX Spectrum 48K
				this.memoryModel = new Zx48MemoryModel();
				break;
			case DzrpMachineType.ZX128K:
				// ZX Spectrum 128K
				this.memoryModel = new Zx128MemoryModel();
				break;
			case DzrpMachineType.ZXNEXT:
				// ZxNext: 8x8k banks
				this.memoryModel = new ZxNextMemoryModel();
				break;
			default:
				// Error: Unknown type.
				// (Fix: removed the unreachable 'break' that followed this 'throw'.)
				throw Error("Unknown machine type " + resp.machineType + " received.");
		}
		this.memoryModel.init();
		Labels.convertLabelsTo(this.memoryModel);
		// Set Program Counter to execAddress
		await Remote.setLaunchExecAddress();
		// Get initial registers
		await this.getRegistersFromEmulator();
		// Ready
		const text = "'" + resp.programName + "' initialized.";
		this.emit('initialized', text);
	}
	catch (err) {
		this.emit('error', err);
	}
}
/**
 * Override.
 * Stops the emulator.
 * This will disconnect e.g. any socket and un-use all data.
 * Called e.g. when vscode sends a disconnectRequest.
 * Base implementation: intentionally empty.
 */
public async disconnect(): Promise<void> {
}
/**
 * Retrieves the current register values from the Remote and
 * stores them in the Z80Registers cache.
 */
public async getRegistersFromEmulator(): Promise<void> {
	// Fetch the register data via DZRP...
	const regData = await this.sendDzrpCmdGetRegisters();
	// ...and put it into the cache.
	Z80Registers.setCache(regData);
}
/**
 * Execute specific commands.
 * Used to send (for testing) specific DZRP commands to the ZXNext.
 * Supported commands: help, cmd_init, cmd_continue, cmd_pause,
 * cmd_get_registers, cmd_set_register, cmd_write_bank, cmd_read_mem,
 * cmd_write_mem, cmd_set_slot, cmd_get_tbblue_reg, cmd_get_sprites_palette,
 * cmd_get_sprites_clip_window_and_control, cmd_set_breakpoints,
 * cmd_restore_mem, cmd_add_breakpoint, cmd_remove_breakpoint.
 * @param cmd The command plus space-separated arguments, e.g. 'cmd_continue'.
 * @returns A Promise with a return string, i.e. the decoded response.
 * @throws If the command is unknown or has the wrong number of arguments.
 */
public async dbgExec(cmd: string): Promise<string> {
// Split into command name and arguments.
const cmdArray = cmd.split(' ');
const cmd_name = cmdArray.shift();
if (cmd_name == "help") {
return "Use e.g. 'cmd_init' to send a DZRP command to the ZX Next.";
}
let response = "";
if (cmd_name == "cmd_init") {
const resp = await this.sendDzrpCmdInit();
// NOTE(review): the quote after the DZRP version looks unbalanced — cosmetic only.
response = "Program: '" + resp.programName + "', DZRP Version: " + resp.dzrpVersion + "', machineType: " + resp.machineType + ", Error: " + resp.error;
}
else if (cmd_name == "cmd_continue") {
await this.sendDzrpCmdContinue();
}
else if (cmd_name == "cmd_pause") {
await this.sendDzrpCmdPause();
}
else if (cmd_name == "cmd_get_registers") {
const regs = await this.sendDzrpCmdGetRegisters();
// Registers: print each of the 14 register words.
const regNames = ["PC", "SP", "AF", "BC", "DE", "HL", "IX", "IY", "AF'", "BC'", "DE'", "HL'", "IR", "IM"];
let i = 0;
for (const name of regNames) {
const value = regs[i];
response += "\n" + name + "(" + i + "): 0x" + Utility.getHexString(value, 4) + "/" + value;
i++;
}
// Slots
i++; // Skip reserved
const slotCount = regs[i++];
response += '\nslots.length=' + slotCount;
for (let k = 0; k < slotCount; k++)
response += '\n slots[' + k + ']=' + regs[k + i];
}
else if (cmd_name == "cmd_set_register") {
if (cmdArray.length < 2) {
// Error
throw Error("Expecting 2 parameters: regIndex and value.");
}
const regIndex = Utility.parseValue(cmdArray[0]);
const value = Utility.parseValue(cmdArray[1]);
await this.sendDzrpCmdSetRegister(regIndex as Z80_REG, value);
}
else if (cmd_name == "cmd_write_bank") {
if (cmdArray.length < 1) {
// Error
throw Error("Expecting 1 parameter: 8k bank number [0-223].");
}
const bank = Utility.parseValue(cmdArray[0]);
// Create test data: an 8k pattern 0x00..0xFF repeated.
const data = new Uint8Array(0x2000);
for (let i = 0; i < data.length; i++)
data[i] = i & 0xFF;
await this.sendDzrpCmdWriteBank(bank, data);
}
else if (cmd_name == "cmd_read_mem") {
if (cmdArray.length < 2) {
// Error
throw Error("Expecting at least 2 parameters: address and count.");
}
const addr = Utility.parseValue(cmdArray[0]);
const count = Utility.parseValue(cmdArray[1]);
const data = await this.sendDzrpCmdReadMem(addr, count);
// Print as hex dump.
response = Utility.getHexString(addr, 4) + "h: ";
for (let i = 0; i < data.length; i++)
response += Utility.getHexString(data[i], 2) + "h ";
}
else if (cmd_name == "cmd_write_mem") {
if (cmdArray.length < 2) {
// Error
throw Error("Expecting at least 2 parameters: address and memory content list.");
}
const addr = Utility.parseValue(cmdArray.shift()!);
// The remaining arguments are the byte values to write.
const length = cmdArray.length;
const data = new Uint8Array(length);
for (let i = 0; i < data.length; i++)
data[i] = Utility.parseValue(cmdArray[i]) & 0xFF;
await this.sendDzrpCmdWriteMem(addr, data);
}
else if (cmd_name == "cmd_set_slot") {
if (cmdArray.length != 2) {
// Error
throw Error("Expecting 2 parameters: slot and bank.");
}
const slot = Utility.parseValue(cmdArray[0]);
const bank = Utility.parseValue(cmdArray[1]);
await this.sendDzrpCmdSetSlot(slot, bank);
}
else if (cmd_name == "cmd_get_tbblue_reg") {
if (cmdArray.length < 1) {
// Error
throw Error("Expecting 1 parameter: register.");
}
const reg = Utility.parseValue(cmdArray[0]);
const value = await this.sendDzrpCmdGetTbblueReg(reg);
response += "\nReg[" + Utility.getHexString(reg, 2) + "h/" + reg + "]: " + Utility.getHexString(value, 2) + "h/" + value;
}
else if (cmd_name == "cmd_get_sprites_palette") {
if (cmdArray.length < 1) {
// Error
throw Error("Expecting 1 parameter: palette number (0 or 1).");
}
const paletteNumber = Utility.parseValue(cmdArray[0]);
const palette = await this.sendDzrpCmdGetSpritesPalette(paletteNumber);
// Print all palette entries.
for (let i = 0; i < palette.length; i++)
response += Utility.getHexString(palette[i], 3) + " ";
}
else if (cmd_name == "cmd_get_sprites_clip_window_and_control") {
const clip = await this.sendDzrpCmdGetSpritesClipWindow();
response += "xl=" + clip.xl + ", xr=" + clip.xr + ", yt=" + clip.yt + ", yb=" + clip.yb + ", control=" + Utility.getBitsString(clip.control, 8);
}
else if (cmd_name == "cmd_set_breakpoints") {
// Note: This command supports only the setting of 1 breakpoint:
// "cmd_set_breakpoints address bank"
if (cmdArray.length != 2) {
// Error
throw Error("Expecting 2 parameters: address and bank.");
}
const address = Utility.parseValue(cmdArray[0]);
const bank = Utility.parseValue(cmdArray[1]);
// Create data to send: a long address (bank+1 in the upper word).
const longAddress = address + ((bank + 1) << 16);
const memValues = await this.sendDzrpCmdSetBreakpoints([longAddress]);
const value = memValues[0];
response += '\n Response: 0x' + Utility.getHexString(value, 2) + '/' + value;
}
else if (cmd_name == "cmd_restore_mem") {
// Note: This command supports only the restoring of 1 breakpoint:
// "cmd_restore_mem address bank value"
if (cmdArray.length != 3) {
// Error
throw Error("Expecting 3 parameters: address, bank and value.");
}
const address = Utility.parseValue(cmdArray[0]);
const bank = Utility.parseValue(cmdArray[1]);
const value = Utility.parseValue(cmdArray[2]);
// Create data to send: a long address (bank+1 in the upper word).
const longAddress = address + ((bank + 1) << 16);
await this.sendDzrpCmdRestoreMem([{address: longAddress, value}]);
}
else if (cmd_name == "cmd_add_breakpoint") {
// "cmd_add_breakpoint address bank"
if (cmdArray.length != 2) {
// Error
throw Error("Expecting 2 parameters: address and bank.");
}
const address = Utility.parseValue(cmdArray[0]);
const bank = Utility.parseValue(cmdArray[1]);
// Create data to send: a long address (bank+1 in the upper word).
const longAddress = address + ((bank + 1) << 16);
const bp: GenericBreakpoint = {
address: longAddress
};
await this.sendDzrpCmdAddBreakpoint(bp);
response += '\n Breakpoint ID: ' + bp.bpId;
}
else if (cmd_name == "cmd_remove_breakpoint") {
// "cmd_remove_breakpoint breakpointId"
if (cmdArray.length != 1) {
// Error
throw Error("Expecting 1 parameter: breakpoint ID.");
}
const bp: GenericBreakpoint = {
address: -1, // not used
bpId: Utility.parseValue(cmdArray[0])
};
// Create data to send
await this.sendDzrpCmdRemoveBreakpoint(bp);
}
/*
else if (cmd_name=="cmd_get_sprites") {
if (cmdArray.length<2) {
// Error
return "Expecting 2 parameters: sprite start index and count.";
}
const index=Utility.parseValue(cmdArray[0]);
const count=Utility.parseValue(cmdArray[1]);
const data=await this.sendDzrpCmdGetSprites(index, count);
// Print
for (let i=0; i<data.length; i++) {
if (i%5==0)
response+="\nSprite "+(i/5)+": ";
//const value: number=data[0];
//response+=Utility.getHexString(value, 2)+" ";
response+=data[i]+" ";
}
}
*/
else {
throw Error("Error: not supported.");
}
// Return string
let result = "Sent " + cmd_name.toUpperCase() + ".\nResponse received";
if (response)
result += ": " + response;
else
result += ".";
return result;
}
/**
 * Sets the value for a specific register.
 * Reads the value from the emulator and returns it in the promise.
 * Note: if in reverse debug mode the function should do nothing and the promise should return the previous value.
 * @param register The register to set, e.g. "BC" or "A'". Note: the register name has to exist. I.e. it should be tested before.
 * @param value The new register value.
 * @return Promise with the "real" register value.
 */
public async setRegisterValue(register: string, value: number): Promise<number> {
	// Map the register name to its enum index.
	const regIndex = Z80RegistersClass.getEnumFromName(register) as number;
	Utility.assert(regIndex != undefined);
	// Write the new value to the remote.
	await this.sendDzrpCmdSetRegister(regIndex as Z80_REG, value);
	// Re-read all registers so the cache reflects the change.
	await this.getRegistersFromEmulator();
	// Read back the value as the emulator now reports it.
	return Z80Registers.getRegValueByName(register);
}
/**
 * Sets the slot to a specific bank.
 * Used by the unit tests.
 * Thin wrapper around the DZRP CMD_SET_SLOT command.
 * @param slotIndex The slot to set.
 * @param bank The bank for the slot.
 */
public async setSlot(slotIndex: number, bank: number): Promise<void> {
await this.sendDzrpCmdSetSlot(slotIndex, bank);
}
/**
* Returns the array of watchpoint for a given address.
* Normally the array is empty or contains only 1 watchpoint.
* But it could happen that several watchpoints are defined for the same address.
* @param The address to check. Could be a long address.
* @return An array with all corresponding watchpoints. Usually only 1 or an empty array.
*/
protected getWatchpointsByAddress(address: number): Array<GenericWatchpoint> {
const address64 = address & 0xFFFF;
const arr = new Array<GenericWatchpoint>();
const slots = this.getSlots()!;
for (const wp of this.addedWatchpoints) {
// Check if address falls in range
const addr = wp.address;
const addr64 = addr & 0xFFFF;
if (address64 < addr64 || address64 >= addr64 + wp.size) // Note: wrap around is ignored
continue;
// Check if wp start address is currently paged in
const bank = Z80Registers.getBankFromAddress(addr);
if (bank >= 0) {
const slotNr = Z80Registers.getSlotFromAddress(addr);
const slotBank = slots[slotNr];
if (bank != slotBank)
continue; // Wrong bank -> Next
}
// WP fits
arr.push(wp);
}
return arr;
}
/**
 * Searches the 'breakpoints', the 'assertionBreakpoints' and the
 * 'logpoints' arrays for the given breakpoint address.
 * In fact searches tmpBreakpoints. Therefore make sure you called
 * createTemporaryBreakpoints before.
 * @param bpAddress the breakpoint address to search (!=0).
 * @returns The found GenericBreakpoints (or RemoteBreakPoints) or
 * [] if no breakpoint found.
 */
protected getBreakpointsByAddress(bpAddress: number): Array<GenericBreakpoint> {
	// First try the (possibly long) address directly.
	const direct = this.tmpBreakpoints.get(bpAddress);
	if (direct)
		return direct;
	// Then fall back to the 64k address; [] if nothing is found.
	return this.tmpBreakpoints.get(bpAddress & 0xFFFF) || [];
}
/**
 * Creates a temporary map from the breakpoints, logpoints and assertions.
 * Each map entry holds an array of all breakpoints that share the same
 * address (normally just one).
 */
protected createTemporaryBreakpoints() {
	// Start from an empty map.
	this.tmpBreakpoints.clear();
	// Breakpoints from the enabled logpoints.
	const logPointBps = this.getEnabledLogpoints();
	// Assertion breakpoints, only if enabled.
	const assertionBps = this.assertionBreakpointsEnabled ? this.assertionBreakpoints : [];
	// Merge everything and register each breakpoint in the map.
	for (const bp of [...this.breakpoints, ...logPointBps, ...assertionBps])
		this.addTmpBreakpoint(bp);
}
/**
 * Adds a breakpoint to the temporary map.
 * Is called by createTemporaryBreakpoints or if a BP
 * is created during a running debugged program.
 */
protected addTmpBreakpoint(bp: GenericBreakpoint) {
	const addr = bp.address;
	// Get (or create) the array of breakpoints sharing this address.
	let entry = this.tmpBreakpoints.get(addr);
	if (entry == undefined) {
		entry = [];
		this.tmpBreakpoints.set(addr, entry);
	}
	entry.push(bp);
}
/**
 * Removes a breakpoint from the temporary map.
 * Is called by createTemporaryBreakpoints or if a BP
 * is removed during a running debugged program.
 */
protected removeTmpBreakpoint(bp: GenericBreakpoint) {
	const addr = bp.address;
	const entries = this.tmpBreakpoints.get(addr)!;
	Utility.assert(entries);
	// Find the entry with the same breakpoint ID.
	const index = entries.findIndex(entry => entry.bpId == bp.bpId);
	if (index >= 0) {
		// Remove it.
		entries.splice(index, 1);
		// Drop the whole map entry if no breakpoint is left at this address.
		if (entries.length == 0)
			this.tmpBreakpoints.delete(addr);
	}
}
/**
 * Takes a breakpoint and checks if its condition is true and if
 * log needs to be done.
 * @param bp The GenericBreakpoint.
 * @returns [condition, log]
 * condition:
 * - undefined = Condition not met
 * - otherwise: The condition text or '' if no condition was set.
 * log:
 * - undefined: No log breakpoint or condition not met
 * - otherwise: The logpoint text (and condition met).
 */
protected checkConditionAndLog(bp: GenericBreakpoint | undefined): {condition: string | undefined, log: string | undefined} {
	// No breakpoint at all: unconditional break, no log.
	if (!bp)
		return {condition: '', log: undefined};
	// No condition set: treated as met.
	if (!bp.condition)
		return {condition: '', log: bp.log};
	// Evaluate the condition.
	// REMARK: If I would allow 'await evalExpression' I could also allow e.g. memory checks
	let evalCond;
	try {
		evalCond = Utility.evalExpression(bp.condition, true);
	}
	catch (e) {
		// Extend message
		e.message = "Evaluation condition '" + bp.condition + "': " + (e.message || "Unknown error");
		throw e;
	}
	return (evalCond != 0)
		? {condition: bp.condition, log: bp.log}
		: {condition: undefined, log: bp.log};
}
/**
 * Constructs a human readable break-reason-string from the break number, data and
 * an already existing reason string.
 * @param breakNumber E.g. BREAK_REASON_NUMBER.WATCHPOINT_READ.
 * @param breakAddress E.g. the breakpoint or the watchpoint address.
 * @param condition An additional condition or '' if no condition.
 * @param breakReasonString An already existing (part of the) reason string.
 * The string transmitted from the remote.
 * @returns A Promise to the reason string, e.g. "Breakpoint hit. A==4."
 */
protected async constructBreakReasonString(breakNumber: number, breakAddress: number, condition: string, breakReasonString: string): Promise<string> {
	Utility.assert(condition != undefined);
	if (breakReasonString == undefined)
		breakReasonString = '';
	switch (breakNumber) {
		case BREAK_REASON_NUMBER.NO_REASON:
			return "";
		case BREAK_REASON_NUMBER.MANUAL_BREAK:
			return "Manual break.";
		case BREAK_REASON_NUMBER.BREAKPOINT_HIT: {
			// Was it an ASSERTION?
			const matching = this.assertionBreakpoints.filter(abp => abp.address == breakAddress);
			for (const abp of matching) {
				if (condition == abp.condition) {
					const assertionCond = Utility.getAssertionFromCondition(condition);
					const replaced = Utility.replaceVarsWithValues(assertionCond);
					return "Assertion failed: " + replaced;
				}
			}
			// No: a normal breakpoint. Upper word of a long address holds bank+1.
			const addrString = Utility.getHexString(breakAddress & 0xFFFF, 4);
			const bank = breakAddress >>> 16;
			const bankString = (bank != 0) ? " (bank=" + (bank - 1).toString() + ")" : "";
			let reason = "Breakpoint hit @" + addrString + "h" + bankString + ".";
			if (condition)
				reason += " Condition: " + condition;
			return reason;
		}
		case BREAK_REASON_NUMBER.WATCHPOINT_READ:
		case BREAK_REASON_NUMBER.WATCHPOINT_WRITE: {
			// Watchpoint: include any labels known for the address.
			const accessType = (breakNumber == BREAK_REASON_NUMBER.WATCHPOINT_READ) ? "read" : "write";
			let reason = "Watchpoint " + accessType + " access at address 0x" + Utility.getLongAddressString(breakAddress);
			const labels = Labels.getLabelsPlusIndexForNumber64k(breakAddress);
			if (labels.length > 0)
				reason += " (" + labels.join(', ') + ")";
			reason += ". " + breakReasonString;
			return reason;
		}
		default:
			// Pass the transmitted reason through unchanged.
			return breakReasonString;
	}
}
/**
 * This method is called before a step (stepOver, stepInto, stepOut,
 * continue, stepBack, etc.) is called.
 * Rebuilds the temporary breakpoint map, clears the pause flag and
 * (re)starts the vscode yield timer.
 */
public startProcessing() {
this.createTemporaryBreakpoints();
// Reset flag
this.pauseStep = false;
// Start timer
this.timeWait = new TimeWait(1000, 200, 100); // Original note: "Every second for 10ms" — exact meaning of (1000, 200, 100) depends on the TimeWait ctor; TODO confirm
}
/**
* This method is called after a step (stepOver, stepInto, stepOut,
* continue, stepBack, etc.) is called.
*/
/*
public stopProcessing() {
}
*/
/**
 * Evaluates the breakpoint condition and log (logpoint).
 * Checks also pauseStep and returns '' if it is true.
 * @param breakNumber The break reason as number, e.g. BREAK_REASON_NUMBER.BREAKPOINT_HIT
 * @param breakAddress The address of the breakpoint (in future this could also be the address of a watchpoint).
 * @returns undefined or the condition text.
 * If the breakpoint condition is not true: undefined is returned.
 * If the condition is true:
 * - If a log is present the logtext is evaluated and a 'debug_console' with the text will be emitted. 'undefined' is returned.
 * - If no log is present the condition text is returned.
 * All in all:
 * If undefined is returned no break should be done.
 * If a text is returned the bp condition was true and a break should be done.
 *
 * The correctedBreakNumber is normally the breakNumber
 * that has been given. But in some cases a NO_REASON
 * might be turned into a BREAKPOINT_HIT.
 */
protected async evalBpConditionAndLog(breakNumber: number, breakAddress: number): Promise<{condition: string | undefined, correctedBreakNumber: number}> {
// Check breakReason, i.e. check if it was a watchpoint.
let condition;
let correctedBreakNumber = breakNumber;
switch (breakNumber) {
case BREAK_REASON_NUMBER.WATCHPOINT_READ:
case BREAK_REASON_NUMBER.WATCHPOINT_WRITE:
// Check if watchpoint really exists, i.e. it could be that a watchpoint for a wrong bank was hit.
// If no watchpoint is found condition stays undefined.
const wps = this.getWatchpointsByAddress(breakAddress);
for (const wp of wps) {
let found = false;
if (breakNumber == BREAK_REASON_NUMBER.WATCHPOINT_READ) {
found = wp.access.includes('r');
}
else {
// WATCHPOINT_WRITE
found = wp.access.includes('w');
}
if (found) {
// REMARK: evaluate condition
// Condition not used at the moment
condition = '';
break;
}
}
break;
case BREAK_REASON_NUMBER.NO_REASON:
case BREAK_REASON_NUMBER.BREAKPOINT_HIT:
// Get corresponding breakpoint
const bps = this.getBreakpointsByAddress(breakAddress);
// Note: If breakAddress is not found (e.g. break in wrong bank) then bps is empty.
// This results in condition being undefined on return which in turn
// results in another continue.
// Loop over all matching breakpoints (normally only one, but could be 2 or more. E.g. if manual BP is at the same point as a LOGPOINT)
for (const bp of bps) {
// Check for condition
let {condition: cond, log} = this.checkConditionAndLog(bp);
//condition=cond;
// Emit log?
if (cond != undefined && log) {
// Convert the logpoint text (may read registers/memory).
const evalLog = await Utility.evalLogString(log);
// Print to the debug console.
this.emit('debug_console', "Log: " + evalLog);
// Don't eval condition again: a logpoint does not break.
cond = undefined;
}
if (cond != undefined) {
// At least one break condition found
condition = cond;
correctedBreakNumber = BREAK_REASON_NUMBER.BREAKPOINT_HIT;
//break;
}
}
// Handle continue-breakpoints
if (breakNumber == BREAK_REASON_NUMBER.NO_REASON) {
// Only if other breakpoints not found or condition is false
if (condition == undefined) {
// Temporary breakpoint hit.
condition = '';
}
}
break;
case BREAK_REASON_NUMBER.STEPPING_NOT_ALLOWED:
// Flow through
default:
// Another reason, e.g. manual break
condition = ''; // Do a break.
}
// Check for pause: a manual pause overrides a pending continue.
if (correctedBreakNumber == BREAK_REASON_NUMBER.NO_REASON || condition == undefined) {
// Check for manual pause
if (this.pauseStep) {
condition = ''; // Break
correctedBreakNumber = BREAK_REASON_NUMBER.MANUAL_BREAK;
}
}
return {condition, correctedBreakNumber};
}
/**
 * 'continue' debugger program execution.
 * Installs funcContinueResolve (invoked by the PAUSE notification) and
 * sends CMD_CONTINUE. The callback re-continues as long as no break
 * condition is met; only then is the returned promise resolved.
 * @returns A Promise with a string containing the break reason.
 */
public async continue(): Promise<string> {
return new Promise<string>(async resolve => {
// Remember the promise resolve for dispose
Utility.assert(!this.continueResolve);
this.continueResolve = new PromiseCallbacks<string>(this, 'continueResolve', resolve);
// Use a custom function here to evaluate breakpoint condition and log string.
const funcContinueResolve = async ({breakNumber, breakAddress, breakReasonString}) => {
try {
// Give vscode a little time
// await this.timeWait.waitAtInterval(); // REMARK: I think I don't need it anymore
// Get registers
await this.getRegistersFromEmulator();
// Check for break condition
const {condition, correctedBreakNumber} = await this.evalBpConditionAndLog(breakNumber, breakAddress);
// Check for continue
if (condition == undefined) {
// No break condition met: re-install the callback and continue.
this.funcContinueResolve = funcContinueResolve;
await this.sendDzrpCmdContinue();
}
else {
// Construct break reason string to report
breakReasonString = await this.constructBreakReasonString(correctedBreakNumber, breakAddress, condition, breakReasonString);
// Update registers and call stack
await this.getRegistersFromEmulator();
await this.getCallStackFromEmulator();
// return
this.continueResolve!.resolve(breakReasonString);
}
}
catch (e) {
// On error: still try to update registers/call stack, then resolve with the error text.
try {
await this.getRegistersFromEmulator();
await this.getCallStackFromEmulator();
} catch (e) {}; // Ignore if error already happened
const reason: string = e.message;
this.continueResolve!.resolve(reason);
}
};
// Send 'run' command
this.funcContinueResolve = funcContinueResolve;
await this.sendDzrpCmdContinue();
});
}
/**
 * 'pause' the debugger.
 * Sets the pauseStep flag (checked during step loops, e.g. step-out)
 * and sends the DZRP pause command to the remote.
 */
public async pause(): Promise<void> {
// Set this flag to pause a stepOut etc
this.pauseStep = true;
// Send 'pause' command
await this.sendDzrpCmdPause();
}
/**
 * 'step over' an instruction in the debugger.
 * Installs funcContinueResolve (invoked by the PAUSE notification) and
 * sends CMD_CONTINUE with the calculated step breakpoints.
 * @param stepOver true=step-over, false=step-into.
 * @returns A Promise with a string with the break reason.
 * Or 'undefined' if no reason.
 */
public async stepOver(stepOver = true): Promise<string | undefined> {
	return new Promise<string | undefined>(async resolve => {
		// Remember the promise resolve for dispose
		Utility.assert(!this.continueResolve);
		this.continueResolve = new PromiseCallbacks<string>(this, 'continueResolve', resolve);
		// Prepare for break: This function is called by the PAUSE (break) notification:
		const funcContinueResolve = async ({breakNumber, breakAddress, breakReasonString}) => {
			// Robustness fix: previously an exception in this callback left the
			// promise unresolved forever; now it resolves with the error text
			// (consistent with 'continue' and 'stepOut').
			try {
				// Give vscode a little time
				await this.timeWait.waitAtInterval();
				// Get registers
				await this.getRegistersFromEmulator();
				// Check for break condition
				const {condition, correctedBreakNumber} = await this.evalBpConditionAndLog(breakNumber, breakAddress);
				// Check for continue
				if (condition == undefined) {
					// Continue with the ORIGINAL step breakpoints bp1/bp2
					// (captured from the enclosing scope on purpose).
					this.funcContinueResolve = funcContinueResolve;
					await this.sendDzrpCmdContinue(bp1, bp2);
				}
				else {
					// Construct break reason string to report
					breakReasonString = await this.constructBreakReasonString(correctedBreakNumber, breakAddress, condition, breakReasonString);
					// Update the call stack
					await this.getCallStackFromEmulator();
					// return
					this.continueResolve!.resolve(breakReasonString);
				}
			}
			catch (e) {
				// Still try to update registers/call stack, then resolve with the error text.
				try {
					await this.getRegistersFromEmulator();
					await this.getCallStackFromEmulator();
				} catch (e2) {}	// Ignore if error already happened
				this.continueResolve!.resolve(e.message);
			}
		};
		// Calculate the breakpoints to use for step-over (or step-into)
		let [, bp1, bp2] = await this.calcStepBp(stepOver);
		// Install the callback and send command to 'continue'
		this.funcContinueResolve = funcContinueResolve;
		await this.sendDzrpCmdContinue(bp1, bp2);
	});
}
/**
 * 'step into' an instruction in the debugger.
 * Implemented as stepOver with stepOver=false.
 * @returns A Promise with a string with the break reason.
 * Or 'undefined' if no reason.
 */
public async stepInto(): Promise<string | undefined> {
return this.stepOver(false);
}
/**
 * 'step out' of current subroutine.
 * The step-out uses normal step (into) functionality and checks
 * after each step if the last instruction was some RET and
 * the stackpointer is bigger than at the beginning.
 * @returns A Promise with a string containing the break reason.
 */
public async stepOut(): Promise<string | undefined> {
	return new Promise<string | undefined>(async resolve => {
		// Remember the promise resolve for dispose
		Utility.assert(!this.continueResolve);
		this.continueResolve = new PromiseCallbacks<string>(this, 'continueResolve', resolve);
		// Get current SP
		const startSp = Z80Registers.getRegValue(Z80_REG.SP);
		// NOTE(review): prevSp is never updated after initialization, so
		// 'currSp > prevSp' below is redundant with 'currSp > startSp' —
		// possibly it was intended to track the previous step's SP.
		let prevSp = startSp;
		let prevPc = 0;
		// Use a custom function here to evaluate breakpoint condition and log string.
		const funcContinueResolve = async ({breakNumber, breakAddress, breakReasonString}) => {
			try {
				// Give vscode a little time
				await this.timeWait.waitAtInterval();
				// Get registers
				await this.getRegistersFromEmulator();
				// Check for break condition
				let {condition, correctedBreakNumber} = await this.evalBpConditionAndLog(breakNumber, breakAddress);
				// For StepOut ignore the stepping tmp breakpoints
				if (correctedBreakNumber == BREAK_REASON_NUMBER.NO_REASON)
					condition = undefined;
				// Check if instruction was a RET(I/N)
				if (condition == undefined) {
					const currSp = Z80Registers.getRegValue(Z80_REG.SP);
					if (currSp > startSp && currSp > prevSp) {
						// Something has been popped. This is to exclude unexecuted RET cc.
						const bytes = await this.readMemoryDump(prevPc, 2);
						const opcodes = bytes[0] + (bytes[1] << 8);
						if (this.isRet(opcodes)) {
							// Stop here
							condition = '';
							correctedBreakNumber = BREAK_REASON_NUMBER.NO_REASON;
						}
					}
				}
				// Check for continue
				if (condition == undefined) {
					// Calculate the breakpoints to use for step-over
					let [, bp1, bp2] = await this.calcStepBp(true);
					// Continue
					this.funcContinueResolve = funcContinueResolve;
					prevPc = Z80Registers.getPC();
					await this.sendDzrpCmdContinue(bp1, bp2);
				}
				else {
					// Construct break reason string to report
					breakReasonString = await this.constructBreakReasonString(correctedBreakNumber, breakAddress, condition, breakReasonString);
					// Update registers and call stack
					await this.getRegistersFromEmulator();
					await this.getCallStackFromEmulator();
					// return
					this.continueResolve!.resolve(breakReasonString);
				}
			}
			catch (e) {
				// Still try to update registers/call stack; ignore a secondary error
				// (fix: this was previously unguarded and could mask the real reason).
				try {
					await this.getRegistersFromEmulator();
					await this.getCallStackFromEmulator();
				} catch (e2) {}
				// Fix: was 'const reason: string = e;' which stored the raw error
				// object instead of its message (inconsistent with 'continue').
				const reason: string = e.message;
				this.continueResolve!.resolve(reason);
			}
		};
		// Calculate the breakpoints to use for step-over
		let [, bp1, bp2] = await this.calcStepBp(true);
		// Send 'run' command
		this.funcContinueResolve = funcContinueResolve;
		prevPc = Z80Registers.getPC();
		await this.sendDzrpCmdContinue(bp1, bp2);
	});
}
/**
 * Tests if the opcode is a RET instruction.
 * @param opcodes Up to 4 opcode bytes, lowest byte first. E.g. 0xe52a785c
 * @returns true for RET, RETI, RETN and conditional RET cc; false otherwise.
 */
public isRet(opcodes: number): boolean {
	const firstByte = opcodes & 0xFF;
	// Plain RET (0xC9)?
	if (firstByte == 0xC9)
		return true;
	// ED prefix: RETI (0xED 0x4D) or RETN (0xED 0x45)?
	if (firstByte == 0xED) {
		const secondByte = (opcodes >>> 8) & 0xFF;
		return (secondByte == 0x4D) || (secondByte == 0x45);
	}
	// Conditional RET cc: bit pattern 11ccc000.
	return (firstByte & 0b11000111) == 0b11000000;
}
/**
 * Sets one watchpoint in the remote.
 * Watchpoints result in a break in the program run if one of the addresses is written or read to.
 * Promise is resolved when the watchpoint has been set.
 * @param wp The watchpoint to set. Will set 'bpId' in the 'watchPoint'.
 */
public async setWatchpoint(wp: GenericWatchpoint): Promise<void> {
// Remember watchpoint (used by getWatchpointsByAddress)
this.addedWatchpoints.add(wp);
// Forward request
await this.sendDzrpCmdAddWatchpoint(wp.address, wp.size, wp.access);
}
/**
 * Removes one watchpoint from the remote.
 * Promise is resolved when the watchpoint has been removed.
 * @param wp The watchpoint to remove. Will set 'bpId' in the 'watchPoint' to undefined.
 */
public async removeWatchpoint(wp: GenericWatchpoint): Promise<void> {
// Forget watchpoint
this.addedWatchpoints.delete(wp);
// Forward request
await this.sendDzrpCmdRemoveWatchpoint(wp.address, wp.size, wp.access);
}
/**
* Enables/disables all assertion breakpoints set from the sources.
* @param enable true=enable, false=disable.
*/
public async enableAssertionBreakpoints(enable: boolean): Promise<void> {
for (let abp of this.assertionBreakpoints) {
if (enable) {
// Set breakpoint
if (!abp.bpId) {
await this.sendDzrpCmdAddBreakpoint(abp); // Sets sbp.bpId
}
}
else {
// Remove breakpoint
if (abp.bpId) {
await this.sendDzrpCmdRemoveBreakpoint(abp);
abp.bpId = undefined;
}
}
}
this.assertionBreakpointsEnabled = enable;
}
/**
* Enables/disable all given points.
* Called at startup and once by enableLogpointGroup (to turn a group on or off).
* Promise is called after the last logpoint is set.
* @param logpoints A list of addresses to put a log breakpoint on.
* @param enable Enable or disable the logpoints.
* @returns A promise that is called after the last watchpoint is set.
*/
public async enableLogpoints(logpoints: Array<GenericBreakpoint>, enable: boolean): Promise<void> {
// Logpoints are treated as normal breakpoints but without a reference to the source file.
// This is not necessary as on a logpoint the execution simply continues after
// logging.
for (let lp of logpoints) {
if (enable) {
// Set breakpoint
if (!lp.bpId) {
await this.sendDzrpCmdAddBreakpoint(lp);
}
}
else {
// Remove breakpoint
if (lp.bpId) {
await await this.sendDzrpCmdRemoveBreakpoint(lp);
lp.bpId = undefined;
}
}
}
}
/**
 * Sets one breakpoint in the Remote.
 * Sets the breakpoint ID (bpId) in bp.
 * @param bp The breakpoint.
 * @returns The used breakpoint ID. 0 if no breakpoint is available anymore.
 */
public async setBreakpoint(bp: RemoteBreakpoint): Promise<number> {
	// DZRP supports only "real" PC breakpoints (address >= 0).
	if (bp.address < 0) {
		this.emit('warning', 'DZRP does only support PC breakpoints.');
		bp.address = -1;	// Mark as unverified
		return 0;
	}
	// Forward to the remote; this assigns bp.bpId.
	await this.sendDzrpCmdAddBreakpoint(bp);
	if (bp.bpId === 0)
		bp.address = -1;	// Could not be set: mark as unverified
	// Remember the breakpoint locally.
	this.breakpoints.push(bp);
	// While the program is running it additionally goes into the temporary list.
	if (this.funcContinueResolve)
		this.addTmpBreakpoint(bp);
	return bp.bpId;
}
/**
 * Clears one breakpoint.
 * @param bp The breakpoint to remove. Must have been set before.
 */
public async removeBreakpoint(bp: RemoteBreakpoint): Promise<void> {
	// Take the breakpoint out of the local list.
	const index = this.breakpoints.indexOf(bp);
	Utility.assert(index !== -1, 'Breakpoint should be removed but does not exist.');
	this.breakpoints.splice(index, 1);
	// While running, also remove it from the temporary list.
	if (this.funcContinueResolve)
		this.removeTmpBreakpoint(bp);
	// Forward the removal to the remote.
	await this.sendDzrpCmdRemoveBreakpoint(bp);
}
/**
 * Reads a memory dump.
 * @param address The memory start address.
 * @param size The memory size.
 * @returns A promise with an Uint8Array.
 */
public async readMemoryDump(address: number, size: number): Promise<Uint8Array> {
	// Plain delegation to the DZRP command.
	return this.sendDzrpCmdReadMem(address, size);
}
/**
 * Writes a memory dump.
 * @param address The memory start address.
 * @param dataArray The data to write.
 */
public async writeMemoryDump(address: number, dataArray: Uint8Array): Promise<void> {
	// Plain delegation to the DZRP command.
	return this.sendDzrpCmdWriteMem(address, dataArray);
}
/**
* Loads .nex or .sna files.
*/
protected async loadBin(filePath: string): Promise<void> {
// Check file extension
const ext = path.extname(filePath);
if (ext == '.sna')
await this.loadBinSna(filePath);
else if (ext == '.nex')
await this.loadBinNex(filePath);
else {
// Error: neither sna nor nex file
throw Error("File extension not supported in '" + filePath + "' with remoteType:'" + Settings.launch.remoteType + "'. Can only load .sna and .nex files.");
}
}
/**
 * Loads an object file (binary without any meta data).
 * @param filePath The absolute path to the file.
 * @param startAddress The address where the data should be loaded.
 */
protected async loadObj(filePath: string, startAddress: number): Promise<void> {
	// Read the raw binary and transfer it as a plain memory write.
	const objBuffer = fs.readFileSync(filePath);
	await this.sendDzrpCmdWriteMem(startAddress, objBuffer);
	// Note: reloading of registers/call stack is intentionally disabled here.
	//await this.getRegistersFromEmulator();
	//await this.getCallStackFromEmulator();
}
/**
 * Loads a .sna file (snapshot: memory banks, border and all registers).
 * See https://faqwiki.zxnet.co.uk/wiki/SNA_format
 * @param filePath The absolute path to the .sna file.
 */
protected async loadBinSna(filePath: string): Promise<void> {
	// Load and parse file
	const snaFile = new SnaFile();
	snaFile.readFile(filePath);
	// Set the border
	await this.sendDzrpCmdSetBorder(snaFile.borderColor);
	// Transfer each 16k memory bank as 2x 8k banks. I.e. DZRP is for ZX Next only.
	const half = MemBank16k.BANK16K_SIZE / 2;
	for (const memBank of snaFile.memBanks) {
		const bank8 = 2 * memBank.bank;
		await this.sendDzrpCmdWriteBank(bank8, memBank.data.slice(0, half));
		await this.sendDzrpCmdWriteBank(bank8 + 1, memBank.data.slice(half));
	}
	// Set the default slot/bank association (8k banks; 16k banks 5, 2, 0)
	const slotBanks = [254, 255, 10, 11, 4, 5, 0, 1];
	for (let slot = 0; slot < 8; slot++)
		await this.sendDzrpCmdSetSlot(slot, slotBanks[slot]);
	// Restore all registers from the snapshot, in the same order as before.
	const regValues: Array<[Z80_REG, number]> = [
		[Z80_REG.PC, snaFile.pc],
		[Z80_REG.SP, snaFile.sp],
		[Z80_REG.AF, snaFile.af],
		[Z80_REG.BC, snaFile.bc],
		[Z80_REG.DE, snaFile.de],
		[Z80_REG.HL, snaFile.hl],
		[Z80_REG.IX, snaFile.ix],
		[Z80_REG.IY, snaFile.iy],
		[Z80_REG.AF2, snaFile.af2],
		[Z80_REG.BC2, snaFile.bc2],
		[Z80_REG.DE2, snaFile.de2],
		[Z80_REG.HL2, snaFile.hl2],
		[Z80_REG.R, snaFile.r],
		[Z80_REG.I, snaFile.i],
		[Z80_REG.IM, snaFile.im]
	];
	for (const [reg, value] of regValues)
		await this.sendDzrpCmdSetRegister(reg, value);
}
/**
 * Loads a .nex file (memory banks, border, SP and PC).
 * See https://wiki.specnext.dev/NEX_file_format
 * @param filePath The absolute path to the .nex file.
 */
protected async loadBinNex(filePath: string): Promise<void> {
	// Load and parse file
	const nexFile = new NexFile();
	nexFile.readFile(filePath);
	// Set the border
	await this.sendDzrpCmdSetBorder(nexFile.borderColor);
	// Transfer each 16k memory bank as 2x 8k banks.
	const half = MemBank16k.BANK16K_SIZE / 2;
	for (const memBank of nexFile.memBanks) {
		Log.log("loadBinNex: Writing 16k bank " + memBank.bank);
		const bank8 = 2 * memBank.bank;
		await this.sendDzrpCmdWriteBank(bank8, memBank.data.slice(0, half));
		await this.sendDzrpCmdWriteBank(bank8 + 1, memBank.data.slice(half));
	}
	// Set the default slot/bank association (8k banks; 16k banks 5, 2, 0)
	const slotBanks = [254, 255, 10, 11, 4, 5, 0, 1];
	for (let slot = 0; slot < 8; slot++)
		await this.sendDzrpCmdSetSlot(slot, slotBanks[slot]);
	// Set the SP and PC registers
	await this.sendDzrpCmdSetRegister(Z80_REG.SP, nexFile.sp);
	await this.sendDzrpCmdSetRegister(Z80_REG.PC, nexFile.pc);
}
/**
 * Called from "-state save" command.
 * Stores all RAM, registers etc.
 * Override.
 * @param filePath The file path to store to.
 */
public async stateSave(filePath: string): Promise<void> {
	// Fetch the remote's state, compress it and write it to disk
	// (e.g. into the .tmp/states directory).
	const stateData = await this.sendDzrpCmdReadState();
	const zippedData = await gzip(stateData);
	fs.writeFileSync(filePath, zippedData);
}
/**
 * Called from "-state restore" command.
 * Restores all RAM + the registers from a former "-state save".
 * Override.
 * @param filePath The file path to restore from.
 */
public async stateRestore(filePath: string): Promise<void> {
	// Read and decompress the previously saved state.
	const zippedData = fs.readFileSync(filePath);
	const stateData = await ungzip(zippedData);
	// Transfer it to the remote.
	await this.sendDzrpCmdWriteState(stateData);
	// Refresh the cached registers and call stack.
	await this.getRegistersFromEmulator();
	await this.getCallStackFromEmulator();
}
// ZX Next related ---------------------------------
/**
 * Retrieves the TBBlue register value from the emulator.
 * @param registerNr The number of the register.
 * @returns A promise with the value of the register.
 */
public async getTbblueRegister(registerNr: number): Promise<number> {
	// Plain delegation to the DZRP command.
	return this.sendDzrpCmdGetTbblueReg(registerNr);
}
/**
 * Retrieves one of the sprites palettes from the emulator.
 * @param paletteNr 0 or 1.
 * @returns A Promise that returns a 256 element Array<number> with the palette values.
 */
public async getTbblueSpritesPalette(paletteNr: number): Promise<Array<number>> {
	// Plain delegation to the DZRP command.
	return this.sendDzrpCmdGetSpritesPalette(paletteNr);
}
/**
 * Retrieves the sprites clipping window from the emulator.
 * @returns A Promise that returns the clipping dimensions and the control byte (xl, xr, yt, yb, control).
 */
public async getTbblueSpritesClippingWindow(): Promise<{xl: number, xr: number, yt: number, yb: number, control: number}> {
	// Plain delegation to the DZRP command.
	return this.sendDzrpCmdGetSpritesClipWindow();
}
/**
 * Retrieves the sprites from the emulator.
 * @param slot The start slot.
 * @param count The number of slots to retrieve.
 * @returns A Promise with an array of sprite attribute data.
 */
public async getTbblueSprites(slot: number, count: number): Promise<Array<Uint8Array>> {
	// Plain delegation to the DZRP command.
	return this.sendDzrpCmdGetSprites(slot, count);
}
/**
 * Retrieves the sprite patterns from the emulator.
 * @param index The start index.
 * @param count The number of patterns to retrieve.
 * @returns A Promise with an array of sprite pattern data.
 */
public async getTbblueSpritePatterns(index: number, count: number): Promise<Array<Array<number>>> {
	// Plain delegation to the DZRP command.
	return this.sendDzrpCmdGetSpritePatterns(index, count);
}
//------- Send Commands -------
/**
 * Override.
 * The first command sent. Includes the version number.
 * @returns The error, program name (incl. version), dzrp version and the machine type.
 * error is 0 on success. 0xFF if version numbers not match.
 * Other numbers indicate an error on remote side.
 * NOTE(review): the doc above describes 'error' as numeric but the declared
 * type is string|undefined — confirm which is intended.
 */
protected async sendDzrpCmdInit(): Promise<{error: string | undefined, programName: string, dzrpVersion: string, machineType: DzrpMachineType}> {
Utility.assert(false);	// Base class stub: must be overridden by the transport.
return {error: undefined, dzrpVersion: "", programName: "", machineType: 0};
}
/**
 * Override.
 * The last command sent. Closes the debug session.
 */
protected async sendDzrpCmdClose(): Promise<void> {
Utility.assert(false);	// Base class stub: must be overridden by the transport.
}
/**
 * Override.
 * Sends the command to get all registers.
 * @returns An Uint16Array with the register data. Same order as in
 * 'Z80Registers.getRegisterData'.
 */
protected async sendDzrpCmdGetRegisters(): Promise<Uint16Array> {
Utility.assert(false);	// Base class stub: must be overridden by the transport.
return new Uint16Array(0);
}
/**
 * Override.
 * Sends the command to set a register value.
 * @param regIndex E.g. Z80_REG.BC or Z80_REG.A2
 * @param value A 1 byte or 2 byte value.
 */
protected async sendDzrpCmdSetRegister(regIndex: Z80_REG, value: number): Promise<void> {
Utility.assert(false);	// Base class stub: must be overridden by the transport.
}
/**
 * Override.
 * Sends the command to continue ('run') the program.
 * @param bp1Address The 64k address (not long address) of breakpoint 1 or undefined if not used.
 * @param bp2Address The 64k address (not long address) of breakpoint 2 or undefined if not used.
 */
protected async sendDzrpCmdContinue(bp1Address?: number, bp2Address?: number): Promise<void> {
Utility.assert(false);	// Base class stub: must be overridden by the transport.
}
/**
 * Override.
 * Sends the command to pause a running program.
 */
protected async sendDzrpCmdPause(): Promise<void> {
Utility.assert(false);	// Base class stub: must be overridden by the transport.
}
/**
 * Override.
 * Sends the command to add a breakpoint.
 * @param bp The breakpoint. sendDzrpCmdAddBreakpoint will set bp.bpId with the breakpoint
 * ID. If the breakpoint could not be set it is set to 0.
 */
protected async sendDzrpCmdAddBreakpoint(bp: GenericBreakpoint): Promise<void> {
Utility.assert(false);	// Base class stub: must be overridden by the transport.
}
/**
 * Override.
 * Removes a breakpoint from the list.
 * @param bp The breakpoint to remove.
 */
protected async sendDzrpCmdRemoveBreakpoint(bp: GenericBreakpoint): Promise<void> {
Utility.assert(false);	// Base class stub: must be overridden by the transport.
}
/**
 * Override.
 * Sends the command to add a watchpoint.
 * @param address The watchpoint long address.
 * @param size The size of the watchpoint. address+size-1 is the last address for the watchpoint.
 * @param access 'r', 'w' or 'rw'.
 * @throws Always, unless overridden: watchpoints are an optional capability.
 */
protected async sendDzrpCmdAddWatchpoint(address: number, size: number, access: string): Promise<void> {
throw Error("Watchpoints not supported!");
}
/**
 * Override.
 * Sends the command to remove a watchpoint for an address range.
 * @param address The watchpoint long address.
 * @param size The size of the watchpoint. address+size-1 is the last address for the watchpoint.
 * @param access 'r', 'w' or 'rw'.
 * @throws Always, unless overridden: watchpoints are an optional capability.
 */
protected async sendDzrpCmdRemoveWatchpoint(address: number, size: number, access: string): Promise<void> {
throw Error("Watchpoints not supported!");
}
/**
 * Override.
 * Sends the command to retrieve a memory dump.
 * @param address The memory start address.
 * @param size The memory size.
 * @returns A promise with an Uint8Array.
 */
protected async sendDzrpCmdReadMem(address: number, size: number): Promise<Uint8Array> {
Utility.assert(false);	// Base class stub: must be overridden by the transport.
return new Uint8Array(0);
}
/**
 * Override.
 * Sends the command to write a memory dump.
 * @param address The memory start address (64k).
 * @param dataArray The data to write.
 */
public async sendDzrpCmdWriteMem(address: number, dataArray: Buffer | Uint8Array): Promise<void> {
Utility.assert(false);	// Base class stub: must be overridden by the transport.
}
/**
 * Override.
 * Sends the command to write a memory bank.
 * @param bank 8k memory bank number.
 * @param dataArray The data to write.
 * @throws An exception if e.g. the bank size does not match.
 */
public async sendDzrpCmdWriteBank(bank: number, dataArray: Buffer | Uint8Array): Promise<void> {
Utility.assert(false);	// Base class stub: must be overridden by the transport.
}
/**
 * Override.
 * Sends the command to set a slot/bank associations (8k banks).
 * @param slot The slot to set
 * @param bank The 8k bank to associate the slot with.
 * @returns A Promise with an error. An error can only occur on real HW if the slot with dezogif is overwritten.
 * NOTE(review): 'error' is returned as a number (0 here); confirm the
 * error-code convention against the overriding transports.
 */
public async sendDzrpCmdSetSlot(slot: number, bank: number): Promise<number> {
Utility.assert(false);	// Base class stub: must be overridden by the transport.
return 0;
}
/**
 * Override.
 * Sends the command to read the current state of the machine.
 * I.e. memory, registers etc.
 * @returns A Promise with state data. Format is unknown (remote specific).
 * Data will just be saved.
 * @throws Always, unless overridden: state save is an optional capability.
 */
public async sendDzrpCmdReadState(): Promise<Uint8Array> {
throw Error("Read state not supported!");
//return new Uint8Array();
}
/**
 * Override.
 * Sends the command to write a previously saved state to the remote.
 * I.e. memory, registers etc.
 * @param stateData The state data. Format is unknown (remote specific).
 * @throws Always, unless overridden: state restore is an optional capability.
 */
public async sendDzrpCmdWriteState(stateData: Uint8Array): Promise<void> {
throw Error("Write state not supported!");
}
/**
 * Override.
 * Returns the value of one TBBlue register.
 * @param register The Tbblue register.
 * @returns A promise with the value.
 * @throws Always, unless overridden: TBBlue access is an optional capability.
 */
public async sendDzrpCmdGetTbblueReg(register: number): Promise<number> {
throw Error("Reading Tbblue registers is not supported.");
}
/**
 * Override.
 * Sends the command to get a sprites palette.
 * @param index 0/1. The first or the second palette.
 * @returns An array with 256 entries with the 9 bit color.
 * @throws Always, unless overridden: sprites are an optional capability.
 */
public async sendDzrpCmdGetSpritesPalette(index: number): Promise<Array<number>> {
throw Error("Get sprite palette not supported!");
//return [];
}
/**
 * Override.
 * Sends the command to get a number of sprite attributes.
 * @param index The index of the sprite.
 * @param count The number of sprites to return.
 * @returns An array with 5 byte attributes for each sprite.
 * @throws Always, unless overridden: sprites are an optional capability.
 */
public async sendDzrpCmdGetSprites(index: number, count: number): Promise<Array<Uint8Array>> {
throw Error("Get sprites not supported!");
//return [];
}
/**
 * Override.
 * Sends the command to retrieve sprite patterns.
 * Retrieves only 256 byte patterns. If a 128 byte patterns is required
 * the full 256 bytes are returned.
 * @param index The index of the pattern [0-63]
 * @param count The number of patterns [0-64]
 * @returns A promise with an Array with the sprite pattern for each index.
 * @throws Always, unless overridden: sprites are an optional capability.
 */
protected async sendDzrpCmdGetSpritePatterns(index: number, count: number): Promise<Array<Array<number>>> {
throw Error("Get sprite patterns not supported!");
//return [[]];
}
/**
 * Override.
 * Sends the command to get the sprites clipping window.
 * @returns A Promise that returns the clipping dimensions and the control byte (xl, xr, yt, yb, control).
 * @throws Always, unless overridden: sprites are an optional capability.
 */
public async sendDzrpCmdGetSpritesClipWindow(): Promise<{xl: number, xr: number, yt: number, yb: number, control: number}> {
throw Error("Get sprites clip window not supported!");
//return {xl: 0, xr: 0, yt: 0, yb: 0, control: 0};
}
/**
 * Override.
 * Sends the command to set the border.
 * @param borderColor The border color number to set.
 */
public async sendDzrpCmdSetBorder(borderColor: number): Promise<void> {
Utility.assert(false);	// Base class stub: must be overridden by the transport.
}
/**
 * Override.
 * Sends the command to set all breakpoints.
 * For the ZXNext all breakpoints are set at once just before the
 * next 'continue' is executed.
 * @param bpAddresses The breakpoint addresses. Each 0x0000-0xFFFF.
 * @returns A Promise with the memory contents from each breakpoint address.
 */
protected async sendDzrpCmdSetBreakpoints(bpAddresses: Array<number>): Promise<Array<number>> {
Utility.assert(false);	// Base class stub: must be overridden by the transport.
return [];
}
/**
 * Override.
 * Sends the command to restore the memory for all breakpoints.
 * This is sent just after the 'continue' command.
 * So that the user only sees correct memory contents even if doing
 * a disassembly or memory read.
 * It is also required otherwise the breakpoints in 'calcStep' are not correctly
 * calculated.
 * @param elems The addresses + memory content.
 */
protected async sendDzrpCmdRestoreMem(elems: Array<{address: number, value: number}>): Promise<void> {
Utility.assert(false);	// Base class stub: must be overridden by the transport.
}
} | the_stack |
import * as _ from 'lodash';
import * as Bluebird from 'bluebird';
import { expect } from 'chai';
import {
stub,
spy,
useFakeTimers,
SinonStub,
SinonSpy,
SinonFakeTimers,
} from 'sinon';
import * as supertest from 'supertest';
import * as path from 'path';
import { promises as fs } from 'fs';
import { exists, unlinkAll } from '../src/lib/fs-utils';
import * as appMock from './lib/application-state-mock';
import * as mockedDockerode from './lib/mocked-dockerode';
import mockedAPI = require('./lib/mocked-device-api');
import sampleResponses = require('./data/device-api-responses.json');
import * as config from '../src/config';
import * as logger from '../src/logger';
import SupervisorAPI from '../src/supervisor-api';
import * as apiBinder from '../src/api-binder';
import * as deviceState from '../src/device-state';
import * as apiKeys from '../src/lib/api-keys';
import * as dbus from '../src/lib/dbus';
import * as updateLock from '../src/lib/update-lock';
import * as TargetState from '../src/device-state/target-state';
import * as targetStateCache from '../src/device-state/target-state-cache';
import blink = require('../src/lib/blink');
import constants = require('../src/lib/constants');
import * as deviceAPI from '../src/device-api/common';
import { UpdatesLockedError } from '../src/lib/errors';
import { SchemaTypeKey } from '../src/config/schema-type';
import log from '../src/lib/supervisor-console';
describe('SupervisorAPI [V1 Endpoints]', () => {
let api: SupervisorAPI;
let healthCheckStubs: SinonStub[];
let targetStateCacheMock: SinonStub;
// supertest client pointing at the locally started mocked supervisor API.
const request = supertest(
`http://127.0.0.1:${mockedAPI.mockedOptions.listenPort}`,
);
// Default fixture: a single app (appId 2) with a 3-service release.
const services = [
{ appId: 2, serviceId: 640681, serviceName: 'one' },
{ appId: 2, serviceId: 640682, serviceName: 'two' },
{ appId: 2, serviceId: 640683, serviceName: 'three' },
];
const containers = services.map((service) => mockedAPI.mockService(service));
const images = services.map((service) => mockedAPI.mockImage(service));
let loggerStub: SinonStub;
beforeEach(() => {
// Mock a 3 container release
appMock.mockManagers(containers, [], []);
appMock.mockImages([], false, images);
appMock.mockSupervisorNetwork(true);
// NOTE(review): targetStateCacheMock is assigned in the 'before' hook
// below; mocha runs all 'before' hooks prior to the first 'beforeEach',
// so it is defined here at runtime even though it reads awkwardly.
targetStateCacheMock.resolves({
appId: 2,
commit: 'abcdef2',
name: 'test-app2',
source: 'https://api.balena-cloud.com',
releaseId: 1232,
services: JSON.stringify(services),
networks: '[]',
volumes: '[]',
});
});
afterEach(() => {
// Clear Dockerode actions recorded for each test
mockedDockerode.resetHistory();
appMock.unmockAll();
});
before(async () => {
// Wait until the supervisor subsystems used by the API have initialized.
await apiBinder.initialized;
await deviceState.initialized;
await targetStateCache.initialized;
// Stub health checks so we can modify them whenever needed
healthCheckStubs = [
stub(apiBinder, 'healthcheck'),
stub(deviceState, 'healthcheck'),
];
// The mockedAPI contains stubs that might create unexpected results
// See the module to know what has been stubbed
api = await mockedAPI.create();
// Start test API
await api.listen(
mockedAPI.mockedOptions.listenPort,
mockedAPI.mockedOptions.timeout,
);
// Mock target state cache
targetStateCacheMock = stub(targetStateCache, 'getTargetApp');
// Create a scoped key
await apiKeys.initialized;
await apiKeys.generateCloudKey();
// Stub logs for all API methods
loggerStub = stub(logger, 'attach');
loggerStub.resolves();
});
after(async () => {
try {
await api.stop();
} catch (e) {
// A test may already have stopped the server; only tolerate that
// specific error and rethrow anything else.
if (e.message !== 'Server is not running.') {
throw e;
}
}
// Restore healthcheck stubs
healthCheckStubs.forEach((hc) => hc.restore());
// Remove any test data generated
await mockedAPI.cleanUp();
targetStateCacheMock.restore();
loggerStub.restore();
});
describe('POST /v1/restart', () => {
it('restarts all containers in release', async () => {
// Perform the test with our mocked release
await mockedDockerode.testWithData({ containers, images }, async () => {
// Perform test
await request
.post('/v1/restart')
.send({ appId: 2 })
.set('Accept', 'application/json')
.set('Authorization', `Bearer ${apiKeys.cloudApiKey}`)
.expect(sampleResponses.V1.POST['/restart'].statusCode)
.then((response) => {
expect(response.body).to.deep.equal(
sampleResponses.V1.POST['/restart'].body,
);
expect(response.text).to.deep.equal(
sampleResponses.V1.POST['/restart'].text,
);
});
// Check that mockedDockerode recorded 3 stop and 3 start actions
// (one per service of the mocked 3-container release).
const removeSteps = _(mockedDockerode.actions)
.pickBy({ name: 'stop' })
.map()
.value();
expect(removeSteps).to.have.lengthOf(3);
const startSteps = _(mockedDockerode.actions)
.pickBy({ name: 'start' })
.map()
.value();
expect(startSteps).to.have.lengthOf(3);
});
});
it('validates request body parameters', async () => {
// 'thing' is not a valid body key; the endpoint requires 'appId'.
await request
.post('/v1/restart')
.send({ thing: '' })
.set('Accept', 'application/json')
.set('Authorization', `Bearer ${apiKeys.cloudApiKey}`)
.expect(sampleResponses.V1.POST['/restart [Invalid Body]'].statusCode)
.then((response) => {
expect(response.body).to.deep.equal(
sampleResponses.V1.POST['/restart [Invalid Body]'].body,
);
expect(response.text).to.deep.equal(
sampleResponses.V1.POST['/restart [Invalid Body]'].text,
);
});
});
});
describe('GET /v1/healthy', () => {
it('returns OK because all checks pass', async () => {
// Make all healthChecks pass
healthCheckStubs.forEach((hc) => hc.resolves(true));
await request
.get('/v1/healthy')
.set('Accept', 'application/json')
.set('Authorization', `Bearer ${apiKeys.cloudApiKey}`)
.expect(sampleResponses.V1.GET['/healthy'].statusCode)
.then((response) => {
expect(response.body).to.deep.equal(
sampleResponses.V1.GET['/healthy'].body,
);
expect(response.text).to.deep.equal(
sampleResponses.V1.GET['/healthy'].text,
);
});
});
it('Fails because some checks did not pass', async () => {
// Make all healthChecks fail; the endpoint should report unhealthy.
healthCheckStubs.forEach((hc) => hc.resolves(false));
await request
.get('/v1/healthy')
.set('Accept', 'application/json')
.expect(sampleResponses.V1.GET['/healthy [2]'].statusCode)
.then((response) => {
expect(response.body).to.deep.equal(
sampleResponses.V1.GET['/healthy [2]'].body,
);
expect(response.text).to.deep.equal(
sampleResponses.V1.GET['/healthy [2]'].text,
);
});
});
});
describe('GET /v1/apps/:appId', () => {
it('does not return information for an application when there is more than 1 container', async () => {
// The suite-wide beforeEach mocks a 3-container release for appId 2.
await request
.get('/v1/apps/2')
.set('Accept', 'application/json')
.set('Authorization', `Bearer ${apiKeys.cloudApiKey}`)
.expect(
sampleResponses.V1.GET['/apps/2 [Multiple containers running]']
.statusCode,
);
});
it('returns information about a specific application', async () => {
// Setup single container application
const container = mockedAPI.mockService({
containerId: 'abc123',
appId: 2,
releaseId: 77777,
});
const image = mockedAPI.mockImage({
appId: 2,
});
appMock.mockManagers([container], [], []);
appMock.mockImages([], false, [image]);
await mockedDockerode.testWithData(
{ containers: [container], images: [image] },
async () => {
// Make request
await request
.get('/v1/apps/2')
.set('Accept', 'application/json')
.set('Authorization', `Bearer ${apiKeys.cloudApiKey}`)
.expect(sampleResponses.V1.GET['/apps/2'].statusCode)
.expect('Content-Type', /json/)
.then((response) => {
expect(response.body).to.deep.equal(
sampleResponses.V1.GET['/apps/2'].body,
);
});
},
);
});
});
// TODO: setup for this test is wrong, which leads to inconsistent data being passed to
// manager methods. A refactor is needed
// NOTE(review): suite is skipped pending the refactor described above.
describe.skip('POST /v1/apps/:appId/stop', () => {
it('does not allow stopping an application when there is more than 1 container', async () => {
// Every test case in this suite has a 3 service release mocked so just make the request
await mockedDockerode.testWithData({ containers, images }, async () => {
await request
.post('/v1/apps/2/stop')
.set('Accept', 'application/json')
.set('Authorization', `Bearer ${apiKeys.cloudApiKey}`)
.expect(
sampleResponses.V1.GET['/apps/2/stop [Multiple containers running]']
.statusCode,
);
});
});
it('stops a SPECIFIC application and returns a containerId', async () => {
// Setup single container application
const container = mockedAPI.mockService({
containerId: 'abc123',
appId: 2,
});
const image = mockedAPI.mockImage({
appId: 2,
});
appMock.mockManagers([container], [], []);
appMock.mockImages([], false, [image]);
// Perform the test with our mocked release
await mockedDockerode.testWithData(
{ containers: [container], images: [image] },
async () => {
await request
.post('/v1/apps/2/stop')
.set('Accept', 'application/json')
.set('Authorization', `Bearer ${apiKeys.cloudApiKey}`)
.expect(sampleResponses.V1.GET['/apps/2/stop'].statusCode)
.expect('Content-Type', /json/)
.then((response) => {
expect(response.body).to.deep.equal(
sampleResponses.V1.GET['/apps/2/stop'].body,
);
});
},
);
});
});
// TODO: setup for this test is wrong, which leads to inconsistent data being passed to
// manager methods. A refactor is needed
// NOTE(review): suite is skipped pending the refactor described above.
describe.skip('POST /v1/apps/:appId/start', () => {
it('does not allow starting an application when there is more than 1 container', async () => {
// Every test case in this suite has a 3 service release mocked so just make the request
await mockedDockerode.testWithData({ containers, images }, async () => {
await request
.post('/v1/apps/2/start')
.set('Accept', 'application/json')
.set('Authorization', `Bearer ${apiKeys.cloudApiKey}`)
.expect(400);
});
});
it('starts a SPECIFIC application and returns a containerId', async () => {
const service = {
serviceName: 'main',
containerId: 'abc123',
appId: 2,
serviceId: 640681,
};
// Setup single container application
const container = mockedAPI.mockService(service);
const image = mockedAPI.mockImage(service);
appMock.mockManagers([container], [], []);
appMock.mockImages([], false, [image]);
// Target state returns single service
targetStateCacheMock.resolves({
appId: 2,
commit: 'abcdef2',
name: 'test-app2',
source: 'https://api.balena-cloud.com',
releaseId: 1232,
services: JSON.stringify([service]),
volumes: '[]',
networks: '[]',
});
// Perform the test with our mocked release
await mockedDockerode.testWithData(
{ containers: [container], images: [image] },
async () => {
await request
.post('/v1/apps/2/start')
.set('Accept', 'application/json')
.set('Authorization', `Bearer ${apiKeys.cloudApiKey}`)
.expect(200)
.expect('Content-Type', /json/)
.then((response) => {
expect(response.body).to.deep.equal({ containerId: 'abc123' });
});
},
);
});
});
describe('GET /v1/device', () => {
it('returns MAC address', async () => {
const response = await request
.get('/v1/device')
.set('Accept', 'application/json')
.set('Authorization', `Bearer ${apiKeys.cloudApiKey}`)
.expect(200);
// Only presence is asserted; the actual value depends on the host.
expect(response.body).to.have.property('mac_address').that.is.not.empty;
});
});
describe('POST /v1/reboot', () => {
let rebootMock: SinonStub;
before(() => {
// Stub the dbus reboot call so no real reboot is triggered.
rebootMock = stub(dbus, 'reboot').resolves((() => void 0) as any);
});
after(() => {
rebootMock.restore();
});
afterEach(() => {
// Forget recorded calls so each test only sees its own invocations.
rebootMock.resetHistory();
});
it('should return 202 and reboot if no locks are set', async () => {
// Setup single container application
const container = mockedAPI.mockService({
containerId: 'abc123',
appId: 2,
releaseId: 77777,
});
const image = mockedAPI.mockImage({
appId: 2,
});
appMock.mockManagers([container], [], []);
appMock.mockImages([], false, [image]);
await mockedDockerode.testWithData(
{ containers: [container], images: [image] },
async () => {
const response = await request
.post('/v1/reboot')
.set('Accept', 'application/json')
.set('Authorization', `Bearer ${apiKeys.cloudApiKey}`)
.expect(202);
expect(response.body).to.have.property('Data').that.is.not.empty;
expect(rebootMock).to.have.been.calledOnce;
},
);
});
// Fix: the original title claimed "if no locks are set" although this test
// stubs the lock helper so a lock IS held (only 'force' bypasses it).
it('should return 423 and reject the reboot if locks are set', async () => {
	// Simulate a held update lock: non-forced requests raise UpdatesLockedError.
	stub(updateLock, 'lock').callsFake((__, opts, fn) => {
		if (opts.force) {
			return Bluebird.resolve(fn());
		}
		throw new UpdatesLockedError('Updates locked');
	});
	try {
		// Setup single container application
		const container = mockedAPI.mockService({
			containerId: 'abc123',
			appId: 2,
			releaseId: 77777,
		});
		const image = mockedAPI.mockImage({
			appId: 2,
		});
		appMock.mockManagers([container], [], []);
		appMock.mockImages([], false, [image]);
		await mockedDockerode.testWithData(
			{ containers: [container], images: [image] },
			async () => {
				const response = await request
					.post('/v1/reboot')
					.set('Accept', 'application/json')
					.set('Authorization', `Bearer ${apiKeys.cloudApiKey}`)
					.expect(423);
				expect(updateLock.lock).to.be.calledOnce;
				expect(response.body).to.have.property('Error').that.is.not.empty;
				expect(rebootMock).to.not.have.been.called;
			},
		);
	} finally {
		// Always restore the stub, even when an expectation above fails,
		// so later tests see the real lock implementation.
		(updateLock.lock as SinonStub).restore();
	}
});
it('should return 202 and reboot if force is set to true', async () => {
// A lock is held, but 'force: true' in the request body must bypass it.
stub(updateLock, 'lock').callsFake((__, opts, fn) => {
if (opts.force) {
return Bluebird.resolve(fn());
}
throw new UpdatesLockedError('Updates locked');
});
// Setup single container application
const container = mockedAPI.mockService({
containerId: 'abc123',
appId: 2,
releaseId: 77777,
});
const image = mockedAPI.mockImage({
appId: 2,
});
appMock.mockManagers([container], [], []);
appMock.mockImages([], false, [image]);
await mockedDockerode.testWithData(
{ containers: [container], images: [image] },
async () => {
const response = await request
.post('/v1/reboot')
.send({ force: true })
.set('Accept', 'application/json')
.set('Authorization', `Bearer ${apiKeys.cloudApiKey}`)
.expect(202);
expect(updateLock.lock).to.be.calledOnce;
expect(response.body).to.have.property('Data').that.is.not.empty;
expect(rebootMock).to.have.been.calledOnce;
},
);
// NOTE(review): if an expectation above throws, this restore is skipped;
// consider wrapping in try/finally.
(updateLock.lock as SinonStub).restore();
});
});
describe('POST /v1/shutdown', () => {
let shutdownMock: SinonStub;
before(() => {
// Stub the dbus shutdown call so no real shutdown is triggered.
shutdownMock = stub(dbus, 'shutdown').resolves((() => void 0) as any);
});
after(async () => {
shutdownMock.restore();
});
it('should return 202 and shutdown if no locks are set', async () => {
// Setup single container application
const container = mockedAPI.mockService({
containerId: 'abc123',
appId: 2,
releaseId: 77777,
});
const image = mockedAPI.mockImage({
appId: 2,
});
appMock.mockManagers([container], [], []);
appMock.mockImages([], false, [image]);
await mockedDockerode.testWithData(
{ containers: [container], images: [image] },
async () => {
const response = await request
.post('/v1/shutdown')
.set('Accept', 'application/json')
.set('Authorization', `Bearer ${apiKeys.cloudApiKey}`)
.expect(202);
expect(response.body).to.have.property('Data').that.is.not.empty;
expect(shutdownMock).to.have.been.calledOnce;
},
);
// NOTE(review): unlike the reboot suite, call history is reset inline
// here instead of in an afterEach hook; consider unifying the style.
shutdownMock.resetHistory();
});
// Fix: the original title said "reject the reboot if no locks are set" —
// wrong action (this is the shutdown endpoint) and wrong condition
// (the test stubs the lock helper so a lock IS held).
it('should return 423 and reject the shutdown if locks are set', async () => {
	// Simulate a held update lock: non-forced requests raise UpdatesLockedError.
	stub(updateLock, 'lock').callsFake((__, opts, fn) => {
		if (opts.force) {
			return Bluebird.resolve(fn());
		}
		throw new UpdatesLockedError('Updates locked');
	});
	try {
		// Setup single container application
		const container = mockedAPI.mockService({
			containerId: 'abc123',
			appId: 2,
			releaseId: 77777,
		});
		const image = mockedAPI.mockImage({
			appId: 2,
		});
		appMock.mockManagers([container], [], []);
		appMock.mockImages([], false, [image]);
		await mockedDockerode.testWithData(
			{ containers: [container], images: [image] },
			async () => {
				const response = await request
					.post('/v1/shutdown')
					.set('Accept', 'application/json')
					.set('Authorization', `Bearer ${apiKeys.cloudApiKey}`)
					.expect(423);
				expect(updateLock.lock).to.be.calledOnce;
				expect(response.body).to.have.property('Error').that.is.not.empty;
				expect(shutdownMock).to.not.have.been.called;
			},
		);
	} finally {
		// Always restore the stub, even when an expectation above fails,
		// so later tests see the real lock implementation.
		(updateLock.lock as SinonStub).restore();
	}
});
it('should return 202 and shutdown if force is set to true', async () => {
stub(updateLock, 'lock').callsFake((__, opts, fn) => {
if (opts.force) {
return Bluebird.resolve(fn());
}
throw new UpdatesLockedError('Updates locked');
});
// Setup single container application
const container = mockedAPI.mockService({
containerId: 'abc123',
appId: 2,
releaseId: 77777,
});
const image = mockedAPI.mockImage({
appId: 2,
});
appMock.mockManagers([container], [], []);
appMock.mockImages([], false, [image]);
await mockedDockerode.testWithData(
{ containers: [container], images: [image] },
async () => {
const response = await request
.post('/v1/shutdown')
.send({ force: true })
.set('Accept', 'application/json')
.set('Authorization', `Bearer ${apiKeys.cloudApiKey}`)
.expect(202);
expect(updateLock.lock).to.be.calledOnce;
expect(response.body).to.have.property('Data').that.is.not.empty;
expect(shutdownMock).to.have.been.calledOnce;
},
);
(updateLock.lock as SinonStub).restore();
});
});
describe('POST /v1/update', () => {
  let getConfigStub: SinonStub;
  let updateSpy: SinonSpy;
  before(() => {
    getConfigStub = stub(config, 'get');
    updateSpy = spy(TargetState, 'update');
  });
  afterEach(() => {
    updateSpy.resetHistory();
  });
  after(() => {
    getConfigStub.restore();
    updateSpy.restore();
  });
  it('returns 204 with no parameters', async () => {
    // instantUpdates is reported as enabled
    getConfigStub.resolves(true);
    await request
      .post('/v1/update')
      .set('Accept', 'application/json')
      .set('Authorization', `Bearer ${apiKeys.cloudApiKey}`)
      .expect(sampleResponses.V1.POST['/update [204 Response]'].statusCode);
    // A target-state update is triggered, with no force flag forwarded
    expect(updateSpy.called).to.be.true;
    expect(updateSpy.calledWith(undefined, true)).to.be.true;
  });
  it('returns 204 with force: true in body', async () => {
    // instantUpdates is reported as enabled
    getConfigStub.resolves(true);
    await request
      .post('/v1/update')
      .send({ force: true })
      .set('Accept', 'application/json')
      .set('Authorization', `Bearer ${apiKeys.cloudApiKey}`)
      .expect(sampleResponses.V1.POST['/update [204 Response]'].statusCode);
    // A target-state update is triggered with force forwarded from the body
    expect(updateSpy.called).to.be.true;
    expect(updateSpy.calledWith(true, true)).to.be.true;
  });
  it('returns 202 when instantUpdates are disabled', async () => {
    // instantUpdates is reported as disabled
    getConfigStub.resolves(false);
    await request
      .post('/v1/update')
      .set('Accept', 'application/json')
      .set('Authorization', `Bearer ${apiKeys.cloudApiKey}`)
      .expect(sampleResponses.V1.POST['/update [202 Response]'].statusCode);
    // No target-state update should have been triggered
    expect(updateSpy.called).to.be.false;
  });
});
describe('POST /v1/blink', () => {
  // Further blink function-specific testing located in 07-blink.spec.ts
  it('responds with code 200 and empty body', async () => {
    const response = await request
      .post('/v1/blink')
      .set('Accept', 'application/json')
      .set('Authorization', `Bearer ${apiKeys.cloudApiKey}`)
      .expect(sampleResponses.V1.POST['/blink'].statusCode);
    expect(response.body).to.deep.equal(sampleResponses.V1.POST['/blink'].body);
    expect(response.text).to.deep.equal(sampleResponses.V1.POST['/blink'].text);
  });
  it('directs device to blink for 15000ms (hardcoded length)', async () => {
    const startSpy = spy(blink.pattern, 'start');
    const stopSpy = spy(blink.pattern, 'stop');
    const clock = useFakeTimers();
    await request
      .post('/v1/blink')
      .set('Accept', 'application/json')
      .set('Authorization', `Bearer ${apiKeys.cloudApiKey}`);
    // The blink pattern starts immediately on request...
    expect(startSpy.callCount).to.equal(1);
    // ...and stops once the hardcoded 15s window elapses
    clock.tick(15000);
    expect(stopSpy.callCount).to.equal(1);
    startSpy.restore();
    stopSpy.restore();
    clock.restore();
  });
});
describe('POST /v1/regenerate-api-key', () => {
  it('returns a valid new API key', async () => {
    const refreshKeySpy: SinonSpy = spy(apiKeys, 'refreshKey');
    let newKey: string = '';
    await request
      .post('/v1/regenerate-api-key')
      .set('Accept', 'application/json')
      .set('Authorization', `Bearer ${apiKeys.cloudApiKey}`)
      .expect(sampleResponses.V1.POST['/regenerate-api-key'].statusCode)
      .then((response) => {
        expect(response.body).to.deep.equal(
          sampleResponses.V1.POST['/regenerate-api-key'].body,
        );
        // NOTE(review): apiKeys.cloudApiKey appears to be refreshed in place,
        // so the response text is compared against the already-updated key —
        // confirm against apiKeys.refreshKey's implementation
        expect(response.text).to.equal(apiKeys.cloudApiKey);
        newKey = response.text;
        expect(refreshKeySpy.callCount).to.equal(1);
      });
    // Ensure persistence with future calls
    await request
      .post('/v1/blink')
      .set('Accept', 'application/json')
      .set('Authorization', `Bearer ${newKey}`)
      .expect(sampleResponses.V1.POST['/blink'].statusCode);
    refreshKeySpy.restore();
  });
  it('expires old API key after generating new key', async () => {
    // Capture the key BEFORE regeneration; it should stop working afterwards
    const oldKey: string = apiKeys.cloudApiKey;
    await request
      .post('/v1/regenerate-api-key')
      .set('Accept', 'application/json')
      .set('Authorization', `Bearer ${oldKey}`)
      .expect(sampleResponses.V1.POST['/regenerate-api-key'].statusCode);
    // The old key must now be rejected with 401 Unauthorized
    await request
      .post('/v1/restart')
      .set('Accept', 'application/json')
      .set('Authorization', `Bearer ${oldKey}`)
      .expect(401);
  });
  it('communicates the new API key to balena API', async () => {
    const reportStateSpy: SinonSpy = spy(deviceState, 'reportCurrentState');
    await request
      .post('/v1/regenerate-api-key')
      .set('Accept', 'application/json')
      .set('Authorization', `Bearer ${apiKeys.cloudApiKey}`)
      .then(() => {
        expect(reportStateSpy.callCount).to.equal(1);
        // Further reportCurrentState tests should be in 05-device-state.spec.ts,
        // but its test case seems to currently be skipped until interface redesign
      });
    reportStateSpy.restore();
  });
});
describe('/v1/device/host-config', () => {
// Wrap GET and PATCH /v1/device/host-config tests in the same block to share
// common scoped variables, namely file paths and file content
const hostnamePath: string = path.join(
process.env.ROOT_MOUNTPOINT!,
'/etc/hostname',
);
const proxyBasePath: string = path.join(
process.env.ROOT_MOUNTPOINT!,
process.env.BOOT_MOUNTPOINT!,
'system-proxy',
);
const redsocksPath: string = path.join(proxyBasePath, 'redsocks.conf');
const noProxyPath: string = path.join(proxyBasePath, 'no_proxy');
/**
* Copies contents of hostname, redsocks.conf, and no_proxy test files with `.template`
* endings to test files without `.template` endings to ensure the same data always
* exists for /v1/device/host-config test suites
*/
const restoreConfFileTemplates = async (): Promise<void[]> => {
  // Copy a config file back from its `.template` counterpart.
  // Factoring this out removes the triplicated read/write pairs and makes
  // the three restores genuinely concurrent: previously the `await
  // fs.readFile(...)` expressions inside the array literal serialized the
  // reads, because each `await` suspended construction of the array.
  const restoreFromTemplate = async (filePath: string): Promise<void> =>
    fs.writeFile(filePath, await fs.readFile(`${filePath}.template`));
  return Promise.all([
    restoreFromTemplate(hostnamePath),
    restoreFromTemplate(redsocksPath),
    restoreFromTemplate(noProxyPath),
  ]);
};
// Set hostname & proxy file content to expected defaults
before(async () => await restoreConfFileTemplates());
afterEach(async () => await restoreConfFileTemplates());
// Store GET responses for endpoint in variables so we can be less verbose in tests
const hostnameOnlyRes =
sampleResponses.V1.GET['/device/host-config [Hostname only]'];
const hostnameProxyRes =
sampleResponses.V1.GET['/device/host-config [Hostname and proxy]'];
describe('GET /v1/device/host-config', () => {
  // These tests rely on the before/afterEach hooks above restoring the
  // hostname and proxy files from their `.template` counterparts.
  it('returns current host config (hostname and proxy)', async () => {
    await request
      .get('/v1/device/host-config')
      .set('Accept', 'application/json')
      .set('Authorization', `Bearer ${apiKeys.cloudApiKey}`)
      .expect(hostnameProxyRes.statusCode)
      .then((response) => {
        expect(response.body).to.deep.equal(hostnameProxyRes.body);
      });
  });
  it('returns current host config (hostname only)', async () => {
    // With the proxy files removed, only the hostname should be reported
    await unlinkAll(redsocksPath, noProxyPath);
    await request
      .get('/v1/device/host-config')
      .set('Accept', 'application/json')
      .set('Authorization', `Bearer ${apiKeys.cloudApiKey}`)
      .expect(hostnameOnlyRes.statusCode)
      .then((response) => {
        expect(response.body).to.deep.equal(hostnameOnlyRes.body);
      });
  });
  it('errors if no hostname file exists', async () => {
    // Without a hostname file, the endpoint cannot assemble a host config
    await unlinkAll(hostnamePath);
    await request
      .get('/v1/device/host-config')
      .set('Accept', 'application/json')
      .set('Authorization', `Bearer ${apiKeys.cloudApiKey}`)
      .expect(503);
  });
});
describe('PATCH /v1/device/host-config', () => {
let configSetStub: SinonStub;
let logWarnStub: SinonStub;
let restartServiceSpy: SinonSpy;
const validProxyReqs: { [key: string]: number[] | string[] } = {
ip: ['proxy.example.org', 'proxy.foo.org'],
port: [5128, 1080],
type: constants.validRedsocksProxyTypes,
login: ['user', 'user2'],
password: ['foo', 'bar'],
};
// Mock to short-circuit config.set, allowing writing hostname directly to test file
const configSetFakeFn = async <T extends SchemaTypeKey>(
  keyValues: config.ConfigMap<T>,
): Promise<void> =>
  await fs.writeFile(hostnamePath, (keyValues as any).hostname);
// Asserts a PATCH response matches the canned sample response,
// comparing both the raw text and the parsed body.
const validatePatchResponse = (res: supertest.Response): void => {
  expect(res.text).to.equal(
    sampleResponses.V1.PATCH['/host/device-config'].text,
  );
  expect(res.body).to.deep.equal(
    sampleResponses.V1.PATCH['/host/device-config'].body,
  );
};
before(() => {
configSetStub = stub(config, 'set').callsFake(configSetFakeFn);
logWarnStub = stub(log, 'warn');
});
after(() => {
configSetStub.restore();
logWarnStub.restore();
});
beforeEach(() => {
restartServiceSpy = spy(dbus, 'restartService');
});
afterEach(() => {
restartServiceSpy.restore();
});
it('updates the hostname with provided string if string is not empty', async () => {
// stub servicePartOf to throw exceptions for the new service names
stub(dbus, 'servicePartOf').callsFake(
async (serviceName: string): Promise<string> => {
if (serviceName === 'balena-hostname') {
throw new Error('Unit not loaded.');
}
return '';
},
);
await unlinkAll(redsocksPath, noProxyPath);
const patchBody = { network: { hostname: 'newdevice' } };
await request
.patch('/v1/device/host-config')
.send(patchBody)
.set('Accept', 'application/json')
.set('Authorization', `Bearer ${apiKeys.cloudApiKey}`)
.expect(sampleResponses.V1.PATCH['/host/device-config'].statusCode)
.then((response) => {
validatePatchResponse(response);
});
// should restart services
expect(restartServiceSpy.callCount).to.equal(2);
expect(restartServiceSpy.args).to.deep.equal([
['balena-hostname'],
['resin-hostname'],
]);
await request
.get('/v1/device/host-config')
.set('Accept', 'application/json')
.set('Authorization', `Bearer ${apiKeys.cloudApiKey}`)
.then((response) => {
expect(response.body).to.deep.equal(patchBody);
});
(dbus.servicePartOf as SinonStub).restore();
});
it('skips restarting hostname services if they are part of config-json.target', async () => {
// stub servicePartOf to return the config-json.target we are looking for
stub(dbus, 'servicePartOf').callsFake(
async (): Promise<string> => {
return 'config-json.target';
},
);
await unlinkAll(redsocksPath, noProxyPath);
const patchBody = { network: { hostname: 'newdevice' } };
await request
.patch('/v1/device/host-config')
.send(patchBody)
.set('Accept', 'application/json')
.set('Authorization', `Bearer ${apiKeys.cloudApiKey}`)
.expect(sampleResponses.V1.PATCH['/host/device-config'].statusCode)
.then((response) => {
validatePatchResponse(response);
});
// skips restarting hostname services if they are part of config-json.target
expect(restartServiceSpy.callCount).to.equal(0);
await request
.get('/v1/device/host-config')
.set('Accept', 'application/json')
.set('Authorization', `Bearer ${apiKeys.cloudApiKey}`)
.then((response) => {
expect(response.body).to.deep.equal(patchBody);
});
(dbus.servicePartOf as SinonStub).restore();
});
it('updates hostname to first 7 digits of device uuid when sent invalid hostname', async () => {
await unlinkAll(redsocksPath, noProxyPath);
await request
.patch('/v1/device/host-config')
.send({ network: { hostname: '' } })
.set('Accept', 'application/json')
.set('Authorization', `Bearer ${apiKeys.cloudApiKey}`)
.expect(sampleResponses.V1.PATCH['/host/device-config'].statusCode)
.then((response) => {
validatePatchResponse(response);
});
// should restart services
expect(restartServiceSpy.callCount).to.equal(2);
expect(restartServiceSpy.args).to.deep.equal([
['balena-hostname'],
['resin-hostname'],
]);
await request
.get('/v1/device/host-config')
.set('Accept', 'application/json')
.set('Authorization', `Bearer ${apiKeys.cloudApiKey}`)
.then(async (response) => {
const uuidHostname = await config
.get('uuid')
.then((uuid) => uuid?.slice(0, 7));
expect(response.body).to.deep.equal({
network: { hostname: uuidHostname },
});
});
});
it('removes proxy when sent empty proxy object', async () => {
await request
.patch('/v1/device/host-config')
.send({ network: { proxy: {} } })
.set('Accept', 'application/json')
.set('Authorization', `Bearer ${apiKeys.cloudApiKey}`)
.expect(sampleResponses.V1.PATCH['/host/device-config'].statusCode)
.then(async (response) => {
validatePatchResponse(response);
expect(await exists(redsocksPath)).to.be.false;
expect(await exists(noProxyPath)).to.be.false;
});
// should restart services
expect(restartServiceSpy.callCount).to.equal(3);
expect(restartServiceSpy.args).to.deep.equal([
['balena-proxy-config'],
['resin-proxy-config'],
['redsocks'],
]);
await request
.get('/v1/device/host-config')
.set('Accept', 'application/json')
.set('Authorization', `Bearer ${apiKeys.cloudApiKey}`)
.expect(hostnameOnlyRes.statusCode)
.then((response) => {
expect(response.body).to.deep.equal(hostnameOnlyRes.body);
});
});
it('updates proxy type when provided valid values', async () => {
// stub servicePartOf to throw exceptions for the new service names
stub(dbus, 'servicePartOf').callsFake(
async (serviceName: string): Promise<string> => {
if (serviceName === 'balena-proxy-config') {
throw new Error('Unit not loaded.');
}
return '';
},
);
// Test each proxy patch sequentially to prevent conflicts when writing to fs
let restartCallCount = 0;
for (const key of Object.keys(validProxyReqs)) {
const patchBodyValuesforKey: string[] | number[] =
validProxyReqs[key];
for (const value of patchBodyValuesforKey) {
await request
.patch('/v1/device/host-config')
.send({ network: { proxy: { [key]: value } } })
.set('Accept', 'application/json')
.set('Authorization', `Bearer ${apiKeys.cloudApiKey}`)
.expect(
sampleResponses.V1.PATCH['/host/device-config'].statusCode,
)
.then((response) => {
validatePatchResponse(response);
});
// should restart services
expect(restartServiceSpy.callCount).to.equal(
++restartCallCount * 3,
);
await request
.get('/v1/device/host-config')
.set('Accept', 'application/json')
.set('Authorization', `Bearer ${apiKeys.cloudApiKey}`)
.expect(hostnameProxyRes.statusCode)
.then((response) => {
expect(response.body).to.deep.equal({
network: {
hostname: hostnameProxyRes.body.network.hostname,
// All other proxy configs should be unchanged except for any values sent in patch
proxy: {
...hostnameProxyRes.body.network.proxy,
[key]: value,
},
},
});
});
} // end for (const value of patchBodyValuesforKey)
await restoreConfFileTemplates();
} // end for (const key in validProxyReqs)
(dbus.servicePartOf as SinonStub).restore();
});
it('skips restarting proxy services when part of redsocks-conf.target', async () => {
// stub servicePartOf to return the redsocks-conf.target we are looking for
stub(dbus, 'servicePartOf').callsFake(
async (): Promise<string> => {
return 'redsocks-conf.target';
},
);
// Test each proxy patch sequentially to prevent conflicts when writing to fs
for (const key of Object.keys(validProxyReqs)) {
const patchBodyValuesforKey: string[] | number[] =
validProxyReqs[key];
for (const value of patchBodyValuesforKey) {
await request
.patch('/v1/device/host-config')
.send({ network: { proxy: { [key]: value } } })
.set('Accept', 'application/json')
.set('Authorization', `Bearer ${apiKeys.cloudApiKey}`)
.expect(
sampleResponses.V1.PATCH['/host/device-config'].statusCode,
)
.then((response) => {
validatePatchResponse(response);
});
// skips restarting proxy services when part of redsocks-conf.target
expect(restartServiceSpy.callCount).to.equal(0);
await request
.get('/v1/device/host-config')
.set('Accept', 'application/json')
.set('Authorization', `Bearer ${apiKeys.cloudApiKey}`)
.expect(hostnameProxyRes.statusCode)
.then((response) => {
expect(response.body).to.deep.equal({
network: {
hostname: hostnameProxyRes.body.network.hostname,
// All other proxy configs should be unchanged except for any values sent in patch
proxy: {
...hostnameProxyRes.body.network.proxy,
[key]: value,
},
},
});
});
} // end for (const value of patchBodyValuesforKey)
await restoreConfFileTemplates();
} // end for (const key in validProxyReqs)
(dbus.servicePartOf as SinonStub).restore();
});
it('warns on the supervisor console when provided disallowed proxy fields', async () => {
const invalidProxyReqs: { [key: string]: string | number } = {
// At this time, don't support changing local_ip or local_port
local_ip: '0.0.0.0',
local_port: 12345,
type: 'invalidType',
noProxy: 'not a list of addresses',
};
for (const key of Object.keys(invalidProxyReqs)) {
await request
.patch('/v1/device/host-config')
.send({ network: { proxy: { [key]: invalidProxyReqs[key] } } })
.set('Accept', 'application/json')
.set('Authorization', `Bearer ${apiKeys.cloudApiKey}`)
.expect(200)
.then(() => {
if (key === 'type') {
expect(logWarnStub).to.have.been.calledWith(
`Invalid redsocks proxy type, must be one of ${validProxyReqs.type.join(
', ',
)}`,
);
} else if (key === 'noProxy') {
expect(logWarnStub).to.have.been.calledWith(
'noProxy field must be an array of addresses',
);
} else {
expect(logWarnStub).to.have.been.calledWith(
`Invalid proxy field(s): ${key}`,
);
}
});
}
});
it('replaces no_proxy file with noProxy array from PATCH body', async () => {
await request
.patch('/v1/device/host-config')
.send({ network: { proxy: { noProxy: ['1.2.3.4/5'] } } })
.set('Accept', 'application/json')
.set('Authorization', `Bearer ${apiKeys.cloudApiKey}`)
.expect(sampleResponses.V1.PATCH['/host/device-config'].statusCode)
.then((response) => {
validatePatchResponse(response);
});
// should restart services
expect(restartServiceSpy.callCount).to.equal(3);
expect(restartServiceSpy.args).to.deep.equal([
['balena-proxy-config'],
['resin-proxy-config'],
['redsocks'],
]);
await request
.get('/v1/device/host-config')
.set('Accept', 'application/json')
.set('Authorization', `Bearer ${apiKeys.cloudApiKey}`)
.expect(hostnameProxyRes.statusCode)
.then((response) => {
expect(response.body).to.deep.equal({
network: {
hostname: hostnameProxyRes.body.network.hostname,
// New noProxy should be only value in no_proxy file
proxy: {
...hostnameProxyRes.body.network.proxy,
noProxy: ['1.2.3.4/5'],
},
},
});
});
});
it('removes no_proxy file when sent an empty array', async () => {
await request
.patch('/v1/device/host-config')
.send({ network: { proxy: { noProxy: [] } } })
.set('Accept', 'application/json')
.set('Authorization', `Bearer ${apiKeys.cloudApiKey}`)
.expect(sampleResponses.V1.PATCH['/host/device-config'].statusCode)
.then((response) => {
validatePatchResponse(response);
});
// should restart services
expect(restartServiceSpy.callCount).to.equal(3);
expect(restartServiceSpy.args).to.deep.equal([
['balena-proxy-config'],
['resin-proxy-config'],
['redsocks'],
]);
await request
.get('/v1/device/host-config')
.set('Accept', 'application/json')
.set('Authorization', `Bearer ${apiKeys.cloudApiKey}`)
.expect(hostnameProxyRes.statusCode)
.then((response) => {
expect(response.body).to.deep.equal({
network: {
hostname: hostnameProxyRes.body.network.hostname,
// Reference all properties in proxy object EXCEPT noProxy
proxy: {
ip: hostnameProxyRes.body.network.proxy.ip,
login: hostnameProxyRes.body.network.proxy.login,
password: hostnameProxyRes.body.network.proxy.password,
port: hostnameProxyRes.body.network.proxy.port,
type: hostnameProxyRes.body.network.proxy.type,
},
},
});
});
});
it('does not update hostname or proxy when hostname or proxy are undefined', async () => {
await request
.patch('/v1/device/host-config')
.send({ network: {} })
.set('Accept', 'application/json')
.set('Authorization', `Bearer ${apiKeys.cloudApiKey}`)
.expect(sampleResponses.V1.PATCH['/host/device-config'].statusCode)
.then((response) => {
validatePatchResponse(response);
});
// As no host configs were patched, no services should be restarted
expect(restartServiceSpy.callCount).to.equal(0);
await request
.get('/v1/device/host-config')
.set('Accept', 'application/json')
.set('Authorization', `Bearer ${apiKeys.cloudApiKey}`)
.expect(hostnameProxyRes.statusCode)
.then((response) => {
expect(response.body).to.deep.equal(hostnameProxyRes.body);
});
});
it('warns on console when sent a malformed patch body', async () => {
await request
.patch('/v1/device/host-config')
.send({})
.set('Accept', 'application/json')
.set('Authorization', `Bearer ${apiKeys.cloudApiKey}`)
.expect(200)
.then(() => {
expect(logWarnStub).to.have.been.calledWith(
"Key 'network' must exist in PATCH body",
);
});
expect(restartServiceSpy.callCount).to.equal(0);
});
});
});
describe('POST /v1/purge', () => {
it('errors if no appId found in request body', async () => {
await request
.post('/v1/purge')
.send({})
.set('Accept', 'application/json')
.set('Authorization', `Bearer ${apiKeys.cloudApiKey}`)
.expect(
sampleResponses.V1.POST['/purge [400 Invalid/missing appId]']
.statusCode,
)
.then((response) => {
expect(response.text).to.equal(
sampleResponses.V1.POST['/purge [400 Invalid/missing appId]'].text,
);
});
});
it('purges the /data directory with valid appId', async () => {
const doPurgeStub: SinonStub = stub(deviceAPI, 'doPurge').resolves();
await mockedDockerode.testWithData({ containers, images }, async () => {
await request
.post('/v1/purge')
.send({ appId: 2 })
.set('Accept', 'application/json')
.set('Authorization', `Bearer ${apiKeys.cloudApiKey}`)
.expect(sampleResponses.V1.POST['/purge [200]'].statusCode)
.then((response) => {
expect(response.body).to.deep.equal(
sampleResponses.V1.POST['/purge [200]'].body,
);
});
});
expect(doPurgeStub.callCount).to.equal(1);
doPurgeStub.restore();
});
it('errors if appId is out of scope (application not available)', async () => {
// Generate a new scoped key to call the endpoint, as mocked
// appId = 2 services are all in the global scope and thus
// resolve to true for any isScoped check
const scopedKey = await apiKeys.generateScopedKey(
2,
containers[0].serviceName,
);
await request
.post('/v1/purge')
.send({ appId: 3 })
.set('Accept', 'application/json')
.set('Authorization', `Bearer ${scopedKey}`)
.expect(sampleResponses.V1.POST['/purge [401 Out of scope]'].statusCode)
.then((response) => {
expect(response.body).to.deep.equal(
sampleResponses.V1.POST['/purge [401 Out of scope]'].body,
);
});
});
});
});
import {
AddFieldUpdateAction,
DiscardObjectUpdatesAction,
FieldChangeType,
InitializeFieldsAction,
ObjectUpdatesAction,
ObjectUpdatesActionTypes,
ReinstateObjectUpdatesAction,
RemoveFieldUpdateAction,
RemoveObjectUpdatesAction,
SetEditableFieldUpdateAction,
SetValidFieldUpdateAction,
SelectVirtualMetadataAction,
} from './object-updates.actions';
import { hasNoValue, hasValue } from '../../../shared/empty.util';
import { Relationship } from '../../shared/item-relationships/relationship.model';
import { PatchOperationService } from './patch-operation-service/patch-operation.service';
import { Item } from '../../shared/item.model';
import { RelationshipType } from '../../shared/item-relationships/relationship-type.model';
import { GenericConstructor } from '../../shared/generic-constructor';
/**
 * Path where discarded objects are saved
 */
export const OBJECT_UPDATES_TRASH_PATH = '/trash';
/**
 * The state for a single field
 */
export interface FieldState {
  editable: boolean; // whether the field is currently in edit mode
  isNew: boolean;    // whether the field was added (rather than pre-existing)
  isValid: boolean;  // whether the field's current value passes validation
}
/**
 * A list of states for all the fields for a single page, mapped by uuid
 */
export interface FieldStates {
  [uuid: string]: FieldState;
}
/**
 * Represents every object that has a UUID
 */
export interface Identifiable {
  uuid: string;
}
/**
 * The state of a single field update
 */
export interface FieldUpdate {
  field: Identifiable;
  changeType: FieldChangeType;
}
/**
 * The states of all field updates available for a single page, mapped by uuid
 */
export interface FieldUpdates {
  [uuid: string]: FieldUpdate;
}
/**
 * The states of all virtual metadata selections available for a single page, mapped by the relationship uuid
 */
export interface VirtualMetadataSources {
  [source: string]: VirtualMetadataSource;
}
/**
 * The selection of virtual metadata for a relationship, mapped by the uuid of either the item or the relationship type
 */
export interface VirtualMetadataSource {
  [uuid: string]: boolean;
}
/**
 * An Identifiable that carries the relationship-specific data needed to
 * display and update a relationship field.
 */
export interface RelationshipIdentifiable extends Identifiable {
  nameVariant?: string;
  relatedItem: Item;
  relationship: Relationship;
  type: RelationshipType;
}
/**
 * A fieldupdate interface which represents a relationship selected to be deleted,
 * along with a selection of the virtual metadata to keep
 */
export interface DeleteRelationship extends RelationshipIdentifiable {
  keepLeftVirtualMetadata: boolean;
  keepRightVirtualMetadata: boolean;
}
/**
 * The updated state of a single page
 */
export interface ObjectUpdatesEntry {
  fieldStates: FieldStates;
  fieldUpdates: FieldUpdates;
  virtualMetadataSources: VirtualMetadataSources;
  lastModified: Date; // server-side last-modified date recorded at initialization
  patchOperationService?: GenericConstructor<PatchOperationService>;
}
/**
 * The updated state of all pages, mapped by the page URL
 */
export interface ObjectUpdatesState {
  [url: string]: ObjectUpdatesEntry;
}
/**
 * Initial state for an existing initialized field
 */
const initialFieldState = { editable: false, isNew: false, isValid: true };
/**
 * Initial state for a newly added field
 */
// isValid is deliberately undefined: a brand-new field has not been validated yet
const initialNewFieldState = { editable: true, isNew: true, isValid: undefined };
// Object.create(null) ensures the object has no default js properties (e.g. `__proto__`)
const initialState = Object.create(null);
/**
* Reducer method to calculate the next ObjectUpdates state, based on the current state and the ObjectUpdatesAction
* @param state The current state
* @param action The action to perform on the current state
*/
export function objectUpdatesReducer(state = initialState, action: ObjectUpdatesAction): ObjectUpdatesState {
  // Dispatch each recognized action type to its dedicated handler;
  // unknown actions leave the state untouched.
  switch (action.type) {
    case ObjectUpdatesActionTypes.INITIALIZE_FIELDS:
      return initializeFieldsUpdate(state, action as InitializeFieldsAction);
    case ObjectUpdatesActionTypes.ADD_FIELD:
      return addFieldUpdate(state, action as AddFieldUpdateAction);
    case ObjectUpdatesActionTypes.SELECT_VIRTUAL_METADATA:
      return selectVirtualMetadata(state, action as SelectVirtualMetadataAction);
    case ObjectUpdatesActionTypes.DISCARD:
      return discardObjectUpdates(state, action as DiscardObjectUpdatesAction);
    case ObjectUpdatesActionTypes.REINSTATE:
      return reinstateObjectUpdates(state, action as ReinstateObjectUpdatesAction);
    case ObjectUpdatesActionTypes.REMOVE:
      return removeObjectUpdates(state, action as RemoveObjectUpdatesAction);
    case ObjectUpdatesActionTypes.REMOVE_ALL:
      return removeAllObjectUpdates(state);
    case ObjectUpdatesActionTypes.REMOVE_FIELD:
      return removeFieldUpdate(state, action as RemoveFieldUpdateAction);
    case ObjectUpdatesActionTypes.SET_EDITABLE_FIELD:
      return setEditableFieldUpdate(state, action as SetEditableFieldUpdateAction);
    case ObjectUpdatesActionTypes.SET_VALID_FIELD:
      return setValidFieldUpdate(state, action as SetValidFieldUpdateAction);
    default:
      return state;
  }
}
/**
* Initialize the state for a specific url and store all its fields in the store
* @param state The current state
* @param action The action to perform on the current state
*/
function initializeFieldsUpdate(state: any, action: InitializeFieldsAction) {
  const { url, fields, lastModified, patchOperationService } = action.payload;
  // Fresh page entry: seeded field states, no pending updates or
  // virtual-metadata selections, and the server's last-modified date.
  const newPageState = {
    ...state[url],
    fieldStates: createInitialFieldStates(fields),
    fieldUpdates: {},
    virtualMetadataSources: {},
    lastModified,
    patchOperationService,
  };
  return { ...state, [url]: newPageState };
}
/**
* Add a new update for a specific field to the store
* @param state The current state
* @param action The action to perform on the current state
*/
function addFieldUpdate(state: any, action: AddFieldUpdateAction) {
  const { url, field, changeType } = action.payload;
  const pageState: ObjectUpdatesEntry = state[url] || { fieldUpdates: {} };
  // For an ADD, seed an initial "new field" state — an already-present
  // entry for this uuid wins, since the spread of the existing states
  // comes last.
  const fieldStates = changeType === FieldChangeType.ADD
    ? { [field.uuid]: initialNewFieldState, ...pageState.fieldStates }
    : pageState.fieldStates;
  const previousUpdate: any = pageState.fieldUpdates[field.uuid] || {};
  // Merge the pre-existing change type (if any) with the incoming one
  const fieldUpdate = {
    field,
    changeType: determineChangeType(previousUpdate.changeType, changeType),
  };
  const fieldUpdates = { ...pageState.fieldUpdates, [field.uuid]: fieldUpdate };
  const newPageState = { ...pageState, fieldStates, fieldUpdates };
  return { ...state, [url]: newPageState };
}
/**
* Update the selected virtual metadata in the store
* @param state The current state
* @param action The action to perform on the current state
*/
/**
 * Update the selected virtual metadata in the store.
 * Records, for the given page url and relationship source, whether the
 * virtual metadata identified by uuid should be kept (select = true/false).
 * @param state The current state
 * @param action The action to perform on the current state
 */
function selectVirtualMetadata(state: any, action: SelectVirtualMetadataAction) {
  const url: string = action.payload.url;
  const source: string = action.payload.source;
  const uuid: string = action.payload.uuid;
  const select: boolean = action.payload.select;
  const pageState: ObjectUpdatesEntry = state[url] || {};
  // Guard against an uninitialized page entry: previously, when state[url]
  // was missing (or had no virtualMetadataSources), indexing
  // `pageState.virtualMetadataSources[source]` threw a TypeError.
  const existingSources = pageState.virtualMetadataSources || {};
  const virtualMetadataSource = Object.assign(
    {},
    existingSources[source],
    { [uuid]: select },
  );
  const virtualMetadataSources = Object.assign(
    {},
    existingSources,
    { [source]: virtualMetadataSource },
  );
  const newPageState = Object.assign(
    {},
    pageState,
    { virtualMetadataSources: virtualMetadataSources },
  );
  return Object.assign(
    {},
    state,
    { [url]: newPageState },
  );
}
/**
* Discard all updates for a specific action's url in the store
* @param state The current state
* @param action The action to perform on the current state
*/
function discardObjectUpdates(state: any, action: DiscardObjectUpdatesAction) {
  if (!action.payload.discardAll) {
    // Single-page discard
    return discardObjectUpdatesFor(action.payload.url, state);
  }
  // Discard every tracked page, skipping entries that are themselves
  // trash copies of previously-discarded pages.
  return Object.keys(state)
    .filter((path: string) => !path.endsWith(OBJECT_UPDATES_TRASH_PATH))
    .reduce(
      (accState: any, path: string) => discardObjectUpdatesFor(path, accState),
      Object.assign({}, state),
    );
}
/**
* Discard all updates for a specific action's url in the store
* @param url The action's url
* @param state The current state
*/
function discardObjectUpdatesFor(url: string, state: any) {
  const pageState: ObjectUpdatesEntry = state[url];
  // Fix: guard against a url that has no tracked entry (or one without field
  // states); previously `Object.keys(pageState.fieldStates)` threw a TypeError.
  if (!pageState || !pageState.fieldStates) {
    return state;
  }
  const newFieldStates = {};
  Object.keys(pageState.fieldStates).forEach((uuid: string) => {
    const fieldState: FieldState = pageState.fieldStates[uuid];
    if (!fieldState.isNew) {
      /* After discarding we don't want the reset fields to stay editable or invalid */
      newFieldStates[uuid] = Object.assign({}, fieldState, { editable: false, isValid: true });
    }
  });
  // Reset the live entry, and park the pre-discard entry under the trash path
  // so a subsequent reinstate action can restore it.
  const discardedPageState = Object.assign({}, pageState, {
    fieldUpdates: {},
    fieldStates: newFieldStates
  });
  return Object.assign({}, state, { [url]: discardedPageState }, { [url + OBJECT_UPDATES_TRASH_PATH]: pageState });
}
/**
* Reinstate all updates for a specific action's url in the store
* @param state The current state
* @param action The action to perform on the current state
*/
function reinstateObjectUpdates(state: any, action: ReinstateObjectUpdatesAction) {
  const url: string = action.payload.url;
  const trashPath = url + OBJECT_UPDATES_TRASH_PATH;
  const trashState = state[trashPath];
  // Fix: when nothing was discarded for this url, the previous code wrote
  // `[url]: undefined`, clobbering any live entry. Leave the state untouched.
  if (trashState === undefined) {
    return state;
  }
  // Restore the parked entry and remove it from the trash.
  const newState = Object.assign({}, state, { [url]: trashState });
  delete newState[trashPath];
  return newState;
}
/**
* Remove all updates for a specific action's url in the store
* @param state The current state
* @param action The action to perform on the current state
*/
function removeObjectUpdates(state: any, action: RemoveObjectUpdatesAction) {
  // Thin action adapter over the url-based removal helper.
  return removeObjectUpdatesByURL(state, action.payload.url);
}
/**
* Remove all updates for a specific url in the store
* @param state The current state
* @param url The url for which the updates should be removed
*/
function removeObjectUpdatesByURL(state: any, url: string) {
  // Shallow-copy the state while excluding the trash entry for this url.
  const { [url + OBJECT_UPDATES_TRASH_PATH]: removedTrash, ...remaining } = state;
  return remaining;
}
/**
* Remove all updates in the store
* @param state The current state
*/
function removeAllObjectUpdates(state: any) {
  // Rebuild the state keeping only non-trash entries, rather than copying
  // everything and deleting the trash keys afterwards.
  const cleaned = {};
  Object.keys(state).forEach((path: string) => {
    if (!path.endsWith(OBJECT_UPDATES_TRASH_PATH)) {
      cleaned[path] = state[path];
    }
  });
  return cleaned;
}
/**
* Discard the update for a specific action's url and field UUID in the store
* @param state The current state
* @param action The action to perform on the current state
*/
function removeFieldUpdate(state: any, action: RemoveFieldUpdateAction) {
  const url: string = action.payload.url;
  const uuid: string = action.payload.uuid;
  const pageState: ObjectUpdatesEntry = state[url];
  // Fix: when there is no tracked entry for this url, the previous code still
  // returned `{...state, [url]: undefined}`, creating a bogus undefined entry.
  if (!hasValue(pageState)) {
    return state;
  }
  // Drop the pending update for this field, if any.
  const newUpdates: FieldUpdates = Object.assign({}, pageState.fieldUpdates);
  if (hasValue(newUpdates[uuid])) {
    delete newUpdates[uuid];
  }
  const newFieldStates: FieldStates = Object.assign({}, pageState.fieldStates);
  if (hasValue(newFieldStates[uuid])) {
    /* When resetting, make field not editable */
    if (newFieldStates[uuid].isNew) {
      /* If this field was added, just throw it away */
      delete newFieldStates[uuid];
    } else {
      newFieldStates[uuid] = Object.assign({}, newFieldStates[uuid], { editable: false, isValid: true });
    }
  }
  const newPageState = Object.assign({}, pageState, {
    fieldUpdates: newUpdates,
    fieldStates: newFieldStates
  });
  return Object.assign({}, state, { [url]: newPageState });
}
/**
* Determine the most prominent FieldChangeType, ordered as follows:
* undefined < UPDATE < ADD < REMOVE
* @param oldType The current type
* @param newType The new type that should possibly override the new type
*/
function determineChangeType(oldType: FieldChangeType, newType: FieldChangeType): FieldChangeType {
  // A missing new type never overrides; a missing old type is always replaced.
  if (hasNoValue(newType)) {
    return oldType;
  }
  if (hasNoValue(oldType)) {
    return newType;
  }
  // Keep whichever type ranks higher; on a tie the new type wins, matching
  // the original comparison's behaviour.
  return newType.valueOf() >= oldType.valueOf() ? newType : oldType;
}
/**
* Set the editable state of a specific action's url and uuid to false or true
* @param state The current state
* @param action The action to perform on the current state
*/
function setEditableFieldUpdate(state: any, action: SetEditableFieldUpdateAction) {
  const { url, uuid, editable } = action.payload;
  const pageState: ObjectUpdatesEntry = state[url];
  // Immutably flip the `editable` flag on the targeted field state.
  const updatedField = Object.assign({}, pageState.fieldStates[uuid], { editable });
  const updatedFields = Object.assign({}, pageState.fieldStates, { [uuid]: updatedField });
  return Object.assign({}, state, {
    [url]: Object.assign({}, pageState, { fieldStates: updatedFields })
  });
}
/**
* Set the isValid state of a specific action's url and uuid to false or true
* @param state The current state
* @param action The action to perform on the current state
*/
function setValidFieldUpdate(state: any, action: SetValidFieldUpdateAction) {
  const { url, uuid, isValid } = action.payload;
  const pageState: ObjectUpdatesEntry = state[url];
  // Immutably flip the `isValid` flag on the targeted field state.
  const updatedField = Object.assign({}, pageState.fieldStates[uuid], { isValid });
  const updatedFields = Object.assign({}, pageState.fieldStates, { [uuid]: updatedField });
  return Object.assign({}, state, {
    [url]: Object.assign({}, pageState, { fieldStates: updatedFields })
  });
}
/**
* Method to create an initial FieldStates object based on a list of Identifiable objects
* @param fields Identifiable objects
*/
function createInitialFieldStates(fields: Identifiable[]) {
const uuids = fields.map((field: Identifiable) => field.uuid);
const fieldStates = {};
uuids.forEach((uuid: string) => fieldStates[uuid] = initialFieldState);
return fieldStates;
} | the_stack |
import { logging } from '@angular-devkit/core';
import { concatMap, count, take, timeout } from 'rxjs/operators';
import { buildWebpackBrowser } from '../../index';
import { BASE_OPTIONS, BROWSER_BUILDER_INFO, describeBuilder } from '../setup';
// NOTE(review): Watch-mode regression suite for the webpack browser builder.
// Each test triggers a sequence of incremental rebuilds by writing files
// between emitted build results, and asserts that diagnostics appear, persist
// and disappear on the expected build — i.e. that the builder's error cache
// is invalidated correctly across rebuilds.
describeBuilder(buildWebpackBrowser, BROWSER_BUILDER_INFO, (harness) => {
  describe('Behavior: "Rebuild Error"', () => {
    it('detects template errors with no AOT codegen or TS emit differences', async () => {
      harness.useTarget('build', {
        ...BASE_OPTIONS,
        aot: true,
        watch: true,
      });
      const goodDirectiveContents = `
import { Directive, Input } from '@angular/core';
@Directive({ selector: 'dir' })
export class Dir {
@Input() foo: number;
}
`;
      const typeErrorText = `Type 'number' is not assignable to type 'string'.`;
      // Create a directive and add to application
      await harness.writeFile('src/app/dir.ts', goodDirectiveContents);
      await harness.writeFile(
        'src/app/app.module.ts',
        `
import { NgModule } from '@angular/core';
import { BrowserModule } from '@angular/platform-browser';
import { AppComponent } from './app.component';
import { Dir } from './dir';
@NgModule({
declarations: [
AppComponent,
Dir,
],
imports: [
BrowserModule
],
providers: [],
bootstrap: [AppComponent]
})
export class AppModule { }
`,
      );
      // Create app component that uses the directive
      await harness.writeFile(
        'src/app/app.component.ts',
        `
import { Component } from '@angular/core'
@Component({
selector: 'app-root',
template: '<dir [foo]="123">',
})
export class AppComponent { }
`,
      );
      // Each emitted build result is inspected by its sequence index; the
      // writeFile/modifyFile calls inside each case trigger the next rebuild.
      const buildCount = await harness
        .execute({ outputLogsOnFailure: false })
        .pipe(
          timeout(60000),
          concatMap(async ({ result, logs }, index) => {
            switch (index) {
              case 0:
                expect(result?.success).toBeTrue();
                // Update directive to use a different input type for 'foo' (number -> string)
                // Should cause a template error
                await harness.writeFile(
                  'src/app/dir.ts',
                  `
import { Directive, Input } from '@angular/core';
@Directive({ selector: 'dir' })
export class Dir {
@Input() foo: string;
}
`,
                );
                break;
              case 1:
                expect(result?.success).toBeFalse();
                expect(logs).toContain(
                  jasmine.objectContaining<logging.LogEntry>({
                    message: jasmine.stringMatching(typeErrorText),
                  }),
                );
                // Make an unrelated change to verify error cache was updated
                // Should persist error in the next rebuild
                await harness.modifyFile('src/main.ts', (content) => content + '\n');
                break;
              case 2:
                expect(result?.success).toBeFalse();
                expect(logs).toContain(
                  jasmine.objectContaining<logging.LogEntry>({
                    message: jasmine.stringMatching(typeErrorText),
                  }),
                );
                // Revert the directive change that caused the error
                // Should remove the error
                await harness.writeFile('src/app/dir.ts', goodDirectiveContents);
                break;
              case 3:
                expect(result?.success).toBeTrue();
                expect(logs).not.toContain(
                  jasmine.objectContaining<logging.LogEntry>({
                    message: jasmine.stringMatching(typeErrorText),
                  }),
                );
                // Make an unrelated change to verify error cache was updated
                // Should continue showing no error
                await harness.modifyFile('src/main.ts', (content) => content + '\n');
                break;
              case 4:
                expect(result?.success).toBeTrue();
                expect(logs).not.toContain(
                  jasmine.objectContaining<logging.LogEntry>({
                    message: jasmine.stringMatching(typeErrorText),
                  }),
                );
                break;
            }
          }),
          // Stop after five builds and emit how many were observed.
          take(5),
          count(),
        )
        .toPromise();
      expect(buildCount).toBe(5);
    });
    it('detects template errors with AOT codegen differences', async () => {
      harness.useTarget('build', {
        ...BASE_OPTIONS,
        aot: true,
        watch: true,
      });
      const typeErrorText = `Type 'number' is not assignable to type 'string'.`;
      // Create two directives and add to application
      await harness.writeFile(
        'src/app/dir.ts',
        `
import { Directive, Input } from '@angular/core';
@Directive({ selector: 'dir' })
export class Dir {
@Input() foo: number;
}
`,
      );
      // Same selector with a different type on the `foo` property but initially no `@Input`
      const goodDirectiveContents = `
import { Directive } from '@angular/core';
@Directive({ selector: 'dir' })
export class Dir2 {
foo: string;
}
`;
      await harness.writeFile('src/app/dir2.ts', goodDirectiveContents);
      await harness.writeFile(
        'src/app/app.module.ts',
        `
import { NgModule } from '@angular/core';
import { BrowserModule } from '@angular/platform-browser';
import { AppComponent } from './app.component';
import { Dir } from './dir';
import { Dir2 } from './dir2';
@NgModule({
declarations: [
AppComponent,
Dir,
Dir2,
],
imports: [
BrowserModule
],
providers: [],
bootstrap: [AppComponent]
})
export class AppModule { }
`,
      );
      // Create app component that uses the directive
      await harness.writeFile(
        'src/app/app.component.ts',
        `
import { Component } from '@angular/core'
@Component({
selector: 'app-root',
template: '<dir [foo]="123">',
})
export class AppComponent { }
`,
      );
      // Same index-driven rebuild sequence as above, but the error is
      // introduced via the second directive sharing the 'dir' selector.
      const buildCount = await harness
        .execute({ outputLogsOnFailure: false })
        .pipe(
          timeout(60000),
          concatMap(async ({ result, logs }, index) => {
            switch (index) {
              case 0:
                expect(result?.success).toBeTrue();
                // Update second directive to use string property `foo` as an Input
                // Should cause a template error
                await harness.writeFile(
                  'src/app/dir2.ts',
                  `
import { Directive, Input } from '@angular/core';
@Directive({ selector: 'dir' })
export class Dir2 {
@Input() foo: string;
}
`,
                );
                break;
              case 1:
                expect(result?.success).toBeFalse();
                expect(logs).toContain(
                  jasmine.objectContaining<logging.LogEntry>({
                    message: jasmine.stringMatching(typeErrorText),
                  }),
                );
                // Make an unrelated change to verify error cache was updated
                // Should persist error in the next rebuild
                await harness.modifyFile('src/main.ts', (content) => content + '\n');
                break;
              case 2:
                expect(result?.success).toBeFalse();
                expect(logs).toContain(
                  jasmine.objectContaining<logging.LogEntry>({
                    message: jasmine.stringMatching(typeErrorText),
                  }),
                );
                // Revert the directive change that caused the error
                // Should remove the error
                await harness.writeFile('src/app/dir2.ts', goodDirectiveContents);
                break;
              case 3:
                expect(result?.success).toBeTrue();
                expect(logs).not.toContain(
                  jasmine.objectContaining<logging.LogEntry>({
                    message: jasmine.stringMatching(typeErrorText),
                  }),
                );
                // Make an unrelated change to verify error cache was updated
                // Should continue showing no error
                await harness.modifyFile('src/main.ts', (content) => content + '\n');
                break;
              case 4:
                expect(result?.success).toBeTrue();
                expect(logs).not.toContain(
                  jasmine.objectContaining<logging.LogEntry>({
                    message: jasmine.stringMatching(typeErrorText),
                  }),
                );
                break;
            }
          }),
          take(5),
          count(),
        )
        .toPromise();
      expect(buildCount).toBe(5);
    });
    it('recovers from component stylesheet error', async () => {
      harness.useTarget('build', {
        ...BASE_OPTIONS,
        watch: true,
        aot: false,
      });
      // Three builds: clean -> broken component stylesheet -> fixed stylesheet.
      const buildCount = await harness
        .execute({ outputLogsOnFailure: false })
        .pipe(
          timeout(30000),
          concatMap(async ({ result, logs }, index) => {
            switch (index) {
              case 0:
                expect(result?.success).toBeTrue();
                await harness.writeFile('src/app/app.component.css', 'invalid-css-content');
                break;
              case 1:
                expect(result?.success).toBeFalse();
                expect(logs).toContain(
                  jasmine.objectContaining<logging.LogEntry>({
                    message: jasmine.stringMatching('invalid-css-content'),
                  }),
                );
                await harness.writeFile('src/app/app.component.css', 'p { color: green }');
                break;
              case 2:
                expect(result?.success).toBeTrue();
                expect(logs).not.toContain(
                  jasmine.objectContaining<logging.LogEntry>({
                    message: jasmine.stringMatching('invalid-css-content'),
                  }),
                );
                harness.expectFile('dist/main.js').content.toContain('p { color: green }');
                break;
            }
          }),
          take(3),
          count(),
        )
        .toPromise();
      expect(buildCount).toBe(3);
    });
  });
});
import {
Dereference,
decodePtrInit,
compilePointerDereference,
setValueAtPath,
encodePointer,
encodeUriFragmentIdentifier,
pickDecoder,
unsetValueAtPath,
decodeRelativePointer,
} from './util';
import {
JsonStringPointer,
UriFragmentIdentifierPointer,
Pointer,
RelativeJsonPointer,
PathSegments,
Encoder,
JsonStringPointerListItem,
UriFragmentIdentifierPointerListItem,
} from './types';
/**
* Determines if the value is an object (not null)
* @param value the value
* @returns true if the value is a non-null object; otherwise false.
*
* @hidden
*/
function isObject(value: unknown): boolean {
  // null is typeof 'object', so exclude it explicitly.
  return value !== null && typeof value === 'object';
}
/**
* Signature of visitor functions, used with [[JsonPointer.visit]] method. Visitors are callbacks invoked for every segment/branch of a target's object graph.
*
* Tree descent occurs in-order, breadth first.
*/
export type Visitor = (ptr: JsonStringPointer, val: unknown) => void;
/** @hidden */
interface Item {
  /** The value at this location in the traversal. */
  obj: unknown;
  /** The decoded path segments leading to `obj`. */
  path: PathSegments;
}
/** @hidden */
function shouldDescend(obj: unknown): boolean {
  // Only descend into real containers; a JsonReference marks a location that
  // has already been visited and must not be walked again.
  if (!isObject(obj)) {
    return false;
  }
  return !JsonReference.isReference(obj);
}
/** @hidden */
function descendingVisit(
target: unknown,
visitor: Visitor,
encoder: Encoder,
): void {
const distinctObjects = new Map<unknown, JsonPointer>();
const q: Item[] = [{ obj: target, path: [] }];
while (q.length) {
const { obj, path } = q.shift() as Item;
visitor(encoder(path), obj);
if (shouldDescend(obj)) {
distinctObjects.set(
obj,
new JsonPointer(encodeUriFragmentIdentifier(path)),
);
if (!Array.isArray(obj)) {
const keys = Object.keys(obj as Record<string, unknown>);
const len = keys.length;
let i = -1;
while (++i < len) {
const it = (obj as Record<string, unknown>)[keys[i]];
if (isObject(it) && distinctObjects.has(it)) {
q.push({
obj: new JsonReference(distinctObjects.get(it) as JsonPointer),
path: path.concat(keys[i]),
});
} else {
q.push({
obj: it,
path: path.concat(keys[i]),
});
}
}
} else {
// handleArray
let j = -1;
const len = obj.length;
while (++j < len) {
const it = obj[j];
if (isObject(it) && distinctObjects.has(it)) {
q.push({
obj: new JsonReference(distinctObjects.get(it) as JsonPointer),
path: path.concat([j + '']),
});
} else {
q.push({
obj: it,
path: path.concat([j + '']),
});
}
}
}
}
}
}
/** @hidden — symbol key for the cached JSON Pointer string representation. */
const $ptr = Symbol('pointer');
/** @hidden — symbol key for the cached URI fragment identifier representation. */
const $frg = Symbol('fragmentId');
/** @hidden — symbol key for the lazily compiled dereference function. */
const $get = Symbol('getter');
/**
* Represents a JSON Pointer, capable of getting and setting the value on target
* objects at the pointer's location.
*
* While there are static variants for most operations, our recommendation is
* to use the instance level methods, which enables you avoid repeated
* compiling/emitting transient accessors. Take a look at the speed comparisons
* for our justification.
*
* In most cases, you should create and reuse instances of JsonPointer within
* scope that makes sense for your app. We often create constants for frequently
* used pointers, but your use case may vary.
*
* The following is a contrived example showing a function that uses pointers to
* deal with changes in the structure of data (a version independent function):
*
* ```ts
* import { JsonPointer } from 'json-ptr';
*
* export type SupportedVersion = '1.0' | '1.1';
*
* interface PrimaryGuestNamePointers {
* name: JsonPointer;
* surname: JsonPointer;
* honorific: JsonPointer;
* }
* const versions: Record<SupportedVersion, PrimaryGuestNamePointers> = {
* '1.0': {
* name: JsonPointer.create('/guests/0/name'),
* surname: JsonPointer.create('/guests/0/surname'),
* honorific: JsonPointer.create('/guests/0/honorific'),
* },
* '1.1': {
* name: JsonPointer.create('/primary/primaryGuest/name'),
* surname: JsonPointer.create('/primary/primaryGuest/surname'),
* honorific: JsonPointer.create('/primary/primaryGuest/honorific'),
* }
* };
*
* interface Reservation extends Record<string, unknown> {
* version?: SupportedVersion;
* }
*
* function primaryGuestName(reservation: Reservation): string {
* const pointers = versions[reservation.version || '1.0'];
* const name = pointers.name.get(reservation) as string;
* const surname = pointers.surname.get(reservation) as string;
* const honorific = pointers.honorific.get(reservation) as string;
* const names: string[] = [];
* if (honorific) names.push(honorific);
* if (name) names.push(name);
* if (surname) names.push(surname);
* return names.join(' ');
* }
*
* // The original layout of a reservation (only the parts relevant to our example)
* const reservationV1: Reservation = {
* guests: [{
* name: 'Wilbur',
* surname: 'Finkle',
* honorific: 'Mr.'
* }, {
* name: 'Wanda',
* surname: 'Finkle',
* honorific: 'Mrs.'
* }, {
* name: 'Wilma',
* surname: 'Finkle',
* honorific: 'Miss',
* child: true,
* age: 12
* }]
* // ...
* };
*
* // The new layout of a reservation (only the parts relevant to our example)
* const reservationV1_1: Reservation = {
* version: '1.1',
* primary: {
* primaryGuest: {
* name: 'Wilbur',
* surname: 'Finkle',
* honorific: 'Mr.'
* },
* additionalGuests: [{
* name: 'Wanda',
* surname: 'Finkle',
* honorific: 'Mrs.'
* }, {
* name: 'Wilma',
* surname: 'Finkle',
* honorific: 'Miss',
* child: true,
* age: 12
* }]
* // ...
* }
* // ...
* };
*
* console.log(primaryGuestName(reservationV1));
* console.log(primaryGuestName(reservationV1_1));
*
* ```
*
* There are many uses for pointers.
*/
export class JsonPointer {
  /** @hidden — lazily cached JSON Pointer string form. */
  private [$ptr]: JsonStringPointer;
  /** @hidden — lazily cached URI fragment identifier form. */
  private [$frg]: UriFragmentIdentifierPointer;
  /** @hidden — lazily compiled dereference function. */
  private [$get]: Dereference;
  /**
   * Factory function that creates a JsonPointer instance.
   *
   * ```ts
   * const ptr = JsonPointer.create('/deeply/nested/data/0/here');
   * ```
   * _or_
   * ```ts
   * const ptr = JsonPointer.create(['deeply', 'nested', 'data', 0, 'here']);
   * ```
   * @param pointer the pointer or path.
   */
  static create(pointer: Pointer | PathSegments): JsonPointer {
    return new JsonPointer(pointer);
  }
  /**
   * Determines if the specified `target`'s object graph has a value at the `pointer`'s location.
   * @param target the target of the operation
   * @param pointer the pointer or path
   * @returns true when a (non-undefined) value exists at the pointer's location.
   */
  static has(
    target: unknown,
    pointer: Pointer | PathSegments | JsonPointer,
  ): boolean {
    if (typeof pointer === 'string' || Array.isArray(pointer)) {
      pointer = new JsonPointer(pointer);
    }
    return (pointer as JsonPointer).has(target);
  }
  /**
   * Gets the `target` object's value at the `pointer`'s location.
   * @param target the target of the operation
   * @param pointer the pointer or path.
   * @returns the value at the pointer's location; otherwise undefined.
   */
  static get(
    target: unknown,
    pointer: Pointer | PathSegments | JsonPointer,
  ): unknown {
    if (typeof pointer === 'string' || Array.isArray(pointer)) {
      pointer = new JsonPointer(pointer);
    }
    return (pointer as JsonPointer).get(target);
  }
  /**
   * Sets the `target` object's value, as specified, at the `pointer`'s location.
   * @param target the target of the operation
   * @param pointer the pointer or path
   * @param val a value to write into the object graph at the specified pointer location
   * @param force indications whether the operation should force the pointer's location into existence in the object graph.
   * @returns the prior value at the pointer's location in the object graph.
   */
  static set(
    target: unknown,
    pointer: Pointer | PathSegments | JsonPointer,
    val: unknown,
    force = false,
  ): unknown {
    if (typeof pointer === 'string' || Array.isArray(pointer)) {
      pointer = new JsonPointer(pointer);
    }
    return (pointer as JsonPointer).set(target, val, force);
  }
  /**
   * Removes the `target` object's value at the `pointer`'s location.
   * @param target the target of the operation
   * @param pointer the pointer or path
   * @returns the value that was removed from the object graph.
   */
  static unset(
    target: unknown,
    pointer: Pointer | PathSegments | JsonPointer,
  ): unknown {
    if (typeof pointer === 'string' || Array.isArray(pointer)) {
      pointer = new JsonPointer(pointer);
    }
    return (pointer as JsonPointer).unset(target);
  }
  /**
   * Decodes the specified pointer into path segments.
   * @param pointer a string representation of a JSON Pointer
   */
  static decode(pointer: Pointer): PathSegments {
    // pickDecoder selects the JSON-string vs URI-fragment decoder by prefix.
    return pickDecoder(pointer)(pointer);
  }
  /**
   * Evaluates the target's object graph, calling the specified visitor for every unique pointer location discovered while walking the graph.
   * @param target the target of the operation
   * @param visitor a callback function invoked for each unique pointer location in the object graph
   * @param fragmentId indicates whether the visitor should receive fragment identifiers or regular pointers
   */
  static visit(target: unknown, visitor: Visitor, fragmentId = false): void {
    descendingVisit(
      target,
      visitor,
      fragmentId ? encodeUriFragmentIdentifier : encodePointer,
    );
  }
  /**
   * Evaluates the target's object graph, returning a [[JsonStringPointerListItem]] for each location in the graph.
   * @param target the target of the operation
   */
  static listPointers(target: unknown): JsonStringPointerListItem[] {
    const res: JsonStringPointerListItem[] = [];
    descendingVisit(
      target,
      (pointer, value): void => {
        res.push({ pointer, value });
      },
      encodePointer,
    );
    return res;
  }
  /**
   * Evaluates the target's object graph, returning a [[UriFragmentIdentifierPointerListItem]] for each location in the graph.
   * @param target the target of the operation
   */
  static listFragmentIds(
    target: unknown,
  ): UriFragmentIdentifierPointerListItem[] {
    const res: UriFragmentIdentifierPointerListItem[] = [];
    descendingVisit(
      target,
      (fragmentId, value): void => {
        res.push({ fragmentId, value });
      },
      encodeUriFragmentIdentifier,
    );
    return res;
  }
  /**
   * Evaluates the target's object graph, returning a Record<Pointer, unknown> populated with pointers and the corresponding values from the graph.
   * @param target the target of the operation
   * @param fragmentId indicates whether the results are populated with fragment identifiers rather than regular pointers
   */
  static flatten(
    target: unknown,
    fragmentId = false,
  ): Record<Pointer, unknown> {
    const res: Record<Pointer, unknown> = {};
    descendingVisit(
      target,
      (p, v) => {
        res[p] = v;
      },
      fragmentId ? encodeUriFragmentIdentifier : encodePointer,
    );
    return res;
  }
  /**
   * Evaluates the target's object graph, returning a Map<Pointer,unknown> populated with pointers and the corresponding values form the graph.
   * @param target the target of the operation
   * @param fragmentId indicates whether the results are populated with fragment identifiers rather than regular pointers
   */
  static map(target: unknown, fragmentId = false): Map<Pointer, unknown> {
    const res = new Map<Pointer, unknown>();
    descendingVisit(
      target,
      res.set.bind(res),
      fragmentId ? encodeUriFragmentIdentifier : encodePointer,
    );
    return res;
  }
  /**
   * The pointer's decoded path segments.
   */
  public readonly path: PathSegments;
  /**
   * Creates a new instance.
   * @param ptr a string representation of a JSON Pointer, or a decoded array of path segments.
   */
  constructor(ptr: Pointer | PathSegments) {
    this.path = decodePtrInit(ptr);
  }
  /**
   * Gets the target object's value at the pointer's location.
   * @param target the target of the operation
   */
  get(target: unknown): unknown {
    // Compile the dereference function lazily and cache it for reuse.
    if (!this[$get]) {
      this[$get] = compilePointerDereference(this.path);
    }
    return this[$get](target);
  }
  /**
   * Sets the target object's value, as specified, at the pointer's location.
   *
   * If any part of the pointer's path does not exist, the operation aborts
   * without modification, unless the caller indicates that pointer's location
   * should be created.
   *
   * @param target the target of the operation
   * @param value the value to set
   * @param force indicates whether the pointer's location should be created if it doesn't already exist.
   */
  set(target: unknown, value: unknown, force = false): unknown {
    return setValueAtPath(target, value, this.path, force);
  }
  /**
   * Removes the target object's value at the pointer's location.
   * @param target the target of the operation
   * @returns the value that was removed from the object graph.
   */
  unset(target: unknown): unknown {
    return unsetValueAtPath(target, this.path);
  }
  /**
   * Determines if the specified target's object graph has a value at the pointer's location.
   * @param target the target of the operation
   */
  has(target: unknown): boolean {
    return typeof this.get(target) !== 'undefined';
  }
  /**
   * Gets the value in the object graph that is the parent of the pointer location.
   * @param target the target of the operation
   * @returns the parent value, or undefined when this pointer has no parent
   * within the graph (root pointer or a single-segment pointer).
   */
  parent(target: unknown): unknown {
    const p = this.path;
    // Fix: a root pointer (empty path) has no parent either; previously only
    // single-segment paths were guarded, so the root "parented" to the target
    // itself.
    if (p.length < 2) return undefined;
    const parent = new JsonPointer(p.slice(0, p.length - 1));
    return parent.get(target);
  }
  /**
   * Creates a new JsonPointer instance, pointing to the specified relative location in the object graph.
   * @param ptr the relative pointer (relative to this)
   * @returns A new instance that points to the relative location.
   * @throws when the relative location does not exist, or when the relative
   * pointer ends with '#' (a name reference cannot be compiled; use [[rel]]).
   */
  relative(ptr: RelativeJsonPointer): JsonPointer {
    const p = this.path;
    const decoded = decodeRelativePointer(ptr) as string[];
    const n = parseInt(decoded[0], 10);
    if (n > p.length) throw new Error('Relative location does not exist.');
    const r = p.slice(0, p.length - n).concat(decoded.slice(1));
    if (decoded[0][decoded[0].length - 1] === '#') {
      // It references the path segment/name, not the value
      const name = r[r.length - 1] as string;
      throw new Error(
        `We won't compile a pointer that will always return '${name}'. Use JsonPointer.rel(target, ptr) instead.`,
      );
    }
    return new JsonPointer(r);
  }
  /**
   * Resolves the specified relative pointer path against the specified target object, and gets the target object's value at the relative pointer's location.
   * @param target the target of the operation
   * @param ptr the relative pointer (relative to this)
   * @returns the value at the relative pointer's resolved path; otherwise undefined.
   */
  rel(target: unknown, ptr: RelativeJsonPointer): unknown {
    const p = this.path;
    const decoded = decodeRelativePointer(ptr) as string[];
    const n = parseInt(decoded[0], 10);
    if (n > p.length) {
      // out of bounds
      return undefined;
    }
    const r = p.slice(0, p.length - n).concat(decoded.slice(1));
    const other = new JsonPointer(r);
    if (decoded[0][decoded[0].length - 1] === '#') {
      // It references the path segment/name, not the value
      const name = r[r.length - 1] as string;
      const parent = other.parent(target);
      // Array parents yield the numeric index; object parents yield the key.
      return Array.isArray(parent) ? parseInt(name, 10) : name;
    }
    return other.get(target);
  }
  /**
   * Creates a new instance by concatenating the specified pointer's path onto this pointer's path.
   * @param ptr the string representation of a pointer, it's decoded path, or an instance of JsonPointer indicating the additional path to concatenate onto the pointer.
   */
  concat(ptr: JsonPointer | Pointer | PathSegments): JsonPointer {
    return new JsonPointer(
      this.path.concat(
        ptr instanceof JsonPointer ? ptr.path : decodePtrInit(ptr),
      ),
    );
  }
  /**
   * This pointer's JSON Pointer encoded string representation.
   */
  get pointer(): JsonStringPointer {
    if (this[$ptr] === undefined) {
      this[$ptr] = encodePointer(this.path);
    }
    return this[$ptr];
  }
  /**
   * This pointer's URI fragment identifier encoded string representation.
   */
  get uriFragmentIdentifier(): UriFragmentIdentifierPointer {
    // Consistency: use the same cache check as the `pointer` getter.
    if (this[$frg] === undefined) {
      this[$frg] = encodeUriFragmentIdentifier(this.path);
    }
    return this[$frg];
  }
  /**
   * Emits the JSON Pointer encoded string representation.
   */
  toString(): string {
    return this.pointer;
  }
}
/** @hidden — symbol key for the backing JsonPointer of a JsonReference. */
const $pointer = Symbol('pointer');
/**
* A reference to a location in an object graph.
*
* This type is used by this module to break cycles in an object graph and to
* reference locations that have already been visited when enumerating pointers.
*/
export class JsonReference {
/**
* Determines if the specified `candidate` is a JsonReference.
* @param candidate the candidate
*/
static isReference(candidate: unknown): candidate is JsonReference {
if (!candidate) return false;
const ref = candidate as unknown as JsonReference;
return typeof ref.$ref === 'string' && typeof ref.resolve === 'function';
}
/** @hidden */
private readonly [$pointer]: JsonPointer;
/**
* A reference to a position if an object graph.
*/
public readonly $ref: UriFragmentIdentifierPointer;
/**
* Creates a new instance.
* @param pointer a JSON Pointer for the reference.
*/
constructor(pointer: JsonPointer | Pointer | PathSegments) {
this[$pointer] =
pointer instanceof JsonPointer ? pointer : new JsonPointer(pointer);
this.$ref = this[$pointer].uriFragmentIdentifier;
}
/**
* Resolves the reference against the `target` object, returning the value at
* the referenced pointer's location.
* @param target the target object
*/
resolve(target: unknown): unknown {
return this[$pointer].get(target);
}
/**
* Gets the reference's pointer.
*/
pointer(): JsonPointer {
return this[$pointer];
}
/**
* Gets the reference pointer's string representation (a URI fragment identifier).
*/
toString(): string {
return this.$ref;
}
} | the_stack |
import { ATN } from "antlr4ts/atn/ATN";
import { ATNDeserializer } from "antlr4ts/atn/ATNDeserializer";
import { CharStream } from "antlr4ts/CharStream";
import { Lexer } from "antlr4ts/Lexer";
import { LexerATNSimulator } from "antlr4ts/atn/LexerATNSimulator";
import { NotNull } from "antlr4ts/Decorators";
import { Override } from "antlr4ts/Decorators";
import { RuleContext } from "antlr4ts/RuleContext";
import { Vocabulary } from "antlr4ts/Vocabulary";
import { VocabularyImpl } from "antlr4ts/VocabularyImpl";
import * as Utils from "antlr4ts/misc/Utils";
// NOTE(review): ANTLR-generated lexer support code (grammar file:
// ANTLRv4LexBasic.g4). The rule tables and the serialized ATN below are
// machine-produced; regenerate from the grammar rather than editing by hand.
export class ANTLRv4LexBasic extends Lexer {
// tslint:disable:no-trailing-whitespace
// Token channels available to this lexer (default + hidden).
public static readonly channelNames: string[] = [
"DEFAULT_TOKEN_CHANNEL", "HIDDEN",
];
// tslint:disable:no-trailing-whitespace
// Lexer modes; this basic lexer only uses the default mode.
public static readonly modeNames: string[] = [
"DEFAULT_MODE",
];
// Names of the lexer rules, in the order they appear in the grammar.
public static readonly ruleNames: string[] = [
"Ws", "Hws", "Vws", "BlockComment", "DocComment", "LineComment", "EscSeq",
"EscAny", "UnicodeEsc", "DecimalNumeral", "HexDigit", "DecDigit", "BoolLiteral",
"CharLiteral", "SQuoteLiteral", "DQuoteLiteral", "USQuoteLiteral", "NameChar",
"NameStartChar", "Int", "Esc", "Colon", "DColon", "SQuote", "DQuote",
"LParen", "RParen", "LBrace", "RBrace", "LBrack", "RBrack", "RArrow",
"Lt", "Gt", "Equal", "Question", "Star", "Plus", "PlusAssign", "Underscore",
"Pipe", "Dollar", "Comma", "Semi", "Dot", "Range", "At", "Pound", "Tilde",
];
// No literal/symbolic token names: every rule in this grammar is a fragment
// helper, so the vocabulary tables are intentionally empty.
private static readonly _LITERAL_NAMES: Array<string | undefined> = [
];
private static readonly _SYMBOLIC_NAMES: Array<string | undefined> = [
];
public static readonly VOCABULARY: Vocabulary = new VocabularyImpl(ANTLRv4LexBasic._LITERAL_NAMES, ANTLRv4LexBasic._SYMBOLIC_NAMES, []);
// @Override
// @NotNull
public get vocabulary(): Vocabulary {
return ANTLRv4LexBasic.VOCABULARY;
}
// tslint:enable:no-trailing-whitespace
constructor(input: CharStream) {
super(input);
// Wire up the lexer's ATN simulator against the (lazily built) shared ATN.
this._interp = new LexerATNSimulator(ANTLRv4LexBasic._ATN, this);
}
// @Override
public get grammarFileName(): string { return "ANTLRv4LexBasic.g4"; }
// @Override
public get ruleNames(): string[] { return ANTLRv4LexBasic.ruleNames; }
// @Override
public get serializedATN(): string { return ANTLRv4LexBasic._serializedATN; }
// @Override
public get channelNames(): string[] { return ANTLRv4LexBasic.channelNames; }
// @Override
public get modeNames(): string[] { return ANTLRv4LexBasic.modeNames; }
// Serialized ATN (Augmented Transition Network) encoding the lexer's state
// machine; consumed by ATNDeserializer in the `_ATN` getter below. Opaque
// generated data — do not modify.
public static readonly _serializedATN: string =
"\x03\uC91D\uCABA\u058D\uAFBA\u4F53\u0607\uEA8B\uC241\x02\x02\u0130\b\x01" +
"\x04\x02\t\x02\x04\x03\t\x03\x04\x04\t\x04\x04\x05\t\x05\x04\x06\t\x06" +
"\x04\x07\t\x07\x04\b\t\b\x04\t\t\t\x04\n\t\n\x04\v\t\v\x04\f\t\f\x04\r" +
"\t\r\x04\x0E\t\x0E\x04\x0F\t\x0F\x04\x10\t\x10\x04\x11\t\x11\x04\x12\t" +
"\x12\x04\x13\t\x13\x04\x14\t\x14\x04\x15\t\x15\x04\x16\t\x16\x04\x17\t" +
"\x17\x04\x18\t\x18\x04\x19\t\x19\x04\x1A\t\x1A\x04\x1B\t\x1B\x04\x1C\t" +
"\x1C\x04\x1D\t\x1D\x04\x1E\t\x1E\x04\x1F\t\x1F\x04 \t \x04!\t!\x04\"\t" +
"\"\x04#\t#\x04$\t$\x04%\t%\x04&\t&\x04\'\t\'\x04(\t(\x04)\t)\x04*\t*\x04" +
"+\t+\x04,\t,\x04-\t-\x04.\t.\x04/\t/\x040\t0\x041\t1\x042\t2\x03\x02\x03" +
"\x02\x05\x02h\n\x02\x03\x03\x03\x03\x03\x04\x03\x04\x03\x05\x03\x05\x03" +
"\x05\x03\x05\x07\x05r\n\x05\f\x05\x0E\x05u\v\x05\x03\x05\x03\x05\x03\x05" +
"\x05\x05z\n\x05\x03\x06\x03\x06\x03\x06\x03\x06\x03\x06\x07\x06\x81\n" +
"\x06\f\x06\x0E\x06\x84\v\x06\x03\x06\x03\x06\x03\x06\x05\x06\x89\n\x06" +
"\x03\x07\x03\x07\x03\x07\x03\x07\x07\x07\x8F\n\x07\f\x07\x0E\x07\x92\v" +
"\x07\x03\b\x03\b\x03\b\x03\b\x03\b\x05\b\x99\n\b\x03\t\x03\t\x03\t\x03" +
"\n\x03\n\x03\n\x03\n\x03\n\x05\n\xA3\n\n\x05\n\xA5\n\n\x05\n\xA7\n\n\x05" +
"\n\xA9\n\n\x03\v\x03\v\x03\v\x07\v\xAE\n\v\f\v\x0E\v\xB1\v\v\x05\v\xB3" +
"\n\v\x03\f\x03\f\x03\r\x03\r\x03\x0E\x03\x0E\x03\x0E\x03\x0E\x03\x0E\x03" +
"\x0E\x03\x0E\x03\x0E\x03\x0E\x05\x0E\xC2\n\x0E\x03\x0F\x03\x0F\x03\x0F" +
"\x05\x0F\xC7\n\x0F\x03\x0F\x03\x0F\x03\x10\x03\x10\x03\x10\x07\x10\xCE" +
"\n\x10\f\x10\x0E\x10\xD1\v\x10\x03\x10\x03\x10\x03\x11\x03\x11\x03\x11" +
"\x07\x11\xD8\n\x11\f\x11\x0E\x11\xDB\v\x11\x03\x11\x03\x11\x03\x12\x03" +
"\x12\x03\x12\x07\x12\xE2\n\x12\f\x12\x0E\x12\xE5\v\x12\x03\x13\x03\x13" +
"\x03\x13\x03\x13\x05\x13\xEB\n\x13\x03\x14\x03\x14\x03\x15\x03\x15\x03" +
"\x15\x03\x15\x03\x16\x03\x16\x03\x17\x03\x17\x03\x18\x03\x18\x03\x18\x03" +
"\x19\x03\x19\x03\x1A\x03\x1A\x03\x1B\x03\x1B\x03\x1C\x03\x1C\x03\x1D\x03" +
"\x1D\x03\x1E\x03\x1E\x03\x1F\x03\x1F\x03 \x03 \x03!\x03!\x03!\x03\"\x03" +
"\"\x03#\x03#\x03$\x03$\x03%\x03%\x03&\x03&\x03\'\x03\'\x03(\x03(\x03(" +
"\x03)\x03)\x03*\x03*\x03+\x03+\x03,\x03,\x03-\x03-\x03.\x03.\x03/\x03" +
"/\x03/\x030\x030\x031\x031\x032\x032\x04s\x82\x02\x023\x03\x02\x02\x05" +
"\x02\x02\x07\x02\x02\t\x02\x02\v\x02\x02\r\x02\x02\x0F\x02\x02\x11\x02" +
"\x02\x13\x02\x02\x15\x02\x02\x17\x02\x02\x19\x02\x02\x1B\x02\x02\x1D\x02" +
"\x02\x1F\x02\x02!\x02\x02#\x02\x02%\x02\x02\'\x02\x02)\x02\x02+\x02\x02" +
"-\x02\x02/\x02\x021\x02\x023\x02\x025\x02\x027\x02\x029\x02\x02;\x02\x02" +
"=\x02\x02?\x02\x02A\x02\x02C\x02\x02E\x02\x02G\x02\x02I\x02\x02K\x02\x02" +
"M\x02\x02O\x02\x02Q\x02\x02S\x02\x02U\x02\x02W\x02\x02Y\x02\x02[\x02\x02" +
"]\x02\x02_\x02\x02a\x02\x02c\x02\x02\x03\x02\r\x04\x02\v\v\"\"\x04\x02" +
"\f\f\x0E\x0F\x04\x02\f\f\x0F\x0F\n\x02$$))^^ddhhppttvv\x03\x023;\x05\x02" +
"2;CHch\x03\x022;\x06\x02\f\f\x0F\x0F))^^\x06\x02\f\f\x0F\x0F$$^^\x05\x02" +
"\xB9\xB9\u0302\u0371\u2041\u2042\x0F\x02C\\c|\xC2\xD8\xDA\xF8\xFA\u0301" +
"\u0372\u037F\u0381\u2001\u200E\u200F\u2072\u2191\u2C02\u2FF1\u3003\uD801" +
"\uF902\uFDD1\uFDF2\uFFFF\x02\u0118\x03g\x03\x02\x02\x02\x05i\x03\x02\x02" +
"\x02\x07k\x03\x02\x02\x02\tm\x03\x02\x02\x02\v{\x03\x02\x02\x02\r\x8A" +
"\x03\x02\x02\x02\x0F\x93\x03\x02\x02\x02\x11\x9A\x03\x02\x02\x02\x13\x9D" +
"\x03\x02\x02\x02\x15\xB2\x03\x02\x02\x02\x17\xB4\x03\x02\x02\x02\x19\xB6" +
"\x03\x02\x02\x02\x1B\xC1\x03\x02\x02\x02\x1D\xC3\x03\x02\x02\x02\x1F\xCA" +
"\x03\x02\x02\x02!\xD4\x03\x02\x02\x02#\xDE\x03\x02\x02\x02%\xEA\x03\x02" +
"\x02\x02\'\xEC\x03\x02\x02\x02)\xEE\x03\x02\x02\x02+\xF2\x03\x02\x02\x02" +
"-\xF4\x03\x02\x02\x02/\xF6\x03\x02\x02\x021\xF9\x03\x02\x02\x023\xFB\x03" +
"\x02\x02\x025\xFD\x03\x02\x02\x027\xFF\x03\x02\x02\x029\u0101\x03\x02" +
"\x02\x02;\u0103\x03\x02\x02\x02=\u0105\x03\x02\x02\x02?\u0107\x03\x02" +
"\x02\x02A\u0109\x03\x02\x02\x02C\u010C\x03\x02\x02\x02E\u010E\x03\x02" +
"\x02\x02G\u0110\x03\x02\x02\x02I\u0112\x03\x02\x02\x02K\u0114\x03\x02" +
"\x02\x02M\u0116\x03\x02\x02\x02O\u0118\x03\x02\x02\x02Q\u011B\x03\x02" +
"\x02\x02S\u011D\x03\x02\x02\x02U\u011F\x03\x02\x02\x02W\u0121\x03\x02" +
"\x02\x02Y\u0123\x03\x02\x02\x02[\u0125\x03\x02\x02\x02]\u0127\x03\x02" +
"\x02\x02_\u012A\x03\x02\x02\x02a\u012C\x03\x02\x02\x02c\u012E\x03\x02" +
"\x02\x02eh\x05\x05\x03\x02fh\x05\x07\x04\x02ge\x03\x02\x02\x02gf\x03\x02" +
"\x02\x02h\x04\x03\x02\x02\x02ij\t\x02\x02\x02j\x06\x03\x02\x02\x02kl\t" +
"\x03\x02\x02l\b\x03\x02\x02\x02mn\x071\x02\x02no\x07,\x02\x02os\x03\x02" +
"\x02\x02pr\v\x02\x02\x02qp\x03\x02\x02\x02ru\x03\x02\x02\x02st\x03\x02" +
"\x02\x02sq\x03\x02\x02\x02ty\x03\x02\x02\x02us\x03\x02\x02\x02vw\x07," +
"\x02\x02wz\x071\x02\x02xz\x07\x02\x02\x03yv\x03\x02\x02\x02yx\x03\x02" +
"\x02\x02z\n\x03\x02\x02\x02{|\x071\x02\x02|}\x07,\x02\x02}~\x07,\x02\x02" +
"~\x82\x03\x02\x02\x02\x7F\x81\v\x02\x02\x02\x80\x7F\x03\x02\x02\x02\x81" +
"\x84\x03\x02\x02\x02\x82\x83\x03\x02\x02\x02\x82\x80\x03\x02\x02\x02\x83" +
"\x88\x03\x02\x02\x02\x84\x82\x03\x02\x02\x02\x85\x86\x07,\x02\x02\x86" +
"\x89\x071\x02\x02\x87\x89\x07\x02\x02\x03\x88\x85\x03\x02\x02\x02\x88" +
"\x87\x03\x02\x02\x02\x89\f\x03\x02\x02\x02\x8A\x8B\x071\x02\x02\x8B\x8C" +
"\x071\x02\x02\x8C\x90\x03\x02\x02\x02\x8D\x8F\n\x04\x02\x02\x8E\x8D\x03" +
"\x02\x02\x02\x8F\x92\x03\x02\x02\x02\x90\x8E\x03\x02\x02\x02\x90\x91\x03" +
"\x02\x02\x02\x91\x0E\x03\x02\x02\x02\x92\x90\x03\x02\x02\x02\x93\x98\x05" +
"+\x16\x02\x94\x99\t\x05\x02\x02\x95\x99\x05\x13\n\x02\x96\x99\v\x02\x02" +
"\x02\x97\x99\x07\x02\x02\x03\x98\x94\x03\x02\x02\x02\x98\x95\x03\x02\x02" +
"\x02\x98\x96\x03\x02\x02\x02\x98\x97\x03\x02\x02\x02\x99\x10\x03\x02\x02" +
"\x02\x9A\x9B\x05+\x16\x02\x9B\x9C\v\x02\x02\x02\x9C\x12\x03\x02\x02\x02" +
"\x9D\xA8\x07w\x02\x02\x9E\xA6\x05\x17\f\x02\x9F\xA4\x05\x17\f\x02\xA0" +
"\xA2\x05\x17\f\x02\xA1\xA3\x05\x17\f\x02\xA2\xA1\x03\x02\x02\x02\xA2\xA3" +
"\x03\x02\x02\x02\xA3\xA5\x03\x02\x02\x02\xA4\xA0\x03\x02\x02\x02\xA4\xA5" +
"\x03\x02\x02\x02\xA5\xA7\x03\x02\x02\x02\xA6\x9F\x03\x02\x02\x02\xA6\xA7" +
"\x03\x02\x02\x02\xA7\xA9\x03\x02\x02\x02\xA8\x9E\x03\x02\x02\x02\xA8\xA9" +
"\x03\x02\x02\x02\xA9\x14\x03\x02\x02\x02\xAA\xB3\x072\x02\x02\xAB\xAF" +
"\t\x06\x02\x02\xAC\xAE\x05\x19\r\x02\xAD\xAC\x03\x02\x02\x02\xAE\xB1\x03" +
"\x02\x02\x02\xAF\xAD\x03\x02\x02\x02\xAF\xB0\x03\x02\x02\x02\xB0\xB3\x03" +
"\x02\x02\x02\xB1\xAF\x03\x02\x02\x02\xB2\xAA\x03\x02\x02\x02\xB2\xAB\x03" +
"\x02\x02\x02\xB3\x16\x03\x02\x02\x02\xB4\xB5\t\x07\x02\x02\xB5\x18\x03" +
"\x02\x02\x02\xB6\xB7\t\b\x02\x02\xB7\x1A\x03\x02\x02\x02\xB8\xB9\x07v" +
"\x02\x02\xB9\xBA\x07t\x02\x02\xBA\xBB\x07w\x02\x02\xBB\xC2\x07g\x02\x02" +
"\xBC\xBD\x07h\x02\x02\xBD\xBE\x07c\x02\x02\xBE\xBF\x07n\x02\x02\xBF\xC0" +
"\x07u\x02\x02\xC0\xC2\x07g\x02\x02\xC1\xB8\x03\x02\x02\x02\xC1\xBC\x03" +
"\x02\x02\x02\xC2\x1C\x03\x02\x02\x02\xC3\xC6\x051\x19\x02\xC4\xC7\x05" +
"\x0F\b\x02\xC5\xC7\n\t\x02\x02\xC6\xC4\x03\x02\x02\x02\xC6\xC5\x03\x02" +
"\x02\x02\xC7\xC8\x03\x02\x02\x02\xC8\xC9\x051\x19\x02\xC9\x1E\x03\x02" +
"\x02\x02\xCA\xCF\x051\x19\x02\xCB\xCE\x05\x0F\b\x02\xCC\xCE\n\t\x02\x02" +
"\xCD\xCB\x03\x02\x02\x02\xCD\xCC\x03\x02\x02\x02\xCE\xD1\x03\x02\x02\x02" +
"\xCF\xCD\x03\x02\x02\x02\xCF\xD0\x03\x02\x02\x02\xD0\xD2\x03\x02\x02\x02" +
"\xD1\xCF\x03\x02\x02\x02\xD2\xD3\x051\x19\x02\xD3 \x03\x02\x02\x02\xD4" +
"\xD9\x053\x1A\x02\xD5\xD8\x05\x0F\b\x02\xD6\xD8\n\n\x02\x02\xD7\xD5\x03" +
"\x02\x02\x02\xD7\xD6\x03\x02\x02\x02\xD8\xDB\x03\x02\x02\x02\xD9\xD7\x03" +
"\x02\x02\x02\xD9\xDA\x03\x02\x02\x02\xDA\xDC\x03\x02\x02\x02\xDB\xD9\x03" +
"\x02\x02\x02\xDC\xDD\x053\x1A\x02\xDD\"\x03\x02\x02\x02\xDE\xE3\x051\x19" +
"\x02\xDF\xE2\x05\x0F\b\x02\xE0\xE2\n\t\x02\x02\xE1\xDF\x03\x02\x02\x02" +
"\xE1\xE0\x03\x02\x02\x02\xE2\xE5\x03\x02\x02\x02\xE3\xE1\x03\x02\x02\x02" +
"\xE3\xE4\x03\x02\x02\x02\xE4$\x03\x02\x02\x02\xE5\xE3\x03\x02\x02\x02" +
"\xE6\xEB\x05\'\x14\x02\xE7\xEB\x042;\x02\xE8\xEB\x05Q)\x02\xE9\xEB\t\v" +
"\x02\x02\xEA\xE6\x03\x02\x02\x02\xEA\xE7\x03\x02\x02\x02\xEA\xE8\x03\x02" +
"\x02\x02\xEA\xE9\x03\x02\x02\x02\xEB&\x03\x02\x02\x02\xEC\xED\t\f\x02" +
"\x02\xED(\x03\x02\x02\x02\xEE\xEF\x07k\x02\x02\xEF\xF0\x07p\x02\x02\xF0" +
"\xF1\x07v\x02\x02\xF1*\x03\x02\x02\x02\xF2\xF3\x07^\x02\x02\xF3,\x03\x02" +
"\x02\x02\xF4\xF5\x07<\x02\x02\xF5.\x03\x02\x02\x02\xF6\xF7\x07<\x02\x02" +
"\xF7\xF8\x07<\x02\x02\xF80\x03\x02\x02\x02\xF9\xFA\x07)\x02\x02\xFA2\x03" +
"\x02\x02\x02\xFB\xFC\x07$\x02\x02\xFC4\x03\x02\x02\x02\xFD\xFE\x07*\x02" +
"\x02\xFE6\x03\x02\x02\x02\xFF\u0100\x07+\x02\x02\u01008\x03\x02\x02\x02" +
"\u0101\u0102\x07}\x02\x02\u0102:\x03\x02\x02\x02\u0103\u0104\x07\x7F\x02" +
"\x02\u0104<\x03\x02\x02\x02\u0105\u0106\x07]\x02\x02\u0106>\x03\x02\x02" +
"\x02\u0107\u0108\x07_\x02\x02\u0108@\x03\x02\x02\x02\u0109\u010A\x07/" +
"\x02\x02\u010A\u010B\x07@\x02\x02\u010BB\x03\x02\x02\x02\u010C\u010D\x07" +
">\x02\x02\u010DD\x03\x02\x02\x02\u010E\u010F\x07@\x02\x02\u010FF\x03\x02" +
"\x02\x02\u0110\u0111\x07?\x02\x02\u0111H\x03\x02\x02\x02\u0112\u0113\x07" +
"A\x02\x02\u0113J\x03\x02\x02\x02\u0114\u0115\x07,\x02\x02\u0115L\x03\x02" +
"\x02\x02\u0116\u0117\x07-\x02\x02\u0117N\x03\x02\x02\x02\u0118\u0119\x07" +
"-\x02\x02\u0119\u011A\x07?\x02\x02\u011AP\x03\x02\x02\x02\u011B\u011C" +
"\x07a\x02\x02\u011CR\x03\x02\x02\x02\u011D\u011E\x07~\x02\x02\u011ET\x03" +
"\x02\x02\x02\u011F\u0120\x07&\x02\x02\u0120V\x03\x02\x02\x02\u0121\u0122" +
"\x07.\x02\x02\u0122X\x03\x02\x02\x02\u0123\u0124\x07=\x02\x02\u0124Z\x03" +
"\x02\x02\x02\u0125\u0126\x070\x02\x02\u0126\\\x03\x02\x02\x02\u0127\u0128" +
"\x070\x02\x02\u0128\u0129\x070\x02\x02\u0129^\x03\x02\x02\x02\u012A\u012B" +
"\x07B\x02\x02\u012B`\x03\x02\x02\x02\u012C\u012D\x07%\x02\x02\u012Db\x03" +
"\x02\x02\x02\u012E\u012F\x07\x80\x02\x02\u012Fd\x03\x02\x02\x02\x19\x02" +
"gsy\x82\x88\x90\x98\xA2\xA4\xA6\xA8\xAF\xB2\xC1\xC6\xCD\xCF\xD7\xD9\xE1" +
"\xE3\xEA\x02";
// Backing field for the lazily-deserialized ATN (see getter below).
public static __ATN: ATN;
// Deserializes the ATN from `_serializedATN` on first access and caches it,
// so the (relatively expensive) deserialization happens at most once.
public static get _ATN(): ATN {
if (!ANTLRv4LexBasic.__ATN) {
ANTLRv4LexBasic.__ATN = new ATNDeserializer().deserialize(Utils.toCharArray(ANTLRv4LexBasic._serializedATN));
}
return ANTLRv4LexBasic.__ATN;
}
}
import { hasAccessibleAcl } from "../acl/acl";
import {
getResourceInfoWithAcr,
hasAccessibleAcr,
saveAcrFor,
} from "../acp/acp";
import { UrlString, WebId } from "../interfaces";
import {
getSourceIri,
internal_defaultFetchOptions,
} from "../resource/resource";
import {
internal_getAgentAccess as getAgentAccessAcp,
internal_getAgentAccessAll as getAgentAccessAllAcp,
internal_setAgentAccess as setAgentAccessAcp,
internal_getGroupAccess as getGroupAccessAcp,
internal_getGroupAccessAll as getGroupAccessAllAcp,
internal_setGroupAccess as setGroupAccessAcp,
internal_getPublicAccess as getPublicAccessAcp,
internal_setPublicAccess as setPublicAccessAcp,
internal_getPoliciesAndRules,
} from "./acp_v1";
import {
getAgentAccess as getAgentAccessWac,
getAgentAccessAll as getAgentAccessAllWac,
setAgentResourceAccess as setAgentAccessWac,
getGroupAccess as getGroupAccessWac,
getGroupAccessAll as getGroupAccessAllWac,
setGroupResourceAccess as setGroupAccessWac,
getPublicAccess as getPublicAccessWac,
setPublicResourceAccess as setPublicAccessWac,
WacAccess,
} from "./wac";
/**
 * Each of the following access modes is in one of two states:
 * - true: this access mode is granted, or
 * - false: this access mode is not granted.
 * @since 1.5.0
 */
export interface Access {
// Whether Read access is granted.
read: boolean;
// Whether Append access is granted.
append: boolean;
// Whether Write access is granted.
write: boolean;
// Whether the actor may view the Resource's access settings.
// NOTE: WAC cannot represent controlRead and controlWrite separately;
// the set* functions in this module throw if they differ on a WAC Pod.
controlRead: boolean;
// Whether the actor may change the Resource's access settings.
controlWrite: boolean;
}
/**
* Get an overview of what access is defined for a given Agent.
*
* This function works with Solid Pods that implement either the Web Access
* Control spec or the Access Control Policies proposal, with some caveats:
*
* - If access to the given Resource has been set using anything other than the
* functions in this module, it is possible that it has been set in a way that
* prevents this function from reliably reading access, in which case it will
* resolve to `null`.
* - It will only return access specified explicitly for the given Agent. If
* additional restrictions are set up to apply to the given Agent in a
* particular situation, those will not be reflected in the return value of
* this function.
* - It will only return access specified explicitly for the given Resource.
* In other words, if the Resource is a Container, the returned Access may not
* apply to contained Resources.
* - If the current user does not have permission to view access for the given
* Resource, this function will resolve to `null`.
*
* @param resourceUrl URL of the Resource you want to read the access for.
* @param webId WebID of the Agent you want to get the access for.
* @since 1.5.0
*/
export async function getAgentAccess(
  resourceUrl: UrlString,
  webId: WebId,
  options = internal_defaultFetchOptions
): Promise<Access | null> {
  // Fetch the Resource's metadata together with its attached Access Control
  // Resource (if any), so we can detect which access mechanism is in use.
  const withAcr = await getResourceInfoWithAcr(resourceUrl, options);
  if (hasAccessibleAcr(withAcr)) {
    // ACP-based Pod: derive the agent's access from the applicable policies.
    const acpData = await internal_getPoliciesAndRules(withAcr, options);
    return getAgentAccessAcp(acpData, webId);
  }
  if (hasAccessibleAcl(withAcr)) {
    // WAC-based Pod: read the agent's entry from the Resource's ACL.
    return getAgentAccessWac(withAcr, webId, options);
  }
  // Neither mechanism is accessible to the current user.
  return null;
}
/**
* Set access to a Resource for a specific Agent.
*
* This function works with Solid Pods that implement either the Web Access
* Control spec or the Access Control Policies proposal, with some caveats:
*
* - If access to the given Resource has been set using anything other than the
* functions in this module, it is possible that it has been set in a way that
* prevents this function from reliably setting access, in which case it will
* resolve to `null`.
* - It will only set access explicitly for the given Agent. In other words,
* additional restrictions could be present that further restrict or loosen
* what access the given Agent has in particular circumstances.
* - The provided access will only apply to the given Resource. In other words,
* if the Resource is a Container, the configured Access may not apply to
* contained Resources.
* - If the current user does not have permission to view or change access for
* the given Resource, this function will resolve to `null`.
*
* Additionally, two caveats apply to users with a Pod server that uses WAC:
* - If the Resource did not have an ACL yet, a new one will be initialised.
* This means that changes to the ACL of a parent Container can no longer
* affect access people have to this Resource, although existing access will
* be preserved.
* - Setting different values for `controlRead` and `controlWrite` is not
* supported, and **will throw an error**. If you expect (some of) your users
* to have Pods implementing WAC, be sure to pass the same value for both.
*
* @param resourceUrl URL of the Resource you want to change the Agent's access to.
* @param webId WebID of the Agent you want to set access for.
* @param access What access permissions you want to set for the given Agent to the given Resource. Possible properties are `read`, `append`, `write`, `controlRead` and `controlWrite`: set to `true` to allow, to `false` to stop allowing, or `undefined` to leave unchanged. Take note that `controlRead` and `controlWrite` can not have distinct values for a Pod server implementing Web Access Control; trying this will throw an error.
* @returns What access has been set for the given Agent explicitly.
* @since 1.5.0
*/
export async function setAgentAccess(
  resourceUrl: UrlString,
  webId: WebId,
  access: Partial<Access>,
  options = internal_defaultFetchOptions
): Promise<Access | null> {
  const withAcr = await getResourceInfoWithAcr(resourceUrl, options);
  if (hasAccessibleAcr(withAcr)) {
    // ACP-based Pod: compute an updated ACR, persist it, then re-read the
    // effective access so the caller sees what actually got applied.
    const acpData = await internal_getPoliciesAndRules(withAcr, options);
    const updated = setAgentAccessAcp(withAcr, acpData, webId, access);
    if (!updated) {
      return null;
    }
    try {
      await saveAcrFor(updated, options);
      return await getAgentAccess(resourceUrl, webId, options);
    } catch (e) {
      // Saving or re-reading failed; report "unknown" rather than throwing.
      return null;
    }
  }
  if (hasAccessibleAcl(withAcr)) {
    // WAC cannot express distinct read/write control access.
    if (access.controlRead !== access.controlWrite) {
      throw new Error(
        `When setting access for a Resource in a Pod implementing Web Access Control (i.e. [${getSourceIri(
          withAcr
        )}]), ` + "`controlRead` and `controlWrite` should have the same value."
      );
    }
    await setAgentAccessWac(withAcr, webId, access as WacAccess, options);
    return getAgentAccessWac(withAcr, webId, options);
  }
  return null;
}
/**
* Get an overview of what access is defined for all Agents with respect to a given
* Resource.
*
* This function works with Solid Pods that implement either the Web Access
* Control spec or the Access Control Policies proposal, with some caveats:
*
* - If access to the given Resource has been set using anything other than the
* functions in this module, it is possible that it has been set in a way that
* prevents this function from reliably reading access, in which case it will
* resolve to `null`.
* - It will only return access specified explicitly for the returned Agents. If
* additional restrictions are set up to apply to the listed Agents in a
* particular situation, those will not be reflected in the return value of
* this function.
* - It will only return access specified explicitly for the given Resource.
* In other words, if the Resource is a Container, the returned Access may not
* apply to contained Resources.
* - If the current user does not have permission to view access for the given
* Resource, this function will resolve to `null`.
*
* @param resourceUrl URL of the Resource you want to read the access for.
* @returns The access information to the Resource, grouped by Agent.
* @since 1.5.0
*/
export async function getAgentAccessAll(
  resourceUrl: UrlString,
  options = internal_defaultFetchOptions
): Promise<Record<WebId, Access> | null> {
  // Detect the Pod's access mechanism from the Resource's metadata.
  const withAcr = await getResourceInfoWithAcr(resourceUrl, options);
  if (hasAccessibleAcr(withAcr)) {
    // ACP-based Pod: collect per-agent access from the applicable policies.
    return getAgentAccessAllAcp(
      await internal_getPoliciesAndRules(withAcr, options)
    );
  }
  // WAC-based Pod, or no mechanism accessible to the current user.
  return hasAccessibleAcl(withAcr)
    ? getAgentAccessAllWac(withAcr, options)
    : null;
}
/**
* Get an overview of what access is defined for a given Group.
*
* This function works with Solid Pods that implement either the Web Access
* Control spec or the Access Control Policies proposal, with some caveats:
*
* - If access to the given Resource has been set using anything other than the
* functions in this module, it is possible that it has been set in a way that
* prevents this function from reliably reading access, in which case it will
* resolve to `null`.
* - It will only return access specified explicitly for the given Group. If
* additional restrictions are set up to apply to the given Group in a
* particular situation, those will not be reflected in the return value of
* this function.
* - It will only return access specified explicitly for the given Resource.
* In other words, if the Resource is a Container, the returned Access may not
* apply to contained Resources.
* - If the current user does not have permission to view access for the given
* Resource, this function will resolve to `null`.
*
* @param resourceUrl URL of the Resource you want to read the access for.
* @param webId WebID of the Group you want to get the access for.
* @since 1.5.0
* @deprecated Access Control Policies will no longer support vcard:Group. Use the mechanism-specific access API's if you want to define access for groups of people.
*/
export async function getGroupAccess(
  resourceUrl: UrlString,
  webId: WebId,
  options = internal_defaultFetchOptions
): Promise<Access | null> {
  // Detect the Pod's access mechanism from the Resource's metadata.
  const withAcr = await getResourceInfoWithAcr(resourceUrl, options);
  if (hasAccessibleAcr(withAcr)) {
    // ACP-based Pod: derive the Group's access from the applicable policies.
    const acpData = await internal_getPoliciesAndRules(withAcr, options);
    return getGroupAccessAcp(acpData, webId);
  }
  if (hasAccessibleAcl(withAcr)) {
    // WAC-based Pod: read the Group's entry from the Resource's ACL.
    return getGroupAccessWac(withAcr, webId, options);
  }
  // Neither mechanism is accessible to the current user.
  return null;
}
/**
* Get an overview of what access is defined for all Groups with respect to a given
* Resource.
*
* This function works with Solid Pods that implement either the Web Access
* Control spec or the Access Control Policies proposal, with some caveats:
*
* - If access to the given Resource has been set using anything other than the
* functions in this module, it is possible that it has been set in a way that
* prevents this function from reliably reading access, in which case it will
* resolve to `null`.
* - It will only return access specified explicitly for the returned Groups. If
* additional restrictions are set up to apply to the listed Groups in a
* particular situation, those will not be reflected in the return value of
* this function.
* - It will only return access specified explicitly for the given Resource.
* In other words, if the Resource is a Container, the returned Access may not
* apply to contained Resources.
* - If the current user does not have permission to view access for the given
* Resource, this function will resolve to `null`.
*
* @param resourceUrl URL of the Resource you want to read the access for.
* @returns The access information to the Resource, sorted by Group.
* @since 1.5.0
* @deprecated Access Control Policies will no longer support vcard:Group. Use the mechanism-specific access API's if you want to define access for groups of people.
*/
export async function getGroupAccessAll(
  resourceUrl: UrlString,
  options = internal_defaultFetchOptions
): Promise<Record<UrlString, Access> | null> {
  // Detect the Pod's access mechanism from the Resource's metadata.
  const withAcr = await getResourceInfoWithAcr(resourceUrl, options);
  if (hasAccessibleAcr(withAcr)) {
    // ACP-based Pod: collect per-Group access from the applicable policies.
    return getGroupAccessAllAcp(
      await internal_getPoliciesAndRules(withAcr, options)
    );
  }
  // WAC-based Pod, or no mechanism accessible to the current user.
  return hasAccessibleAcl(withAcr)
    ? getGroupAccessAllWac(withAcr, options)
    : null;
}
/**
* Set access to a Resource for a specific Group.
*
* This function works with Solid Pods that implement either the Web Access
* Control spec or the Access Control Policies proposal, with some caveats:
*
* - If access to the given Resource has been set using anything other than the
* functions in this module, it is possible that it has been set in a way that
* prevents this function from reliably setting access, in which case it will
* resolve to `null`.
* - It will only set access explicitly for the given Group. In other words,
* additional restrictions could be present that further restrict or loosen
* what access the given Group has in particular circumstances.
* - The provided access will only apply to the given Resource. In other words,
* if the Resource is a Container, the configured Access may not apply to
* contained Resources.
* - If the current user does not have permission to view or change access for
* the given Resource, this function will resolve to `null`.
*
* Additionally, two caveats apply to users with a Pod server that uses WAC:
* - If the Resource did not have an ACL yet, a new one will be initialised.
* This means that changes to the ACL of a parent Container can no longer
* affect access people have to this Resource, although existing access will
* be preserved.
* - Setting different values for `controlRead` and `controlWrite` is not
* supported, and **will throw an error**. If you expect (some of) your users
* to have Pods implementing WAC, be sure to pass the same value for both.
*
* @param resourceUrl URL of the Resource you want to change the Group's access to.
* @param groupUrl URL of the Group you want to set access for.
* @param access What access permissions you want to set for the given Group to the given Resource. Possible properties are `read`, `append`, `write`, `controlRead` and `controlWrite`: set to `true` to allow, to `false` to stop allowing, or `undefined` to leave unchanged. Take note that `controlRead` and `controlWrite` can not have distinct values for a Pod server implementing Web Access Control; trying this will throw an error.
* @returns What access has been set for the given Group explicitly.
* @since 1.5.0
* @deprecated Access Control Policies will no longer support vcard:Group. Use the mechanism-specific access API's if you want to define access for groups of people.
*/
export async function setGroupAccess(
  resourceUrl: UrlString,
  groupUrl: UrlString,
  access: Partial<Access>,
  options = internal_defaultFetchOptions
): Promise<Access | null> {
  const resourceInfo = await getResourceInfoWithAcr(resourceUrl, options);
  if (hasAccessibleAcr(resourceInfo)) {
    // ACP-based Pod: compute an updated ACR, persist it, then re-read the
    // effective access so the caller sees what actually got applied.
    const acpData = await internal_getPoliciesAndRules(resourceInfo, options);
    const updatedResource = setGroupAccessAcp(
      resourceInfo,
      acpData,
      groupUrl,
      access
    );
    if (updatedResource) {
      try {
        await saveAcrFor(updatedResource, options);
        // BUGFIX: `await` is required here. Without it the Promise returned
        // by `getGroupAccess` escapes this try/catch, so a rejection would
        // propagate to the caller instead of resolving to `null` — the
        // documented behaviour, and what the sibling `setAgentAccess` does.
        return await getGroupAccess(resourceUrl, groupUrl, options);
      } catch (e) {
        // Saving or re-reading failed; report "unknown" rather than throwing.
        return null;
      }
    }
    return null;
  }
  if (hasAccessibleAcl(resourceInfo)) {
    // WAC cannot express distinct read/write control access.
    if (access.controlRead !== access.controlWrite) {
      throw new Error(
        `When setting access for a Resource in a Pod implementing Web Access Control (i.e. [${getSourceIri(
          resourceInfo
        )}]), ` + "`controlRead` and `controlWrite` should have the same value."
      );
    }
    const wacAccess = access as WacAccess;
    await setGroupAccessWac(resourceInfo, groupUrl, wacAccess, options);
    return await getGroupAccessWac(resourceInfo, groupUrl, options);
  }
  return null;
}
/**
* Get an overview of what access is defined for everyone.
*
* This function works with Solid Pods that implement either the Web Access
* Control spec or the Access Control Policies proposal, with some caveats:
*
* - If access to the given Resource has been set using anything other than the
* functions in this module, it is possible that it has been set in a way that
* prevents this function from reliably reading access, in which case it will
* resolve to `null`.
* - It will only return access specified explicitly for everyone. If
* additional restrictions are set up to apply to users in a particular
* situation, those will not be reflected in the return value of this
* function.
* - It will only return access specified explicitly for the given Resource.
* In other words, if the Resource is a Container, the returned Access may not
* apply to contained Resources.
* - If the current user does not have permission to view access for the given
* Resource, this function will resolve to `null`.
*
* @param resourceUrl URL of the Resource you want to read the access for.
* @since 1.5.0
*/
export async function getPublicAccess(
  resourceUrl: UrlString,
  options = internal_defaultFetchOptions
): Promise<Access | null> {
  // Detect the Pod's access mechanism from the Resource's metadata.
  const withAcr = await getResourceInfoWithAcr(resourceUrl, options);
  if (hasAccessibleAcr(withAcr)) {
    // ACP-based Pod: derive the public access from the applicable policies.
    return getPublicAccessAcp(
      await internal_getPoliciesAndRules(withAcr, options)
    );
  }
  // WAC-based Pod, or no mechanism accessible to the current user.
  return hasAccessibleAcl(withAcr)
    ? getPublicAccessWac(withAcr, options)
    : null;
}
/**
* Set access to a Resource for everybody.
*
* This function works with Solid Pods that implement either the Web Access
* Control spec or the Access Control Policies proposal, with some caveats:
*
* - If access to the given Resource has been set using anything other than the
* functions in this module, it is possible that it has been set in a way that
* prevents this function from reliably setting access, in which case it will
* resolve to `null`.
* - It will only set access explicitly for everybody. In other words,
* additional restrictions could be present that further restrict or loosen
* what access a user has in particular circumstances.
* - The provided access will only apply to the given Resource. In other words,
* if the Resource is a Container, the configured Access may not apply to
* contained Resources.
* - If the current user does not have permission to view or change access for
* the given Resource, this function will resolve to `null`.
*
* Additionally, two caveats apply to users with a Pod server that uses WAC:
* - If the Resource did not have an ACL yet, a new one will be initialised.
* This means that changes to the ACL of a parent Container can no longer
* affect access people have to this Resource, although existing access will
* be preserved.
* - Setting different values for `controlRead` and `controlWrite` is not
* supported, and **will throw an error**. If you expect (some of) your users
* to have Pods implementing WAC, be sure to pass the same value for both.
*
* @param resourceUrl URL of the Resource you want to change public access to.
* @param access What access permissions you want to set for everybody to the given Resource. Possible properties are `read`, `append`, `write`, `controlRead` and `controlWrite`: set to `true` to allow, to `false` to stop allowing, or `undefined` to leave unchanged. Take note that `controlRead` and `controlWrite` can not have distinct values for a Pod server implementing Web Access Control; trying this will throw an error.
* @returns What access has been set for everybody explicitly.
* @since 1.5.0
*/
export async function setPublicAccess(
  resourceUrl: UrlString,
  access: Partial<Access>,
  options = internal_defaultFetchOptions
): Promise<Access | null> {
  const resourceInfo = await getResourceInfoWithAcr(resourceUrl, options);
  if (hasAccessibleAcr(resourceInfo)) {
    // ACP-based Pod: compute an updated ACR, persist it, then re-read the
    // effective access so the caller sees what actually got applied.
    const acpData = await internal_getPoliciesAndRules(resourceInfo, options);
    const updatedResource = setPublicAccessAcp(resourceInfo, acpData, access);
    if (updatedResource) {
      try {
        await saveAcrFor(updatedResource, options);
        // BUGFIX: `await` is required here. Without it the Promise returned
        // by `getPublicAccess` escapes this try/catch, so a rejection would
        // propagate to the caller instead of resolving to `null` — the
        // documented behaviour, and what the sibling `setAgentAccess` does.
        return await getPublicAccess(resourceUrl, options);
      } catch (e) {
        // Saving or re-reading failed; report "unknown" rather than throwing.
        return null;
      }
    }
    return null;
  }
  if (hasAccessibleAcl(resourceInfo)) {
    // WAC cannot express distinct read/write control access.
    if (access.controlRead !== access.controlWrite) {
      throw new Error(
        `When setting access for a Resource in a Pod implementing Web Access Control (i.e. [${getSourceIri(
          resourceInfo
        )}]), ` + "`controlRead` and `controlWrite` should have the same value."
      );
    }
    const wacAccess = access as WacAccess;
    await setPublicAccessWac(resourceInfo, wacAccess, options);
    return await getPublicAccessWac(resourceInfo, options);
  }
  return null;
}
export { getAccessFor, getAccessForAll, setAccessFor } from "./for"; | the_stack |
* @module Tools
*/
import { BeDuration } from "@itwin/core-bentley";
import { Camera, ColorDef, Hilite } from "@itwin/core-common";
import {
DrawingViewState, FlashMode, FlashSettings, FlashSettingsOptions, IModelApp, TileBoundingBoxes, Tool, Viewport,
} from "@itwin/core-frontend";
import { parseArgs } from "./parseArgs";
import { parseToggle } from "./parseToggle";
/** Base class for a tool that toggles some aspect of a Viewport.
 * Accepts an optional single argument parsed by `parseToggle`.
 * @beta
 */
export abstract class ViewportToggleTool extends Tool {
  public static override get minArgs() { return 0; }
  public static override get maxArgs() { return 1; }
  protected abstract toggle(vp: Viewport, enable?: boolean): Promise<void>;
  public override async run(enable?: boolean): Promise<boolean> {
    const viewport = IModelApp.viewManager.selectedView;
    if (viewport !== undefined)
      await this.toggle(viewport, enable);
    return true;
  }
  public override async parseAndRun(...args: string[]): Promise<boolean> {
    const parsed = parseToggle(args[0]);
    // A string result indicates a parse error; do nothing in that case.
    if (typeof parsed !== "string")
      await this.run(parsed);
    return true;
  }
}
/** Freeze or unfreeze the scene for the selected viewport. While the scene is frozen, no new tiles will be selected for drawing within the viewport.
 * @beta
 */
export class FreezeSceneTool extends ViewportToggleTool {
  public static override toolId = "FreezeScene";
  protected override async toggle(vp: Viewport, enable?: boolean) {
    const frozen = vp.freezeScene;
    if (enable === undefined || enable !== frozen)
      vp.freezeScene = !frozen;
    return Promise.resolve();
  }
}
// Argument names accepted by ShowTileVolumesTool. NOTE(review): each name's
// array index is used directly as the TileBoundingBoxes enum value — keep
// the order in sync with that enum.
const boundingVolumeNames = [
  "none",
  "volume",
  "content",
  "both",
  "children",
  "sphere",
];
/** Set the tile bounding volume decorations to display in the selected viewport.
 * Omitting the argument turns on Volume bounding boxes if bounding boxes are currently off; otherwise, toggles them off.
 * Allowed inputs are "none", "volume", "content", "both" (volume and content), "children", and "sphere".
 * @beta
 */
export class ShowTileVolumesTool extends Tool {
  public static override toolId = "ShowTileVolumes";
  public static override get minArgs() { return 0; }
  public static override get maxArgs() { return 1; }
  public override async run(boxes?: TileBoundingBoxes): Promise<boolean> {
    const vp = IModelApp.viewManager.selectedView;
    if (vp === undefined)
      return true;
    if (boxes === undefined) {
      // No argument: toggle between "off" and "volume".
      boxes = vp.debugBoundingBoxes === TileBoundingBoxes.None ? TileBoundingBoxes.Volume : TileBoundingBoxes.None;
    }
    vp.debugBoundingBoxes = boxes;
    return true;
  }
  public override async parseAndRun(...args: string[]): Promise<boolean> {
    let boxes: TileBoundingBoxes | undefined;
    if (args.length !== 0) {
      // The name's index doubles as the TileBoundingBoxes enum value.
      const index = boundingVolumeNames.indexOf(args[0].toLowerCase());
      if (index < 0)
        return true; // unrecognized name: do nothing
      boxes = index;
    }
    return this.run(boxes);
  }
}
/** This tool sets the aspect ratio skew for the selected viewport.
 * @beta
 */
export class SetAspectRatioSkewTool extends Tool {
  public static override toolId = "SetAspectRatioSkew";
  public static override get minArgs() { return 0; }
  public static override get maxArgs() { return 1; }
  /** This method runs the tool, setting the aspect ratio skew for the selected viewport.
   * @param skew the aspect ratio (x/y) skew value; 1.0 or undefined removes any skew
   */
  public override async run(skew?: number): Promise<boolean> {
    const vp = IModelApp.viewManager.selectedView;
    if (vp !== undefined) {
      vp.view.setAspectRatioSkew(skew ?? 1.0);
      vp.synchWithView();
    }
    return true;
  }
  /** Executes this tool's run method.
   * @param args the first entry of this array contains the `skew` argument
   * @see [[run]]
   */
  public override async parseAndRun(...args: string[]): Promise<boolean> {
    const skew = 0 === args.length ? 1.0 : parseFloat(args[0]);
    if (Number.isNaN(skew))
      return false;
    return this.run(skew);
  }
}
/** Changes the selected viewport's hilite or emphasis settings.
 * Arguments: r/g/b color components, s silhouette, v visible ratio, h hidden ratio.
 * @beta
 */
export abstract class ChangeHiliteTool extends Tool {
  public static override get minArgs() { return 0; }
  public static override get maxArgs() { return 6; }
  public override async run(settings?: Hilite.Settings): Promise<boolean> {
    const vp = IModelApp.viewManager.selectedView;
    if (vp !== undefined)
      this.apply(vp, settings);
    return true;
  }
  protected abstract apply(vp: Viewport, settings: Hilite.Settings | undefined): void;
  protected abstract getCurrentSettings(vp: Viewport): Hilite.Settings;
  public override async parseAndRun(...inputArgs: string[]): Promise<boolean> {
    // No arguments: reset via run() with no settings.
    if (inputArgs.length === 0)
      return this.run();
    const vp = IModelApp.viewManager.selectedView;
    if (vp === undefined)
      return true;
    const current = this.getCurrentSettings(vp);
    const rgb = current.color.colors;
    const args = parseArgs(inputArgs);
    // Override individual color channels, clamped to [0, 255].
    for (const channel of ["r", "g", "b"] as const) {
      const value = args.getInteger(channel);
      if (value !== undefined)
        rgb[channel] = Math.floor(Math.max(0, Math.min(255, value)));
    }
    let silhouette = current.silhouette;
    const s = args.getInteger("s");
    if (s !== undefined && s >= Hilite.Silhouette.None && s <= Hilite.Silhouette.Thick)
      silhouette = s;
    let visibleRatio = current.visibleRatio;
    const v = args.getFloat("v");
    if (v !== undefined && v >= 0 && v <= 1)
      visibleRatio = v;
    let hiddenRatio = current.hiddenRatio;
    const h = args.getFloat("h");
    if (h !== undefined && h >= 0 && h <= 1)
      hiddenRatio = h;
    const settings: Hilite.Settings = {
      color: ColorDef.from(rgb.r, rgb.g, rgb.b),
      silhouette,
      visibleRatio,
      hiddenRatio,
    };
    return this.run(settings);
  }
}
/** Changes the selected viewport's hilite settings, or resets to defaults.
 * @beta
 */
export class ChangeHiliteSettingsTool extends ChangeHiliteTool {
  public static override toolId = "ChangeHiliteSettings";
  protected getCurrentSettings(vp: Viewport) { return vp.hilite; }
  protected apply(vp: Viewport, settings?: Hilite.Settings): void {
    // No settings supplied: restore the defaults.
    if (settings === undefined)
      vp.hilite = new Hilite.Settings();
    else
      vp.hilite = settings;
  }
}
/** Changes the selected viewport's emphasis settings.
 * @beta
 */
export class ChangeEmphasisSettingsTool extends ChangeHiliteTool {
  public static override toolId = "ChangeEmphasisSettings";
  protected getCurrentSettings(vp: Viewport) { return vp.emphasisSettings; }
  protected apply(vp: Viewport, settings?: Hilite.Settings): void {
    // Unlike hilite, emphasis is only ever replaced, never reset here.
    if (settings === undefined)
      return;
    vp.emphasisSettings = settings;
  }
}
/** Changes the [FlashSettings]($frontend) for the selected [Viewport]($frontend).
* @beta
*/
export class ChangeFlashSettingsTool extends Tool {
public static override toolId = "ChangeFlashSettings";
public static override get minArgs() { return 0; }
public static override get maxArgs() { return 3; }
public override async run(settings?: FlashSettings): Promise<boolean> {
const vp = IModelApp.viewManager.selectedView;
if (vp)
vp.flashSettings = settings ?? new FlashSettings();
return true;
}
public override async parseAndRun(...inputArgs: string[]): Promise<boolean> {
const vp = IModelApp.viewManager.selectedView;
if (!vp)
return true;
if (1 === inputArgs.length && "default" === inputArgs[0].toLowerCase())
return this.run();
const options: FlashSettingsOptions = {};
const args = parseArgs(inputArgs);
const intensity = args.getFloat("i");
if (undefined !== intensity)
options.maxIntensity = intensity;
const mode = args.get("m");
if (mode) {
switch (mode[0].toLowerCase()) {
case "b":
options.litMode = FlashMode.Brighten;
break;
case "h":
options.litMode = FlashMode.Hilite;
break;
default:
return false;
}
}
const duration = args.getFloat("d");
if (undefined !== duration)
options.duration = BeDuration.fromSeconds(duration);
return this.run(vp.flashSettings.clone(options));
}
}
/** Enables or disables fade-out transparency mode for the selected viewport.
 * @beta
 */
export class FadeOutTool extends ViewportToggleTool {
  public static override toolId = "FadeOut";
  protected override async toggle(vp: Viewport, enable?: boolean): Promise<void> {
    const active = vp.isFadeOutActive;
    if (enable === undefined || enable !== active)
      vp.isFadeOutActive = !active;
  }
}
/** Sets the default tile size modifier used for all viewports that don't explicitly override it.
 * @beta
 */
export class DefaultTileSizeModifierTool extends Tool {
  public static override toolId = "DefaultTileSizeMod";
  public static override get minArgs() { return 1; }
  public static override get maxArgs() { return 1; }
  /** This method runs the tool, setting the default tile size modifier used for all viewports that don't explicitly override it.
   * @param modifier the tile size modifier to use; if undefined, do not set modifier
   */
  public override async run(modifier?: number): Promise<boolean> {
    if (modifier !== undefined)
      IModelApp.tileAdmin.defaultTileSizeModifier = modifier;
    return true;
  }
  /** Executes this tool's run method with args[0] containing `modifier`.
   * @see [[run]]
   */
  public override async parseAndRun(...args: string[]): Promise<boolean> {
    const modifier = Number.parseFloat(args[0]);
    return this.run(modifier);
  }
}
/** Sets or clears the tile size modifier override for the selected viewport.
 * @beta
 */
export class ViewportTileSizeModifierTool extends Tool {
  public static override toolId = "ViewportTileSizeMod";
  public static override get minArgs() { return 1; }
  public static override get maxArgs() { return 1; }
  /** This method runs the tool, setting the tile size modifier used for the selected viewport.
   * @param modifier the tile size modifier to use; if undefined, reset the modifier
   */
  public override async run(modifier?: number): Promise<boolean> {
    const vp = IModelApp.viewManager.selectedView;
    if (vp !== undefined)
      vp.setTileSizeModifier(modifier);
    return true;
  }
  /** Executes this tool's run method with args[0] containing the `modifier` argument or the string "reset" in order to reset the modifier.
   * @see [[run]]
   */
  public override async parseAndRun(...args: string[]): Promise<boolean> {
    const arg = args[0].toLowerCase();
    // "reset" clears the override; anything else is parsed as a number.
    return this.run("reset" === arg ? undefined : Number.parseFloat(args[0]));
  }
}
/** This tool adds a reality model to the viewport.
 * @beta
 */
export class ViewportAddRealityModel extends Tool {
  public static override toolId = "ViewportAddRealityModel";
  public static override get minArgs() { return 1; }
  public static override get maxArgs() { return 1; }
  /** This method runs the tool, adding a reality model to the viewport
   * @param url the URL which points to the reality model tileset
   */
  public override async run(url: string): Promise<boolean> {
    const vp = IModelApp.viewManager.selectedView;
    if (vp === undefined)
      return true;
    vp.displayStyle.attachRealityModel({ tilesetUrl: url });
    return true;
  }
  /** Executes this tool's run method with args[0] containing the `url` argument.
   * @see [[run]]
   */
  public override async parseAndRun(...args: string[]): Promise<boolean> {
    return this.run(args[0]);
  }
}
/** Changes the `allow3dManipulations` flag for the selected viewport if the viewport is displaying a `ViewState3d`.
 * @beta
 */
export class Toggle3dManipulationsTool extends ViewportToggleTool {
  public static override toolId = "Toggle3dManipulations";
  protected override async toggle(vp: Viewport, allow?: boolean): Promise<void> {
    const view = vp.view;
    if (!view.is3d())
      return;
    // No argument means "flip the current value".
    const desired = allow ?? !view.allow3dManipulations();
    if (desired === view.allow3dManipulations())
      return;
    view.setAllow3dManipulations(desired);
    // eslint-disable-next-line @typescript-eslint/no-floating-promises
    IModelApp.toolAdmin.startDefaultTool();
  }
}
/** Toggles display of view attachments in sheet views.
 * @beta
 */
export class ToggleViewAttachmentsTool extends ViewportToggleTool {
  public static override toolId = "ToggleViewAttachments";
  protected override async toggle(vp: Viewport, enable?: boolean): Promise<void> {
    const current = vp.wantViewAttachments;
    if (enable === undefined || enable !== current)
      vp.wantViewAttachments = !current;
  }
}
/** Toggle display of view attachment boundaries in sheet views.
 * @beta
 */
export class ToggleViewAttachmentBoundariesTool extends ViewportToggleTool {
  public static override toolId = "ToggleViewAttachmentBoundaries";
  protected override async toggle(vp: Viewport, enable?: boolean): Promise<void> {
    const current = vp.wantViewAttachmentBoundaries;
    if (enable === undefined || enable !== current)
      vp.wantViewAttachmentBoundaries = !current;
  }
}
/** Toggle display of view attachment clip shapes in sheet views.
 * @beta
 */
export class ToggleViewAttachmentClipShapesTool extends ViewportToggleTool {
  public static override toolId = "ToggleViewAttachmentClipShapes";
  protected override async toggle(vp: Viewport, enable?: boolean): Promise<void> {
    const current = vp.wantViewAttachmentClipShapes;
    if (enable === undefined || enable !== current)
      vp.wantViewAttachmentClipShapes = !current;
  }
}
/** Toggles display of 2d graphics in a [DrawingViewState]($frontend). This setting affects all drawing views until it is reset.
 * @beta
 */
export class ToggleDrawingGraphicsTool extends ViewportToggleTool {
  public static override toolId = "ToggleDrawingGraphics";
  protected override async toggle(vp: Viewport, enable?: boolean): Promise<void> {
    const hidden = DrawingViewState.hideDrawingGraphics;
    if (enable === undefined || enable !== hidden) {
      // Global (static) setting — the scene must be rebuilt to reflect it.
      DrawingViewState.hideDrawingGraphics = !hidden;
      vp.invalidateScene();
    }
  }
}
/** Toggles whether a [SectionDrawing]($backend)'s spatial view is always displayed along with the 2d graphics by a [DrawingViewState]($frontend), even
 * if it otherwise would not be. This setting affects all section drawing views until it is reset.
 * @beta
 */
export class ToggleSectionDrawingSpatialViewTool extends ViewportToggleTool {
  public static override toolId = "ToggleSectionDrawingSpatialView";
  // FIX: `override` was missing here; every other ViewportToggleTool subclass
  // marks its `toggle` explicitly, and `noImplicitOverride` requires it.
  protected override async toggle(vp: Viewport, enable?: boolean): Promise<void> {
    if (undefined === enable || enable !== DrawingViewState.alwaysDisplaySpatialView) {
      DrawingViewState.alwaysDisplaySpatialView = !DrawingViewState.alwaysDisplaySpatialView;
      if (vp.view instanceof DrawingViewState) {
        // Force the view to update its section drawing attachment.
        const view = vp.view.clone();
        await view.changeViewedModel(view.baseModelId);
        await view.load();
        vp.changeView(view);
      }
    }
  }
}
/** Change the camera settings of the selected viewport.
* @beta
*/
export class ChangeCameraTool extends Tool {
public static override get minArgs() { return 1; }
public static override get maxArgs() { return 2; }
public static override toolId = "ChangeCamera";
public override async run(camera?: Camera): Promise<boolean> {
const vp = IModelApp.viewManager.selectedView;
if (camera && vp && vp.view.is3d()) {
const view = vp.view.clone();
view.camera.setFrom(camera);
vp.changeView(view);
}
return true;
}
public override async parseAndRun(...inArgs: string[]): Promise<boolean> {
const vp = IModelApp.viewManager.selectedView;
if (!vp || !vp.view.is3d())
return false;
const camera = vp.view.camera.clone();
const args = parseArgs(inArgs);
const lens = args.getFloat("l");
if (undefined !== lens)
camera.lens.setDegrees(lens);
const focusDist = args.getFloat("d");
if (undefined !== focusDist)
camera.focusDist = focusDist;
return this.run(camera);
}
} | the_stack |
import * as child_process from 'child_process';
import * as vscode from 'vscode';
import { getStackIdeTargets } from '../utils';
import * as hie from './hie-bios';
import * as path from 'path';
/**
 * The key of a configuration, if applicable. Designed to be deterministically
 * serializable (see `configKeyToString`). Configurations whose keys serialize
 * identically may share an underlying GHCi session.
 */
export interface ConfigKey {
    // Discriminates the kind of configuration,
    // e.g. 'custom-workspace', 'hie-bios-cabal', 'detect-stack'.
    type: string;
    [k: string]: string;
}
/**
 * Deterministically serialize a ConfigKey to a string, for use in equality
 * comparison and tables. Keys are emitted in sorted order, so equal keys
 * always produce equal strings. The format is compatible with JSON.
 *
 * @param key Key object to serialize
 */
export function configKeyToString(key: ConfigKey): string {
    const parts = Object.keys(key)
        .sort()
        .map((k) => `${JSON.stringify(k)}:${JSON.stringify(key[k])}`);
    return `{${parts.join(',')}}`;
}
// Describes how to start (and when to restart) a GHCi session for a file.
export interface Configuration {
    /**
     * Identifies whether the underlying GHCi process is sharable. If two
     * configurations have a sharing key and the key is the same, they can share
     * a GHCi. A null key means the session is never shared.
     */
    key: null | ConfigKey ;
    /**
     * The command to run, in the form of shell command string or argument list
     */
    command: string | string[];
    /**
     * The directory to run command in; defaults to the process's cwd
     */
    cwd?: string,
    /**
     * When files listed here change, the configuration is invalidated
     * and the session must be restarted
     */
    dependencies: vscode.GlobPattern[];
}
// Flags passed to every `stack` invocation: disable the interactive terminal
// and colored output so stack's output stays machine-readable.
const stackOptions = ["--no-terminal", "--color", "never"]
/** Detect if stack is available, by running `stack --help` with a 5s timeout. */
function hasStack(): Promise<boolean> {
    return new Promise<boolean>((resolve) => {
        child_process.exec(
            'stack --help',
            { timeout: 5000 },
            // Any failure (not installed, timed out) means "no stack".
            (err) => resolve(!err)
        )
    });
}
/**
 * Configuration for a single file outside any recognized project:
 * plain GHCi, run through stack when stack is available.
 *
 * @param cwd The working directory associated with the file
 */
async function singleConfig(cwd?: string): Promise<Configuration> {
    const command = (await hasStack()) ? 'stack exec ghci' : 'ghci';
    return {
        key: null,
        command,
        cwd,
        dependencies: []
    };
}
// Workspace paths for which the trust prompt is currently being shown.
const alreadyShown = new Set<string>();
/**
 * Check whether the user has trusted `replCommand` for this workspace.
 * If not, show a one-time warning offering to trust it and return false;
 * once the user clicks Allow, a later call will return true.
 *
 * @param workspaceUri Workspace folder the command would run in
 * @param replCommand Shell command the workspace wants to run
 * @returns true iff the command is already trusted for this workspace
 */
function handleReplCommandTrust(
    workspaceUri: vscode.Uri,
    replCommand: string
): boolean {
    // Only local (file-scheme) workspaces can be trusted.
    if (workspaceUri.scheme !== 'file') return false;
    const config = vscode.workspace.getConfiguration('ghcSimple', null);
    // FIX: inspect() may return undefined, and the stored value may be
    // missing; guard both before indexing.
    const trustedGlobal =
        config.inspect<Record<string, string>>('trustedReplCommandConfigs')?.globalValue ?? {};
    if (trustedGlobal[workspaceUri.fsPath] === replCommand) {
        return true;
    } else {
        if (! alreadyShown.has(workspaceUri.fsPath)) {
            alreadyShown.add(workspaceUri.fsPath);
            vscode.window.showWarningMessage(
                `This workspace ${workspaceUri.fsPath} wants to run "${replCommand}" to start GHCi.\n\nAllow if you understand this and trust it.`,
                'Allow', 'Ignore'
            ).then((value) => {
                alreadyShown.delete(workspaceUri.fsPath);
                if (value === 'Allow') {
                    // FIX: get() may return undefined on first use.
                    const trusted =
                        config.get<Record<string, string>>('trustedReplCommandConfigs') ?? {};
                    trusted[workspaceUri.fsPath] = replCommand;
                    config.update('trustedReplCommandConfigs', trusted, vscode.ConfigurationTarget.Global);
                }
            })
        }
        return false;
    }
}
/** Configuration for a custom user-provided REPL command.
 *
 * @param replScope 'workspace' shares one session per workspace; 'file' does not share
 * @param replCommand Shell command to start GHCi; may contain `$stack_ide_targets`
 * @param workspaceUri Workspace the command runs in (and is trusted for)
 * @returns The configuration, or null if the command is not (yet) trusted
 */
async function customConfig(
    replScope: 'workspace' | 'file',
    replCommand: string,
    workspaceUri: vscode.Uri
): Promise<Configuration | null> {
    if (! handleReplCommandTrust(workspaceUri, replCommand))
        return null;
    if (replCommand.indexOf('$stack_ide_targets') !== -1) {
        const sit = await getStackIdeTargets(workspaceUri);
        // BUG FIX: String.replace returns a new string; the previous code
        // discarded the result, so $stack_ide_targets was never substituted.
        replCommand = replCommand.replace(/\$stack_ide_targets/g, sit.join(' '));
    }
    return {
        key: replScope === 'file'
            ? null
            : { type: 'custom-workspace', uri: workspaceUri.toString() },
        cwd: workspaceUri.fsPath,
        command: replCommand,
        dependencies: []
    };
}
/**
 * Whether path `a` is an ancestor of (or equal to) path `b`, compared
 * component-by-component using the platform path separator.
 */
function pathIsPrefix(a: string, b: string): boolean {
    const aParts = a.split(path.sep);
    const bParts = b.split(path.sep);
    return aParts.length <= bParts.length
        && aParts.every((part, i) => part === bParts[i]);
}
/**
 * Build a `Configuration` for `docUri` from the workspace's hie-bios cradle
 * description (`hie.yaml`). Handles cabal, stack, multi and none cradles.
 *
 * @param workspace Workspace folder whose hie.yaml is consulted
 * @param docUri Document the configuration is for
 * @returns The matching configuration, or null if the cradle yields none
 */
async function hieBiosConfig(
    workspace: vscode.WorkspaceFolder,
    docUri: vscode.Uri
): Promise<Configuration | null> {
    const hieConfig = await hie.getCradleConfig(workspace.uri);
    // Select the entry of a multi-component section whose path is a prefix of
    // the document's path, or null if none matches.
    const findMulti = <A>(multi: hie.Multi<A>): null | (A & hie.HasPath) => {
        let found: null | (A & hie.HasPath) = null;
        for (const cur of multi) {
            const pathUri = vscode.Uri.joinPath(workspace.uri, cur.path);
            if (! pathIsPrefix(pathUri.fsPath, docUri.fsPath)) {
                continue;
            }
            // NOTE(review): `found.path` is workspace-relative while
            // `pathUri.fsPath` is absolute, so this tie-breaker effectively
            // keeps the first match; confirm whether `found.path` should be
            // resolved against the workspace before comparing.
            if (found === null || pathIsPrefix(found.path, pathUri.fsPath)) {
                found = cur;
            }
        }
        return found;
    }
    const worker = (config: hie.HieConfig): Configuration | null => {
        // Configuration for a single named cabal component.
        const makeCabalConfig = (component: hie.CabalComponent): Configuration => ({
            key: {
                type: 'hie-bios-cabal',
                uri: workspace.uri.toString(),
                component: component.component
            },
            cwd: workspace.uri.fsPath,
            command: [ 'cabal', 'repl', '--', component.component ],
            dependencies: [
                ... config.dependencies || [],
                new vscode.RelativePattern(workspace, 'hie.yaml'),
                new vscode.RelativePattern(workspace, '*.cabal')
            ]
        });
        // Configuration for `cradle: cabal:` with no component given.
        const makeCabalNullConfig = (): Configuration => ({
            key: {
                type: 'hie-bios-cabal-null',
                uri: workspace.uri.toString()
            },
            cwd: workspace.uri.fsPath,
            command: [ 'cabal', 'repl' ],
            dependencies: [
                ... config.dependencies || [],
                new vscode.RelativePattern(workspace, 'stack.yaml'),
                new vscode.RelativePattern(workspace, 'hie.yaml'),
                new vscode.RelativePattern(workspace, '*.cabal')
            ]
        });
        // Configuration for a single stack component, honoring a
        // per-component or cradle-wide stack.yaml override.
        const makeStackConfig = (
            component: hie.StackComponent,
            defaultStackYaml: string | null
        ): Configuration => {
            const stackYaml = component.stackYaml || defaultStackYaml;
            const stackYamlOpts = stackYaml ? [ '--stack-yaml', stackYaml ] : [];
            const componentOpts = component.component ? [ component.component ] : [];
            return {
                key: {
                    type: 'hie-bios-stack',
                    uri: workspace.uri.toString(),
                    component: component.component
                },
                cwd: workspace.uri.fsPath,
                command: [ 'stack', ...stackOptions, 'repl', '--no-load', ... stackYamlOpts, '--', ... componentOpts ],
                dependencies: [
                    ... config.dependencies || [],
                    stackYaml || new vscode.RelativePattern(workspace, 'stack.yaml'),
                    new vscode.RelativePattern(workspace, 'hie.yaml'),
                    new vscode.RelativePattern(workspace, '*.cabal'),
                    new vscode.RelativePattern(workspace, 'package.yaml')
                ]
            }
        };
        // Configuration for `cradle: stack:` with no component given.
        const makeStackNullConfig = (): Configuration => {
            return {
                key: {
                    type: 'hie-bios-stack-null',
                    uri: workspace.uri.toString()
                },
                cwd: workspace.uri.fsPath,
                command: [ 'stack', ...stackOptions, 'repl', '--no-load' ],
                dependencies: [
                    ... config.dependencies || [],
                    new vscode.RelativePattern(workspace, 'hie.yaml'),
                    new vscode.RelativePattern(workspace, '*.cabal'),
                    new vscode.RelativePattern(workspace, 'package.yaml')
                ]
            }
        };
        const cradle = config.cradle;
        if ('cabal' in cradle) {
            const go = (components: hie.Multi<hie.CabalComponent>) => {
                const res = findMulti(components);
                return res === null ? null : makeCabalConfig(res);
            };
            if (cradle.cabal === null) {
                return makeCabalNullConfig();
            } else if ('components' in cradle.cabal) {
                return go(cradle.cabal.components);
            } else if (Array.isArray(cradle.cabal)) {
                return go(cradle.cabal);
            } else {
                return makeCabalConfig(cradle.cabal);
            }
        } else if ('stack' in cradle) {
            const defaultStackYaml =
                (cradle.stack && 'stackYaml' in cradle.stack) ? cradle.stack.stackYaml : null;
            const go = (components: hie.Multi<hie.StackComponent>) => {
                const res = findMulti(components);
                return res === null ? null : makeStackConfig(res, defaultStackYaml);
            };
            if (cradle.stack === null) {
                return makeStackNullConfig();
            } else if ('components' in cradle.stack) {
                return go(cradle.stack.components);
            } else if (Array.isArray(cradle.stack)) {
                return go(cradle.stack);
            } else {
                return makeStackConfig(cradle.stack, defaultStackYaml);
            }
        } else if ('multi' in cradle) {
            const res = findMulti(cradle.multi);
            // BUG FIX: findMulti returns null when no entry matches the
            // document; previously `res.config` was dereferenced without a
            // null check and would throw.
            return res === null ? null : worker(res.config);
        } else if ('none' in cradle) {
            return null;
        }
        // Unknown cradle shape: no configuration.
        return null;
    };
    return worker(hieConfig);
}
/** Detect the configuration of a `TextDocument` */
export async function fileConfig(docUri: vscode.Uri): Promise<Configuration | null> {
const workspace = vscode.workspace.getWorkspaceFolder(docUri);
if (! workspace) return singleConfig();
const config =
vscode.workspace.getConfiguration('ghcSimple', workspace.uri);
const replCommand: string = config.replCommand;
const replScope: 'workspace' | 'file' = config.replScope;
if (replCommand !== '') {
// Custom REPL command
return customConfig(replScope, replCommand, workspace.uri);
}
const find = async (pattern: string) =>
await vscode.workspace.findFiles(
new vscode.RelativePattern(workspace, pattern));
if ((await find('hie.yaml')).length > 0) {
// hie-bios cradle
return hieBiosConfig(workspace, docUri);
}
const makeCabalConfig = (): Configuration => ({
key: {
type: 'detect-cabal',
uri: workspace.uri.toString(),
},
cwd: workspace.uri.fsPath,
command: [ 'cabal', 'v2-repl', 'all' ],
dependencies: [
new vscode.RelativePattern(workspace, '*.cabal'),
new vscode.RelativePattern(workspace, 'package.yaml'),
new vscode.RelativePattern(workspace, 'cabal.project'),
new vscode.RelativePattern(workspace, 'cabal.project.local')
]
});
const makeStackConfig = (targets: string[]): Configuration => ({
key: {
type: 'detect-stack',
uri: workspace.uri.toString(),
},
cwd: workspace.uri.fsPath,
command: [ 'stack', ...stackOptions, 'repl', '--no-load', ... targets],
dependencies: [
new vscode.RelativePattern(workspace, '*.cabal'),
new vscode.RelativePattern(workspace, 'package.yaml'),
new vscode.RelativePattern(workspace, 'stack.yaml')
]
});
if ((await find('dist-newstyle')).length > 0) {
return makeCabalConfig();
}
if ((await find('.stack-work')).length > 0
|| (await find('stack.yaml')).length > 0) {
try {
const targets = await getStackIdeTargets(workspace.uri);
return makeStackConfig(targets);
} catch (e) {
console.error('Error detecting stack configuration:', e);
console.log('Trying others...');
}
}
if ((await find('*.cabal')).length > 0
|| (await find('cabal.project')).length > 0
|| (await find('cabal.project.local')).length > 0) {
return makeCabalConfig();
}
return singleConfig();
} | the_stack |
import { CombinedReportParameters } from 'accessibility-insights-report';
// Mock CombinedReportParameters fixture exercising baseline-aware issue
// reporting: failed URLs carry a `baselineStatus` of 'existing' or 'new'.
// All values are synthetic and only meant for report-rendering tests.
export const combinedResultsWithBaselineAwareIssues: CombinedReportParameters = {
    serviceName: 'Mock Service Name',
    axeVersion: 'mock.axe.version',
    scanDetails: {
        basePageTitle: 'Mock base without failures or unscannables',
        baseUrl: 'https://example.com/mock-base-url',
        scanStart: new Date(Date.UTC(2020, 1, 2, 3, 4, 5)), // Feb 2 2020, 03:04:05am (months are 0-based)
        scanComplete: new Date(Date.UTC(2020, 1, 2, 4, 4, 5)), // Feb 2 2020, 04:04:05am
        durationSeconds: 1 * 60 * 60,
    },
    userAgent: 'Mock user agent',
    browserResolution: '1920x1080',
    results: {
        urlResults: {
            failedUrls: 3,
            passedUrls: 2,
            unscannableUrls: 1,
        },
        resultsByRule: {
            failed: [
                {
                    key: 'failed-rule-1',
                    failed: [
                        {
                            urls: [
                                { url: 'https://url/rule-1/failure-1', baselineStatus: 'existing' },
                            ],
                            elementSelector: '.rule-1-selector-1',
                            snippet: '<div>snippet 1</div>',
                            fix: {
                                all: [],
                                any: [
                                    {
                                        data: null,
                                        id: 'internal-link-present',
                                        impact: 'serious',
                                        message: 'No valid skip link found',
                                        relatedNodes: [],
                                    },
                                    {
                                        data: null,
                                        id: 'header-present',
                                        impact: 'serious',
                                        message: 'Page does not have a header',
                                        relatedNodes: [],
                                    },
                                    {
                                        data: null,
                                        id: 'landmark',
                                        impact: 'serious',
                                        message: 'Page does not have a landmark region',
                                        relatedNodes: [],
                                    },
                                ],
                                failureSummary:
                                    'Fix any of the following:\n  No valid skip link found\n  Page does not have a header\n  Page does not have a landmark region',
                                none: [],
                            },
                            rule: {
                                description:
                                    'Ensures each page has at least one mechanism for a user to bypass navigation and jump straight to the content',
                                ruleUrl:
                                    'https://dequeuniversity.com/rules/axe/3.3/bypass?application=webdriverjs',
                                ruleId: 'bypass',
                                tags: [
                                    'cat.keyboard',
                                    'wcag2a',
                                    'wcag241',
                                    'section508',
                                    'section508.22.o',
                                ],
                            },
                        },
                        {
                            // Same rule, second instance: one existing and one new URL.
                            urls: [
                                { url: 'https://url/rule-1/failure-1', baselineStatus: 'existing' },
                                { url: 'https://url/rule1/failure-2', baselineStatus: 'new' },
                            ],
                            elementSelector: '.rule-1-selector-2',
                            snippet: '<div>snippet 2</div>',
                            fix: {
                                all: [],
                                any: [
                                    {
                                        data: null,
                                        id: 'internal-link-present',
                                        impact: 'serious',
                                        message: 'No valid skip link found',
                                        relatedNodes: [],
                                    },
                                    {
                                        data: null,
                                        id: 'header-present',
                                        impact: 'serious',
                                        message: 'Page does not have a header',
                                        relatedNodes: [],
                                    },
                                    {
                                        data: null,
                                        id: 'landmark',
                                        impact: 'serious',
                                        message: 'Page does not have a landmark region',
                                        relatedNodes: [],
                                    },
                                ],
                                failureSummary:
                                    'Fix any of the following:\n  No valid skip link found\n  Page does not have a header\n  Page does not have a landmark region',
                                none: [],
                            },
                            rule: {
                                description:
                                    'Ensures each page has at least one mechanism for a user to bypass navigation and jump straight to the content',
                                ruleUrl:
                                    'https://dequeuniversity.com/rules/axe/3.3/bypass?application=webdriverjs',
                                ruleId: 'bypass',
                                tags: [
                                    'cat.keyboard',
                                    'wcag2a',
                                    'wcag241',
                                    'section508',
                                    'section508.22.o',
                                ],
                            },
                        },
                    ],
                },
                {
                    key: 'failed-rule-2',
                    failed: [
                        {
                            urls: [{ url: 'https://url/rule2/failure1', baselineStatus: 'new' }],
                            elementSelector: '.rule-2-selector-1',
                            snippet: '<div>snippet</div>',
                            fix: {
                                all: [],
                                any: [
                                    {
                                        data: {
                                            bgColor: '#e7ecd8',
                                            contrastRatio: 2.52,
                                            expectedContrastRatio: '4.5:1',
                                            fgColor: '#8a94a8',
                                            fontSize: '12.0pt (16px)',
                                            fontWeight: 'normal',
                                            missingData: null,
                                        },
                                        id: 'color-contrast',
                                        impact: 'serious',
                                        message:
                                            'Element has insufficient color contrast of 2.52 (foreground color: #8a94a8, background color: #e7ecd8, font size: 12.0pt (16px), font weight: normal). Expected contrast ratio of 4.5:1',
                                        relatedNodes: [
                                            {
                                                html: '<li>',
                                                target: ['#menu > li:nth-child(1)'],
                                            },
                                        ],
                                    },
                                ],
                                none: [],
                                failureSummary:
                                    'Fix any of the following:\n  Element has insufficient color contrast of 2.52 (foreground color: #8a94a8, background color: #e7ecd8, font size: 12.0pt (16px), font weight: normal). Expected contrast ratio of 4.5:1',
                            },
                            rule: {
                                description:
                                    'Ensures the contrast between foreground and background colors meets WCAG 2 AA contrast ratio thresholds',
                                ruleUrl:
                                    'https://dequeuniversity.com/rules/axe/3.3/color-contrast?application=webdriverjs',
                                ruleId: 'color-contrast',
                                tags: ['cat.color', 'wcag2aa', 'wcag143'],
                            },
                        },
                    ],
                },
            ],
            passed: [
                {
                    description: 'Ensures <area> elements of image maps have alternate text',
                    ruleUrl:
                        'https://dequeuniversity.com/rules/axe/3.3/area-alt?application=webdriverjs',
                    ruleId: 'area-alt',
                    tags: [
                        'cat.text-alternatives',
                        'wcag2a',
                        'wcag111',
                        'section508',
                        'section508.22.a',
                    ],
                },
                {
                    description: 'Ensures aria-hidden elements do not contain focusable elements',
                    ruleUrl:
                        'https://dequeuniversity.com/rules/axe/3.3/aria-hidden-focus?application=webdriverjs',
                    ruleId: 'aria-hidden-focus',
                    tags: ['cat.name-role-value', 'wcag2a', 'wcag412', 'wcag131'],
                },
            ],
            notApplicable: [
                {
                    description: 'Ensures every ARIA input field has an accessible name',
                    ruleUrl:
                        'https://dequeuniversity.com/rules/axe/3.3/aria-input-field-name?application=webdriverjs',
                    ruleId: 'aria-input-field-name',
                    tags: ['wcag2a', 'wcag412'],
                },
                {
                    description: 'Ensures every ARIA toggle field has an accessible name',
                    ruleUrl:
                        'https://dequeuniversity.com/rules/axe/3.3/aria-toggle-field-name?application=webdriverjs',
                    ruleId: 'aria-toggle-field-name',
                    tags: ['wcag2a', 'wcag412'],
                },
                {
                    description:
                        'Ensure the autocomplete attribute is correct and suitable for the form field',
                    ruleUrl:
                        'https://dequeuniversity.com/rules/axe/3.3/autocomplete-valid?application=webdriverjs',
                    ruleId: 'autocomplete-valid',
                    tags: ['cat.forms', 'wcag21aa', 'wcag135'],
                },
            ],
        },
    },
};
import {
Selector,
SelectorType,
AttributeSelector,
Traversal,
AttributeAction,
TraversalType,
DataType,
} from "./types";
// Matches a CSS identifier (element/attribute/pseudo name), including
// backslash escape sequences.
const reName = /^[^\\#]?(?:\\(?:[\da-f]{1,6}\s?|.)|[\w\-\u00b0-\uFFFF])+/;
// Matches a single CSS escape sequence; group 2 captures escaped whitespace.
const reEscape = /\\([\da-f]{1,6}\s?|(\s)|.)/gi;
// Character codes used while scanning selector strings, named for
// readability. NOTE(review): `Star` and `Asterisk` are both 42.
const enum CharCode {
    LeftParenthesis = 40,
    RightParenthesis = 41,
    LeftSquareBracket = 91,
    RightSquareBracket = 93,
    Comma = 44,
    Period = 46,
    Colon = 58,
    SingleQuote = 39,
    DoubleQuote = 34,
    Plus = 43,
    Tilde = 126,
    QuestionMark = 63,
    ExclamationMark = 33,
    Slash = 47,
    Star = 42,
    Equal = 61,
    Dollar = 36,
    Pipe = 124,
    Circumflex = 94,
    Asterisk = 42,
    GreaterThan = 62,
    LessThan = 60,
    Hash = 35,
    LowerI = 105,
    LowerS = 115,
    BackSlash = 92,
    // Whitespace
    Space = 32,
    Tab = 9,
    NewLine = 10,
    FormFeed = 12,
    CarriageReturn = 13,
}
// Maps the character preceding `=` in an attribute selector (e.g. the `^`
// of `[attr^=value]`) to the corresponding attribute-matching action.
const actionTypes = new Map<number, AttributeAction>([
    [CharCode.Tilde, AttributeAction.Element],
    [CharCode.Circumflex, AttributeAction.Start],
    [CharCode.Dollar, AttributeAction.End],
    [CharCode.Asterisk, AttributeAction.Any],
    [CharCode.ExclamationMark, AttributeAction.Not],
    [CharCode.Pipe, AttributeAction.Hyphen],
]);
// Pseudos whose data property is itself a selector list and is therefore
// parsed recursively (rather than kept as a raw string).
const unpackPseudos = new Set([
    "has",
    "not",
    "matches",
    "is",
    "where",
    "host",
    "host-context",
]);
/**
 * Checks whether a specific selector is a traversal.
 * This is useful eg. in swapping the order of elements that
 * are not traversals.
 *
 * @param selector Selector to check.
 */
export function isTraversal(selector: Selector): selector is Traversal {
    const { type } = selector;
    return (
        type === SelectorType.Adjacent ||
        type === SelectorType.Child ||
        type === SelectorType.Descendant ||
        type === SelectorType.Parent ||
        type === SelectorType.Sibling ||
        type === SelectorType.ColumnCombinator
    );
}
// Pseudos whose string argument may be wrapped in quotes that should be
// stripped before use (eg. `:contains("foo")`).
const stripQuotesFromPseudos = new Set(["contains", "icontains"]);
// Unescape function taken from https://github.com/jquery/sizzle/blob/master/src/sizzle.js#L152
// Used as the replacer callback for `reEscape`.
function funescape(_: string, escaped: string, escapedWhitespace?: string) {
    const codePoint = parseInt(escaped, 16) - 0x10000;
    // Non-hex escapes parse to NaN; those, and escaped whitespace, are
    // returned verbatim.
    if (Number.isNaN(codePoint) || escapedWhitespace) {
        return escaped;
    }
    if (codePoint < 0) {
        // BMP codepoint: undo the 0x10000 offset.
        return String.fromCharCode(codePoint + 0x10000);
    }
    // Supplemental Plane codepoint: build the surrogate pair.
    return String.fromCharCode(
        (codePoint >> 10) | 0xd800,
        (codePoint & 0x3ff) | 0xdc00
    );
}
// Expands every CSS escape sequence in `str` into the character it denotes.
function unescapeCSS(str: string) {
    return str.replace(
        reEscape,
        (match: string, escaped: string, escapedWhitespace?: string) =>
            funescape(match, escaped, escapedWhitespace)
    );
}
// True when `c` is the character code of `'` or `"`.
function isQuote(c: number): boolean {
    switch (c) {
        case CharCode.SingleQuote:
        case CharCode.DoubleQuote:
            return true;
        default:
            return false;
    }
}
// True when `c` is a CSS whitespace character: space, tab, LF, FF or CR.
function isWhitespace(c: number): boolean {
    switch (c) {
        case CharCode.Space:
        case CharCode.Tab:
        case CharCode.NewLine:
        case CharCode.FormFeed:
        case CharCode.CarriageReturn:
            return true;
        default:
            return false;
    }
}
/**
 * Parses `selector` into an AST.
 *
 * @param selector Selector to parse.
 * @returns A two-dimensional array: the outer level has one entry per
 * comma-separated sub-selector (eg. `sub1, sub2`), the inner level holds
 * the tokens of that sub-selector.
 * @throws If any part of the input could not be consumed.
 */
export function parse(selector: string): Selector[][] {
    const result: Selector[][] = [];
    // The template literal coerces non-string inputs to a string up front.
    const consumed = parseSelector(result, `${selector}`, 0);
    if (consumed < selector.length) {
        throw new Error(`Unmatched selector: ${selector.slice(consumed)}`);
    }
    return result;
}
/**
 * Tokenizes one comma-separated selector list starting at `selectorIndex`,
 * pushing each completed sub-selector's token array onto `subselects`.
 * Called recursively for pseudo-selectors such as `:not(...)`.
 *
 * @param subselects Output array that finished sub-selectors are pushed to.
 * @param selector The full selector string being scanned.
 * @param selectorIndex Index in `selector` at which to start scanning.
 * @returns The index just past the last character consumed.
 */
function parseSelector(
    subselects: Selector[][],
    selector: string,
    selectorIndex: number
): number {
    // Tokens of the sub-selector currently being built.
    let tokens: Selector[] = [];
    // Reads a name starting `offset` characters ahead, advances
    // `selectorIndex` past it and returns it unescaped. Throws if no
    // name is present.
    function getName(offset: number): string {
        const match = selector.slice(selectorIndex + offset).match(reName);
        if (!match) {
            throw new Error(
                `Expected name, found ${selector.slice(selectorIndex)}`
            );
        }
        const [name] = match;
        selectorIndex += offset + name.length;
        return unescapeCSS(name);
    }
    // Skips `offset` characters, then any following run of whitespace.
    function stripWhitespace(offset: number) {
        selectorIndex += offset;
        while (
            selectorIndex < selector.length &&
            isWhitespace(selector.charCodeAt(selectorIndex))
        ) {
            selectorIndex++;
        }
    }
    // True if the character at `pos` is preceded by an odd number of
    // backslashes (ie. it is escaped).
    function isEscaped(pos: number): boolean {
        let slashCount = 0;
        while (selector.charCodeAt(--pos) === CharCode.BackSlash) slashCount++;
        return (slashCount & 1) === 1;
    }
    // Two traversals in a row (eg. `> >`) are invalid.
    function ensureNotTraversal() {
        if (tokens.length > 0 && isTraversal(tokens[tokens.length - 1])) {
            throw new Error("Did not expect successive traversals.");
        }
    }
    // Pushes a traversal token. Whitespace around an explicit combinator is
    // absorbed by upgrading a pending Descendant token in place.
    function addTraversal(type: TraversalType) {
        if (
            tokens.length > 0 &&
            tokens[tokens.length - 1].type === SelectorType.Descendant
        ) {
            tokens[tokens.length - 1].type = type;
            return;
        }
        ensureNotTraversal();
        tokens.push({ type });
    }
    // Shared helper for `.class` / `#id`, which desugar to attribute
    // selectors. They compare case-insensitively in quirks mode only.
    function addSpecialAttribute(name: string, action: AttributeAction) {
        tokens.push({
            type: SelectorType.Attribute,
            name,
            action,
            value: getName(1),
            namespace: null,
            ignoreCase: "quirks",
        });
    }
    /**
     * We have finished parsing the current part of the selector.
     *
     * Remove descendant tokens at the end if they exist,
     * and return the last index, so that parsing can be
     * picked up from here.
     */
    function finalizeSubselector() {
        if (
            tokens.length &&
            tokens[tokens.length - 1].type === SelectorType.Descendant
        ) {
            tokens.pop();
        }
        if (tokens.length === 0) {
            throw new Error("Empty sub-selector");
        }
        subselects.push(tokens);
    }
    stripWhitespace(0);
    // Nothing but whitespace: parse to nothing.
    if (selector.length === selectorIndex) {
        return selectorIndex;
    }
    loop: while (selectorIndex < selector.length) {
        const firstChar = selector.charCodeAt(selectorIndex);
        switch (firstChar) {
            // Whitespace
            case CharCode.Space:
            case CharCode.Tab:
            case CharCode.NewLine:
            case CharCode.FormFeed:
            case CharCode.CarriageReturn: {
                // Whitespace is recorded as a Descendant combinator; an
                // explicit combinator that follows replaces it in
                // `addTraversal`.
                if (
                    tokens.length === 0 ||
                    tokens[0].type !== SelectorType.Descendant
                ) {
                    ensureNotTraversal();
                    tokens.push({ type: SelectorType.Descendant });
                }
                stripWhitespace(1);
                break;
            }
            // Traversals
            case CharCode.GreaterThan: {
                addTraversal(SelectorType.Child);
                stripWhitespace(1);
                break;
            }
            case CharCode.LessThan: {
                addTraversal(SelectorType.Parent);
                stripWhitespace(1);
                break;
            }
            case CharCode.Tilde: {
                addTraversal(SelectorType.Sibling);
                stripWhitespace(1);
                break;
            }
            case CharCode.Plus: {
                addTraversal(SelectorType.Adjacent);
                stripWhitespace(1);
                break;
            }
            // Special attribute selectors: .class, #id
            case CharCode.Period: {
                addSpecialAttribute("class", AttributeAction.Element);
                break;
            }
            case CharCode.Hash: {
                addSpecialAttribute("id", AttributeAction.Equals);
                break;
            }
            case CharCode.LeftSquareBracket: {
                stripWhitespace(1);
                // Determine attribute name and namespace
                let name: string;
                let namespace: string | null = null;
                if (selector.charCodeAt(selectorIndex) === CharCode.Pipe) {
                    // Equivalent to no namespace
                    name = getName(1);
                } else if (selector.startsWith("*|", selectorIndex)) {
                    namespace = "*";
                    name = getName(2);
                } else {
                    name = getName(0);
                    // `name|attr` (but not `name|=` which is the Hyphen
                    // comparison) means `name` was actually a namespace.
                    if (
                        selector.charCodeAt(selectorIndex) === CharCode.Pipe &&
                        selector.charCodeAt(selectorIndex + 1) !==
                            CharCode.Equal
                    ) {
                        namespace = name;
                        name = getName(1);
                    }
                }
                stripWhitespace(0);
                // Determine comparison operation
                let action: AttributeAction = AttributeAction.Exists;
                const possibleAction = actionTypes.get(
                    selector.charCodeAt(selectorIndex)
                );
                if (possibleAction) {
                    action = possibleAction;
                    // Operators like `^`, `$`, `~` must be followed by `=`.
                    if (
                        selector.charCodeAt(selectorIndex + 1) !==
                        CharCode.Equal
                    ) {
                        throw new Error("Expected `=`");
                    }
                    stripWhitespace(2);
                } else if (
                    selector.charCodeAt(selectorIndex) === CharCode.Equal
                ) {
                    action = AttributeAction.Equals;
                    stripWhitespace(1);
                }
                // Determine value. Bare `[attr]` (AttributeAction.Exists,
                // the string "exists") has no value to parse.
                let value = "";
                let ignoreCase: boolean | null = null;
                if (action !== "exists") {
                    if (isQuote(selector.charCodeAt(selectorIndex))) {
                        // Quoted value: scan to the matching unescaped quote.
                        const quote = selector.charCodeAt(selectorIndex);
                        let sectionEnd = selectorIndex + 1;
                        while (
                            sectionEnd < selector.length &&
                            (selector.charCodeAt(sectionEnd) !== quote ||
                                isEscaped(sectionEnd))
                        ) {
                            sectionEnd += 1;
                        }
                        if (selector.charCodeAt(sectionEnd) !== quote) {
                            throw new Error("Attribute value didn't end");
                        }
                        value = unescapeCSS(
                            selector.slice(selectorIndex + 1, sectionEnd)
                        );
                        selectorIndex = sectionEnd + 1;
                    } else {
                        // Unquoted value: scan to whitespace or `]`.
                        const valueStart = selectorIndex;
                        while (
                            selectorIndex < selector.length &&
                            ((!isWhitespace(
                                selector.charCodeAt(selectorIndex)
                            ) &&
                                selector.charCodeAt(selectorIndex) !==
                                    CharCode.RightSquareBracket) ||
                                isEscaped(selectorIndex))
                        ) {
                            selectorIndex += 1;
                        }
                        value = unescapeCSS(
                            selector.slice(valueStart, selectorIndex)
                        );
                    }
                    stripWhitespace(0);
                    // See if we have a force ignore flag.
                    // `| 0x20` lower-cases an ASCII letter's char code.
                    const forceIgnore =
                        selector.charCodeAt(selectorIndex) | 0x20;
                    // If the forceIgnore flag is set (either `i` or `s`), use that value
                    if (forceIgnore === CharCode.LowerS) {
                        ignoreCase = false;
                        stripWhitespace(1);
                    } else if (forceIgnore === CharCode.LowerI) {
                        ignoreCase = true;
                        stripWhitespace(1);
                    }
                }
                if (
                    selector.charCodeAt(selectorIndex) !==
                    CharCode.RightSquareBracket
                ) {
                    throw new Error("Attribute selector didn't terminate");
                }
                selectorIndex += 1;
                const attributeSelector: AttributeSelector = {
                    type: SelectorType.Attribute,
                    name,
                    action,
                    value,
                    namespace,
                    ignoreCase,
                };
                tokens.push(attributeSelector);
                break;
            }
            case CharCode.Colon: {
                // `::name` is a pseudo-element.
                if (selector.charCodeAt(selectorIndex + 1) === CharCode.Colon) {
                    tokens.push({
                        type: SelectorType.PseudoElement,
                        name: getName(2).toLowerCase(),
                    });
                    continue;
                }
                const name = getName(1).toLowerCase();
                let data: DataType = null;
                if (
                    selector.charCodeAt(selectorIndex) ===
                    CharCode.LeftParenthesis
                ) {
                    if (unpackPseudos.has(name)) {
                        // Argument is itself a selector list: recurse.
                        if (isQuote(selector.charCodeAt(selectorIndex + 1))) {
                            throw new Error(
                                `Pseudo-selector ${name} cannot be quoted`
                            );
                        }
                        data = [];
                        selectorIndex = parseSelector(
                            data,
                            selector,
                            selectorIndex + 1
                        );
                        if (
                            selector.charCodeAt(selectorIndex) !==
                            CharCode.RightParenthesis
                        ) {
                            throw new Error(
                                `Missing closing parenthesis in :${name} (${selector})`
                            );
                        }
                        selectorIndex += 1;
                    } else {
                        // Opaque argument: copy text up to the matching
                        // unescaped closing parenthesis.
                        selectorIndex += 1;
                        const start = selectorIndex;
                        let counter = 1;
                        for (
                            ;
                            counter > 0 && selectorIndex < selector.length;
                            selectorIndex++
                        ) {
                            if (
                                selector.charCodeAt(selectorIndex) ===
                                    CharCode.LeftParenthesis &&
                                !isEscaped(selectorIndex)
                            ) {
                                counter++;
                            } else if (
                                selector.charCodeAt(selectorIndex) ===
                                    CharCode.RightParenthesis &&
                                !isEscaped(selectorIndex)
                            ) {
                                counter--;
                            }
                        }
                        if (counter) {
                            throw new Error("Parenthesis not matched");
                        }
                        data = selector.slice(start, selectorIndex - 1);
                        if (stripQuotesFromPseudos.has(name)) {
                            // Strip one matching pair of surrounding quotes.
                            const quot = data.charCodeAt(0);
                            if (
                                quot === data.charCodeAt(data.length - 1) &&
                                isQuote(quot)
                            ) {
                                data = data.slice(1, -1);
                            }
                        }
                        data = unescapeCSS(data);
                    }
                }
                tokens.push({ type: SelectorType.Pseudo, name, data });
                break;
            }
            case CharCode.Comma: {
                // End of the current sub-selector; start a fresh token list.
                finalizeSubselector();
                tokens = [];
                stripWhitespace(1);
                break;
            }
            default: {
                // Comments are skipped entirely.
                if (selector.startsWith("/*", selectorIndex)) {
                    const endIndex = selector.indexOf("*/", selectorIndex + 2);
                    if (endIndex < 0) {
                        throw new Error("Comment was not terminated");
                    }
                    selectorIndex = endIndex + 2;
                    // Remove leading whitespace
                    if (tokens.length === 0) {
                        stripWhitespace(0);
                    }
                    break;
                }
                // Tag or universal selector, possibly namespaced.
                let namespace = null;
                let name: string;
                if (firstChar === CharCode.Asterisk) {
                    selectorIndex += 1;
                    name = "*";
                } else if (firstChar === CharCode.Pipe) {
                    name = "";
                    // `||` is the column combinator.
                    if (
                        selector.charCodeAt(selectorIndex + 1) === CharCode.Pipe
                    ) {
                        addTraversal(SelectorType.ColumnCombinator);
                        stripWhitespace(2);
                        break;
                    }
                } else if (reName.test(selector.slice(selectorIndex))) {
                    name = getName(0);
                } else {
                    // Unrecognized character: stop parsing and let the
                    // caller decide whether the leftover input is an error.
                    break loop;
                }
                // A single `|` after the name makes it a namespace.
                if (
                    selector.charCodeAt(selectorIndex) === CharCode.Pipe &&
                    selector.charCodeAt(selectorIndex + 1) !== CharCode.Pipe
                ) {
                    namespace = name;
                    if (
                        selector.charCodeAt(selectorIndex + 1) ===
                        CharCode.Asterisk
                    ) {
                        name = "*";
                        selectorIndex += 2;
                    } else {
                        name = getName(1);
                    }
                }
                if (name === "*") {
                    tokens.push({ type: SelectorType.Universal, namespace });
                } else {
                    tokens.push({ type: SelectorType.Tag, name, namespace });
                }
            }
        }
    }
    finalizeSubselector();
    return selectorIndex;
}
import test from 'japa'
import { join } from 'path'
import dedent from 'dedent-js'
import { Filesystem } from '@poppinss/dev-utils'
import * as tags from '../src/Tags'
import { Loader } from '../src/Loader'
import { Compiler } from '../src/Compiler'
import { Processor } from '../src/Processor'
// Shared fixtures: templates are written to ./views, loaded through a
// default-mounted loader and compiled with the full tag set.
const fs = new Filesystem(join(__dirname, 'views'))
const loader = new Loader()
loader.mount('default', fs.basePath)
const processor = new Processor()
const compiler = new Compiler(loader, tags, processor)
// Compilation errors raised by the `@if` tag must carry the message and the
// line number of the offending template line.
test.group('If tag', (group) => {
  group.afterEach(async () => {
    await fs.cleanup()
  })
  test('raise errors on correct line with if tag', async (assert) => {
    // `assert.plan` ensures the catch block actually ran.
    assert.plan(2)
    const templateContent = dedent`Hello everyone!
    We are writing a bad if condition
    @if(foo bar)
    @endif`
    await fs.add('foo.edge', templateContent)
    try {
      compiler.compile('foo')
    } catch (error) {
      assert.equal(error.message, 'Unexpected token ')
      assert.equal(error.line, 4)
    }
  })
  test('raise errors when using sequence expression', async (assert) => {
    assert.plan(2)
    const templateContent = dedent`Hello everyone!
    We are writing a bad if condition
    @if(foo, bar)
    @endif`
    await fs.add('foo.edge', templateContent)
    try {
      compiler.compile('foo')
    } catch (error) {
      assert.equal(error.line, 4)
      assert.equal(error.message, '"foo, bar" is not a valid argument type for the @if tag')
    }
  })
  test('raise errors when expression was never closed', async (assert) => {
    assert.plan(1)
    const templateContent = dedent`Hello everyone!
    We are writing a bad if condition
    @if(foo, bar)`
    await fs.add('foo.edge', templateContent)
    try {
      compiler.compile('foo')
    } catch (error) {
      assert.equal(error.message, 'Unclosed tag if')
    }
  })
})
// `@include` errors must point at the exact file/line/column via the
// rewritten stack trace (frame 1 = the template location).
test.group('Include', (group) => {
  group.afterEach(async () => {
    await fs.cleanup()
  })
  test('raise errors on correct line with include tag', async (assert) => {
    assert.plan(1)
    const templateContent = dedent`We are writing a bad include condition
    @include(foo bar)`
    await fs.add('foo.edge', templateContent)
    try {
      compiler.compile('foo')
    } catch (error) {
      assert.equal(
        error.stack.split('\n')[1],
        ` at anonymous (${join(fs.basePath, 'foo.edge')}:2:13)`
      )
    }
  })
  test('raise errors when using sequence expression', async (assert) => {
    assert.plan(2)
    const templateContent = "@include('foo', 'bar')"
    await fs.add('foo.edge', templateContent)
    try {
      compiler.compile('foo')
    } catch (error) {
      assert.equal(
        error.stack.split('\n')[1],
        ` at anonymous (${join(fs.basePath, 'foo.edge')}:1:9)`
      )
      assert.equal(
        error.message,
        "\"'foo', 'bar'\" is not a valid argument type for the @include tag"
      )
    }
  })
})
// `@includeIf` requires exactly two arguments: a condition and a partial
// path. These tests pin the validation messages and error positions.
test.group('IncludeIf', (group) => {
  group.afterEach(async () => {
    await fs.cleanup()
  })
  test('raise errors when not passing sequence expression', async (assert) => {
    assert.plan(2)
    const templateContent = dedent`We are writing a bad include condition
    @includeIf(foo)`
    await fs.add('foo.edge', templateContent)
    try {
      compiler.compile('foo')
    } catch (error) {
      assert.equal(error.message, '"foo" is not a valid argument type for the @includeIf tag')
      assert.equal(
        error.stack.split('\n')[1],
        ` at anonymous (${join(fs.basePath, 'foo.edge')}:2:11)`
      )
    }
  })
  test('raise errors when passing more than 2 arguments', async (assert) => {
    assert.plan(2)
    const templateContent = dedent`We are writing a bad include condition
    @includeIf(foo, bar, baz)`
    await fs.add('foo.edge', templateContent)
    try {
      compiler.compile('foo')
    } catch (error) {
      assert.equal(error.message, '@includeIf expects a total of 2 arguments')
      assert.equal(
        error.stack.split('\n')[1],
        ` at anonymous (${join(fs.basePath, 'foo.edge')}:2:11)`
      )
    }
  })
  test('raise errors when 1st argument is a sequence expression', async (assert) => {
    assert.plan(2)
    const templateContent = dedent`We are writing a bad include condition
    @includeIf((foo, bar), baz)`
    await fs.add('foo.edge', templateContent)
    try {
      compiler.compile('foo')
    } catch (error) {
      assert.equal(
        error.message,
        '"SequenceExpression" is not a valid 1st argument type for the @includeIf tag'
      )
      assert.equal(
        error.stack.split('\n')[1],
        ` at anonymous (${join(fs.basePath, 'foo.edge')}:2:12)`
      )
    }
  })
})
// Validation of `@slot` usage inside `@component`: slot names must be string
// literals, at most two arguments, the second an identifier, and slots must
// be direct children of the component.
test.group('Component', (group) => {
  group.afterEach(async () => {
    await fs.cleanup()
  })
  test('raise errors when slot name is not defined as a literal', async (assert) => {
    assert.plan(2)
    const templateContent = dedent`@component('bar')
      @slot(hello)
      @endslot
      @endcomponent`
    await fs.add('foo.edge', templateContent)
    try {
      compiler.compile('foo')
    } catch (error) {
      assert.equal(
        error.stack.split('\n')[1],
        ` at anonymous (${join(fs.basePath, 'foo.edge')}:2:6)`
      )
      assert.equal(error.message, '"hello" is not a valid argument type for the @slot tag')
    }
  })
  test('raise errors when slot has more than 2 arguments', async (assert) => {
    assert.plan(2)
    const templateContent = dedent`@component('bar')
      @slot('hello', props, propsAgain)
      @endslot
      @endcomponent`
    await fs.add('foo.edge', templateContent)
    try {
      compiler.compile('foo')
    } catch (error) {
      assert.equal(
        error.stack.split('\n')[1],
        ` at anonymous (${join(fs.basePath, 'foo.edge')}:2:6)`
      )
      assert.equal(error.message, 'maximum of 2 arguments are allowed for @slot tag')
    }
  })
  test('raise errors when slot first argument is not a literal', async (assert) => {
    assert.plan(2)
    const templateContent = dedent`@component('bar')
      @slot(hello, props)
      @endslot
      @endcomponent`
    await fs.add('foo.edge', templateContent)
    try {
      compiler.compile('foo')
    } catch (error) {
      assert.equal(
        error.stack.split('\n')[1],
        ` at anonymous (${join(fs.basePath, 'foo.edge')}:2:6)`
      )
      assert.equal(error.message, 'slot name must be a valid string literal')
    }
  })
  test('raise errors when slot 2nd argument is not an identifier', async (assert) => {
    assert.plan(2)
    // Multi-line tag: the error must point at the offending argument's line.
    const templateContent = dedent`@component('bar')
      @slot(
        'hello',
        'props'
      )
      @endslot
      @endcomponent`
    await fs.add('foo.edge', templateContent)
    try {
      compiler.compile('foo')
    } catch (error) {
      assert.equal(
        error.stack.split('\n')[1],
        ` at anonymous (${join(fs.basePath, 'foo.edge')}:4:2)`
      )
      assert.equal(error.message, '"\'props\'" is not valid prop identifier for @slot tag')
    }
  })
  test('raise error when slot is inside a conditional block', async (assert) => {
    assert.plan(2)
    const templateContent = dedent`@component('bar')
      @if(username)
        @slot('header')
        @endslot
      @endif
      @endcomponent`
    await fs.add('foo.edge', templateContent)
    try {
      compiler.compile('foo')
    } catch (error) {
      assert.equal(
        error.stack.split('\n')[1],
        ` at anonymous (${join(fs.basePath, 'foo.edge')}:4:8)`
      )
      assert.equal(
        error.message,
        '@slot tag must appear as top level tag inside the @component tag'
      )
    }
  })
})
// Templates extending a layout may only contain `@section`/`@set` at the top
// level; anything else (conditionals, raw markup) must raise.
test.group('Layouts', (group) => {
  group.afterEach(async () => {
    await fs.cleanup()
  })
  test('raise error when section is nested inside conditional block', async (assert) => {
    assert.plan(2)
    const templateContent = dedent`@layout('master')
      @if(username)
        @section('body')
          <p> Hello world </p>
        @endsection
      @endif`
    await fs.add('foo.edge', templateContent)
    await fs.add('master.edge', "@!section('body')")
    try {
      compiler.compile('foo')
    } catch (error) {
      assert.equal(
        error.stack.split('\n')[1],
        ` at anonymous (${join(fs.basePath, 'foo.edge')}:3:4)`
      )
      assert.equal(
        error.message,
        'Template extending a layout can only use "@section" or "@set" tags as top level nodes'
      )
    }
  })
  test('raise error when section is not a top level tag inside nested layouts', async (assert) => {
    assert.plan(2)
    // Same rule holds when the layout itself extends another layout.
    const templateContent = dedent`@layout('master')
      @if(username)
        @section('body')
          <p> Hello world </p>
        @endsection
      @endif`
    await fs.add('foo.edge', templateContent)
    await fs.add('master.edge', "@layout('super')")
    await fs.add('super.edge', "@!section('body')")
    try {
      compiler.compile('foo')
    } catch (error) {
      assert.equal(
        error.stack.split('\n')[1],
        ` at anonymous (${join(fs.basePath, 'foo.edge')}:3:4)`
      )
      assert.equal(
        error.message,
        'Template extending a layout can only use "@section" or "@set" tags as top level nodes'
      )
    }
  })
  test('raise error when there are raw content outside sections', async (assert) => {
    assert.plan(2)
    const templateContent = dedent`@layout('master')
      <p> Hello world </p>`
    await fs.add('foo.edge', templateContent)
    await fs.add('master.edge', "@!section('body')")
    try {
      compiler.compile('foo')
    } catch (error) {
      assert.equal(
        error.stack.split('\n')[1],
        ` at anonymous (${join(fs.basePath, 'foo.edge')}:2:0)`
      )
      assert.equal(
        error.message,
        'Template extending a layout can only use "@section" or "@set" tags as top level nodes'
      )
    }
  })
})
// `@each` requires a binary `in` expression whose left-hand side is an
// identifier; these tests pin the validation messages and line numbers.
test.group('Each tag', (group) => {
  group.afterEach(async () => {
    await fs.cleanup()
  })
  test('raise errors when arg is not a binary expression', async (assert) => {
    assert.plan(2)
    const templateContent = dedent`Hello everyone!
    We are writing a bad each condition
    @each(users)
    @endeach`
    await fs.add('foo.edge', templateContent)
    try {
      compiler.compile('foo')
    } catch (error) {
      assert.equal(error.message, '"users" is not valid expression for the @each tag')
      assert.equal(error.line, 4)
    }
  })
  // (sic: "indentifier" typo is part of the original test title)
  test('raise errors when lhs of binary expression is not an indentifier', async (assert) => {
    assert.plan(2)
    const templateContent = dedent`Hello everyone!
    We are writing a bad each condition
    @each('user' in users)
    @endeach`
    await fs.add('foo.edge', templateContent)
    try {
      compiler.compile('foo')
    } catch (error) {
      assert.equal(error.message, 'invalid left hand side "Literal" expression for the @each tag')
      assert.equal(error.line, 4)
    }
  })
})
import {Component, Inject, OnInit} from "@angular/core";
import {VERSIONS_LINK} from "../../model/feed-link-constants";
import {AbstractLoadFeedComponent} from "../../shared/AbstractLoadFeedComponent";
import {FeedSideNavService} from "../../services/feed-side-nav.service";
import {FeedLoadingService} from "../../services/feed-loading-service";
import {DefineFeedService} from "../../services/define-feed.service";
import {StateService} from "@uirouter/angular";
import {KyloIcons} from "../../../../../kylo-utils/kylo-icons";
import {FeedService} from "../../../../services/FeedService";
import {fromPromise} from "rxjs/observable/fromPromise";
import * as _ from "underscore";
import {forkJoin} from "rxjs/observable/forkJoin";
import {CloneUtil} from "../../../../../common/utils/clone-util";
import {HttpClient} from "@angular/common/http";
import {RestUrlService} from "../../../../services/RestUrlService";
import {RestUrlConstants} from "../../../../services/RestUrlConstants";
import {Feed} from "../../../../model/feed/feed.model";
import {FeedNifiPropertiesService} from "../../services/feed-nifi-properties.service";
import {DomainType, DomainTypesService} from "../../../../services/DomainTypesService";
import {FeedFieldPolicyRuleService} from "../../../../shared/feed-field-policy-rules/FeedFieldPolicyRuleDialog";
@Component({
selector: "feed-versions",
styleUrls: ["./feed-versions.component.scss"],
templateUrl: "./feed-versions.component.html"
})
export class FeedVersionsComponent extends AbstractLoadFeedComponent implements OnInit {
static LINK_NAME = VERSIONS_LINK;
public kyloIcons_Links_versions= KyloIcons.Links.versions;
historyVersions: any;
feedService: FeedService;
restUrlService: RestUrlService;
feedNifiPropertiesService: FeedNifiPropertiesService;
domainTypesService: DomainTypesService;
feedFieldPolicyRuleService: FeedFieldPolicyRuleService;
http: HttpClient;
leftVersion: any;
rightVersion: any;
loading: any;
leftFeed: any;
rightFeed: any;
userProperties: any[];
isClustered: boolean = true;
supportsExecutionNode: boolean = true;
toolTipPosition: string = 'left';
securityGroupsEnabled: boolean = false;
fieldNameMap: any;
propertiesMap: any[];
availableDomainTypes: Array<DomainType> = [];
constructor(http: HttpClient,
feedLoadingService: FeedLoadingService,
stateService: StateService,
defineFeedService: DefineFeedService,
feedSideNavService: FeedSideNavService,
feedNifiPropertiesService: FeedNifiPropertiesService,
@Inject("FeedService") feedService: FeedService,
@Inject("RestUrlService") restUrlService: RestUrlService,
domainTypesService: DomainTypesService,
@Inject("FeedFieldPolicyRuleService") feedFieldPolicyRuleService: FeedFieldPolicyRuleService) {
super(feedLoadingService, stateService, defineFeedService, feedSideNavService);
this.feedService = feedService;
this.restUrlService = restUrlService;
this.http = http;
this.feedNifiPropertiesService = feedNifiPropertiesService;
this.domainTypesService = domainTypesService;
this.feedFieldPolicyRuleService = feedFieldPolicyRuleService;
this.loading = false;
}
getLinkName() {
return FeedVersionsComponent.LINK_NAME;
}
init() {
//console.log(this.feed);
this.loadHistoryVersions();
this.populateFieldNameMap();
}
loadHistoryVersions() {
return fromPromise(this.feedService.getFeedVersions(this.feed.id))
.subscribe((result: any) => {
this.historyVersions = result.versions;
this.leftVersion = this.getCurrentVersion();
//console.log('Total history versions loaded: ' + this.historyVersions.length);
}, (err: any) => {
console.log("Error retrieving history versions of feed");
});
}
getCurrentVersion(): any {
return this.historyVersions[0];
}
changeRightVersion(version: any) {
this.rightVersion = version;
this.loading = true;
this.propertiesMap = [];
let diffObservable = fromPromise(this.feedService.diffFeedVersions(this.feed.id, this.rightVersion.id, this.leftVersion.id));
let nifiClusterStatusObservable = this.http.get(this.restUrlService.NIFI_STATUS);
let securityGroupsEnabledObservable = this.http.get(this.restUrlService.HADOOP_AUTHORIZATATION_BASE_URL + "/enabled");
let domainTypesObservable = fromPromise(this.domainTypesService.findAll());
forkJoin([diffObservable, nifiClusterStatusObservable, securityGroupsEnabledObservable, domainTypesObservable])
.subscribe(([diffResult, nifiClusterStatusResult, securityGroupsEnabledResult, domainTypesResult]) => {
this.availableDomainTypes = domainTypesResult;
domainTypesResult.forEach((domainType: any) => {
if (domainType && domainType.field) {
domainType.field.derivedDataType = null;
domainType.field.precisionScale = null;
}
});
if (!_.isUndefined(securityGroupsEnabledResult)
&& !_.isUndefined(securityGroupsEnabledResult.data)
&& !_.isUndefined(securityGroupsEnabledResult.data[0])) {
this.securityGroupsEnabled = securityGroupsEnabledResult.data[0].enabled;
}
this.isClustered = !_.isUndefined(nifiClusterStatusResult.clustered) && nifiClusterStatusResult.clustered;
this.supportsExecutionNode = this.isClustered && !_.isUndefined(nifiClusterStatusResult.version) && !nifiClusterStatusResult.version.match(/^0\.|^1\.0/);
this.feedService.versionFeedModelDiff = [];
_.each(diffResult.difference.patch, (patch: any) => {
this.feedService.versionFeedModelDiff[patch.path] = patch;
});
this.leftFeed = diffResult.fromVersion.entity;
this.rightFeed = diffResult.toVersion.entity;
this.populatePropertiesMap();
this.feedService.versionFeedModel = this.rightFeed;
this.feedService.versionFeedModel.version = this.rightVersion;
this.userProperties = [];
_.each(this.feedService.versionFeedModel.userProperties, (versionedProp) => {
let property: any = {};
property.versioned = CloneUtil.deepObjectCopy(versionedProp);
property.op = 'no-op';
property.systemName = property.versioned.systemName;
property.displayName = property.versioned.displayName;
property.description = property.versioned.description;
property.current = CloneUtil.deepObjectCopy(property.versioned);
this.userProperties.push(property);
});
_.each(_.values(this.feedService.versionFeedModelDiff), (diff) => {
if (diff.path.startsWith("/userProperties")) {
if (diff.path.startsWith("/userProperties/")) {
//individual versioned indexed action
let remainder = diff.path.substring("/userProperties/".length, diff.path.length);
let indexOfSlash = remainder.indexOf("/");
let versionedPropIdx = remainder.substring(0, indexOfSlash > 0 ? indexOfSlash : remainder.length);
if ("replace" === diff.op) {
let property = this.userProperties[versionedPropIdx];
property.op = diff.op;
let replacedPropertyName = remainder.substring(remainder.indexOf("/") + 1, remainder.length);
property.current[replacedPropertyName] = diff.value;
property[replacedPropertyName] = diff.value;
} else if ("add" === diff.op) {
if (_.isArray(diff.value)) {
_.each(diff.value, (prop) => {
this.userProperties.push(this.createProperty(prop, diff.op));
});
} else {
this.userProperties.unshift(this.createProperty(diff.value, diff.op));
}
} else if ("remove" === diff.op) {
let property = this.userProperties[versionedPropIdx];
property.op = diff.op;
property.current = {};
}
} else {
//group versioned action, can be either "add" or "remove"
if ("add" === diff.op) {
if (_.isArray(diff.value)) {
_.each(diff.value, (prop) => {
this.userProperties.push(this.createProperty(prop, diff.op));
});
} else {
this.userProperties.push(this.createProperty(diff.value, diff.op));
}
} else if ("remove" === diff.op) {
_.each(this.userProperties, (prop: any) => {
prop.op = diff.op;
prop.current = {};
});
}
}
}
});
this.loading = false;
}, (([diffError, nifiClusterStatusError, securityGroupsEnabledError, domainTypesError]) => {
console.log("Error obtaining feed version differences");
this.loading = false;
}));
}
historyVersionsAvailable(): boolean {
return (this.historyVersions != undefined);
}
diff(path: any) {
return this.feedService.diffOperation(path);
}
diffCollection(path: any) {
return this.feedService.diffCollectionOperation(path);
}
diffPolicies(policyIdx: any) {
return this.feedService.joinVersionOperations(this.feedService.diffCollectionOperation('/table/fieldPolicies/' + policyIdx + '/standardization'),
this.feedService.diffCollectionOperation('/table/fieldPolicies/' + policyIdx + '/validation'));
}
createProperty(original: any, operation: any) {
let property: any = {};
property.versioned = {};
property.current = CloneUtil.deepObjectCopy(original);
property.systemName = property.current.systemName;
property.displayName = property.current.displayName;
property.description = property.current.description;
property.op = operation;
return property;
}
interpretDiff(diffResult: string, isSensitive?: boolean): string {
if (isSensitive) {
return "This is a sensitive property";
}
if (diffResult) {
if (diffResult === 'add') {
return 'Value added';
} else if (diffResult === 'remove') {
return 'Value removed';
} else if (diffResult === 'replace') {
return 'Value updated';
}
}
return '';
}
mergeStrategyDisplayName(feed: any): any {
return this.leftFeed.table.targetMergeStrategy;
}
populateFieldNameMap() {
this.fieldNameMap = {};
_.each(this.feed.table.tableSchema.fields, (field) => {
this.fieldNameMap[field['name']] = field;
});
//console.log('Field map populated. Total entries: ' + Object.keys(this.fieldNameMap).length);
}
getDomainType(domainTypeId: any): any {
return _.find(this.availableDomainTypes, (domainType: any) => {
return (domainType.id === domainTypeId);
});
}
getAllFieldPolicies(fieldWithPolicies: any): any {
return this.feedFieldPolicyRuleService.getAllPolicyRules(fieldWithPolicies);
}
getAllVersionedFieldPolicies(policyIndex: any): any {
return this.getAllFieldPolicies(this.findVersionedPolicy(policyIndex));
}
private findVersionedPolicy(policyIndex: any) {
if (this.feedService.versionFeedModel && this.feedService.versionFeedModel.table && this.feedService.versionFeedModel.table.fieldPolicies) {
return this.feedService.versionFeedModel.table.fieldPolicies[policyIndex];
}
return '';
}
getInputProperties(inputProcessorName: any, inputProcessorType: any) {
return this.propertiesMap.filter(p => ((p.processorName == inputProcessorName) && (p.processorType == inputProcessorType)));
}
getNonInputProperties(inputProcessorName: any, inputProcessorType: any) {
return this.propertiesMap.filter(p => ((p.processorName != inputProcessorName) || (p.processorType != inputProcessorType)));
}
populatePropertiesMap() {
_.each(this.leftFeed.properties, (property: any) => {
let item: any = {};
item.nameKey = property.nameKey;
item.key = property.key;
item.processorName = property.processorName;
item.processorType = property.processorType;
item.processGroupName = property.processGroupName;
item.sensitive = property.sensitive;
item.leftValue = property.value;
item.rightValue = null;
this.propertiesMap.push(item);
});
_.each(this.rightFeed.properties, (property: any) => {
let existingItem:any = this.propertiesMap.find(item => item.nameKey == property.nameKey);
if (existingItem == null) {
let item: any = {};
item.nameKey = property.nameKey;
item.key = property.key;
item.processorName = property.processorName;
item.processorType = property.processorType;
item.processGroupName = property.processGroupName;
item.sensitive = property.sensitive;
item.leftValue = null;
item.rightValue = property.value;
this.propertiesMap.push(item);
} else {
existingItem.rightValue = property.value;
}
});
_.each(this.propertiesMap, (propertyMapItem) => {
if (((propertyMapItem.leftValue == null) && (propertyMapItem.rightValue == null)) || (propertyMapItem.leftValue == propertyMapItem.rightValue)) {
propertyMapItem.op = "no-change";
} else if (((propertyMapItem.leftValue == null) || (propertyMapItem.leftValue == ""))
&& ((propertyMapItem.rightValue != null) || (propertyMapItem.rightValue != ""))) {
propertyMapItem.op = "remove";
} else if (((propertyMapItem.leftValue != null) || (propertyMapItem.leftValue != ""))
&& ((propertyMapItem.rightValue == null) || (propertyMapItem.rightValue == ""))) {
propertyMapItem.op = "add";
} else if ((propertyMapItem.leftValue != propertyMapItem.rightValue)) {
propertyMapItem.op = "replace";
}
});
}
} | the_stack |
import { ObjectId, Binary } from 'mongodb';
import { expectType } from 'ts-expect';
import types from '../types';
enum TEST_ENUM {
FOO = 'foo',
BAR = 'bar',
}
describe('types', () => {
describe('simple types', () => {
// boolean(): runtime schema is `{ type: 'boolean' }`; the declared value
// type is `boolean`, optionally `| undefined` unless `required: true`.
describe('boolean', () => {
  test('default', () => {
    const value = types.boolean();
    expect(value).toEqual({
      type: 'boolean',
    });
    // Without `required`, undefined is part of the value type.
    expectType<boolean | undefined>(value);
    expectType<typeof value>(undefined);
  });
  test('required', () => {
    const value = types.boolean({ required: true });
    expect(value).toEqual({
      $required: true,
      type: 'boolean',
    });
    // `required: true` strips `undefined` from the type.
    expectType<boolean>(value);
    // @ts-expect-error `value` should not be undefined
    expectType<typeof value>(undefined);
  });
  test('options', () => {
    // Numeric/string-only options must be rejected at compile time.
    // @ts-expect-error invalid option
    types.boolean({ maximum: 1 });
    // @ts-expect-error invalid option
    types.boolean({ maxLength: 1 });
  });
});
// date(): emits the BSON-specific `{ bsonType: 'date' }` schema and types
// the value as a JavaScript `Date`.
describe('date', () => {
  test('default', () => {
    const value = types.date();
    expect(value).toEqual({
      bsonType: 'date',
    });
    expectType<Date | undefined>(value);
    expectType<typeof value>(undefined);
  });
  test('required', () => {
    const value = types.date({ required: true });
    expect(value).toEqual({
      $required: true,
      bsonType: 'date',
    });
    // `required: true` strips `undefined` from the type.
    expectType<Date>(value);
    // @ts-expect-error `value` should not be undefined
    expectType<typeof value>(undefined);
  });
  test('options', () => {
    // Numeric/string-only options must be rejected at compile time.
    // @ts-expect-error invalid option
    types.date({ maximum: 1 });
    // @ts-expect-error invalid option
    types.date({ maxLength: 1 });
  });
});
// enum(): runtime schema lists the allowed values verbatim; the value type
// is the union of the member literals (including `null` when passed).
describe('enum', () => {
  test('default', () => {
    const value = types.enum(Object.values(TEST_ENUM));
    expect(value).toEqual({
      enum: ['foo', 'bar'],
    });
    expectType<TEST_ENUM | undefined>(value);
    expectType<typeof value>(TEST_ENUM.FOO);
    expectType<typeof value>(undefined);
  });
  test('required', () => {
    // `null` may be an explicit enum member even when the field is required.
    const value = types.enum([...Object.values(TEST_ENUM), null], { required: true });
    expect(value).toEqual({
      $required: true,
      enum: ['foo', 'bar', null],
    });
    expectType<TEST_ENUM | null>(value);
    expectType<typeof value>(TEST_ENUM.FOO);
    expectType<typeof value>(null);
    // @ts-expect-error `value` should not be undefined
    expectType<typeof value>(undefined);
  });
  test('options', () => {
    // @ts-expect-error invalid option
    types.enum(Object.values(TEST_ENUM), { maximum: 1 });
    // @ts-expect-error invalid option
    types.enum(Object.values(TEST_ENUM), { maxLength: 1 });
  });
  test('array of const', () => {
    // Literal members (via `as const`) must survive inference as a union.
    const value = types.enum(['a' as const, 'b' as const]);
    expect(value).toEqual({
      enum: ['a', 'b'],
    });
    expectType<typeof value>('a');
    // @ts-expect-error `value` can not be c
    expectType<typeof value>('c');
    expectType<typeof value>(undefined);
  });
});
// number(): plain `{ type: 'number' }` schema; numeric constraint options
// (maximum/minimum/multipleOf) are accepted, string options are not.
describe('number', () => {
  test('default', () => {
    const value = types.number();
    expect(value).toEqual({
      type: 'number',
    });
    expectType<number | undefined>(value);
    expectType<typeof value>(undefined);
  });
  test('required', () => {
    const value = types.number({ required: true });
    expect(value).toEqual({
      $required: true,
      type: 'number',
    });
    // `required: true` strips `undefined` from the type.
    expectType<number>(value);
    // @ts-expect-error `value` should not be undefined
    expectType<typeof value>(undefined);
  });
  test('options', () => {
    // Numeric constraints compile fine…
    types.number({
      maximum: 9,
      minimum: 2,
      multipleOf: 2,
    });
    // …but string-only constraints are rejected.
    // @ts-expect-error invalid option
    types.number({ maxLength: 1 });
  });
});
// objectId(): BSON-specific `{ bsonType: 'objectId' }` schema; value is a
// mongodb `ObjectId`.
describe('objectId', () => {
  test('default', () => {
    const value = types.objectId();
    expect(value).toEqual({
      bsonType: 'objectId',
    });
    expectType<ObjectId | undefined>(value);
    expectType<typeof value>(undefined);
  });
  test('required', () => {
    const value = types.objectId({ required: true });
    expect(value).toEqual({
      $required: true,
      bsonType: 'objectId',
    });
    // `required: true` strips `undefined` from the type.
    expectType<ObjectId>(value);
    // @ts-expect-error `value` should not be undefined
    expectType<typeof value>(undefined);
  });
  test('options', () => {
    // @ts-expect-error invalid option
    types.objectId({ maximum: 1 });
  });
});
// binary(): BSON-specific `{ bsonType: 'binData' }` schema; value is a
// mongodb `Binary`.
describe('binary', () => {
  test('default', () => {
    const value = types.binary();
    expect(value).toEqual({
      bsonType: 'binData',
    });
    expectType<Binary | undefined>(value);
    expectType<typeof value>(undefined);
  });
  test('required', () => {
    const value = types.binary({ required: true });
    expect(value).toEqual({
      $required: true,
      bsonType: 'binData',
    });
    // `required: true` strips `undefined` from the type.
    expectType<Binary>(value);
    // @ts-expect-error `value` should not be undefined
    expectType<typeof value>(undefined);
  });
  test('options', () => {
    // @ts-expect-error invalid option
    types.binary({ maximum: 1 });
  });
});
// string(): plain `{ type: 'string' }` schema; string constraint options
// (pattern/enum/maxLength/minLength) are accepted, numeric ones are not.
describe('string', () => {
  test('default', () => {
    const value = types.string();
    expect(value).toEqual({
      type: 'string',
    });
    expectType<string | undefined>(value);
    expectType<typeof value>(undefined);
  });
  test('pattern', () => {
    // A regex source string is passed straight through into the schema.
    const value = types.string({ pattern: '^foo' });
    expect(value).toEqual({
      pattern: '^foo',
      type: 'string',
    });
    expectType<string | undefined>(value);
    expectType<typeof value>(undefined);
  });
  test('required', () => {
    const value = types.string({ required: true });
    expect(value).toEqual({
      $required: true,
      type: 'string',
    });
    // `required: true` strips `undefined` from the type.
    expectType<string>(value);
    // @ts-expect-error `value` should not be undefined
    expectType<typeof value>(undefined);
  });
  test('options', () => {
    // String constraints compile fine…
    types.string({
      enum: ['foo', 'bar'],
      maxLength: 9,
      minLength: 1,
    });
    // …but numeric-only constraints are rejected.
    // @ts-expect-error invalid option
    types.string({ maximum: 1 });
  });
});
});
describe('complex types', () => {
// any(): the catch-all type — the generated schema enumerates every
// supported bsonType and the value is typed as `any`.
describe('any', () => {
  test('default', () => {
    // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
    const value = types.any();
    expect(value).toEqual({
      bsonType: [
        'array',
        'binData',
        'bool',
        'date',
        'null',
        'number',
        'object',
        'objectId',
        'string',
      ],
    });
    // `any` accepts anything, including undefined.
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    expectType<any>(value);
    expectType<typeof value>(undefined);
    expectType<typeof value>('foo');
    expectType<typeof value>(123);
  });
  test('required', () => {
    // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
    const value = types.any({ required: true });
    expect(value).toEqual({
      $required: true,
      bsonType: [
        'array',
        'binData',
        'bool',
        'date',
        'null',
        'number',
        'object',
        'objectId',
        'string',
      ],
    });
    // NOTE(review): even with `required: true` the type stays `any`, so
    // `undefined` is still assignable below — runtime-only enforcement.
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    expectType<any>(value);
    expectType<typeof value>(undefined);
    expectType<typeof value>('foo');
    expectType<typeof value>(123);
  });
  test('options', () => {
    // @ts-expect-error invalid option
    types.any({ maximum: 1 });
    // @ts-expect-error invalid option
    types.any({ maxLength: 1 });
  });
});
// array(itemSchema, options): nests the item schema under `items`; the value
// type is an array of the item's value type.
describe('array', () => {
  test('default', () => {
    const value = types.array(types.boolean({ required: true }));
    expect(value).toEqual({
      items: {
        $required: true,
        type: 'boolean',
      },
      type: 'array',
    });
    // Required items ⇒ element type has no `undefined`; the array itself is optional.
    expectType<boolean[] | undefined>(value);
    expectType<typeof value>(undefined);
  });
  test('required', () => {
    const value = types.array(types.string(), { required: true });
    expect(value).toEqual({
      $required: true,
      items: {
        type: 'string',
      },
      type: 'array',
    });
    // Required array of optional items ⇒ `(string | undefined)[]`, no top-level undefined.
    expectType<(string | undefined)[]>(value);
    // @ts-expect-error `value` should not be undefined
    expectType<typeof value>(undefined);
  });
  test('options', () => {
    // Array-level constraints live beside `items` in the schema.
    const value = types.array(types.number(), {
      maxItems: 9,
      minItems: 1,
    });
    expect(value).toEqual({
      items: {
        type: 'number',
      },
      maxItems: 9,
      minItems: 1,
      type: 'array',
    });
    expectType<(number | undefined)[] | undefined>(value);
  });
  test('with object items', () => {
    // Nested object schemas compose: the object schema is embedded verbatim.
    const value = types.array(
      types.object(
        {
          foo: types.string(),
        },
        { required: true }
      ),
      { required: true }
    );
    expect(value).toEqual({
      $required: true,
      items: {
        $required: true,
        additionalProperties: false,
        properties: {
          foo: {
            type: 'string',
          },
        },
        type: 'object',
      },
      type: 'array',
    });
    expectType<{ foo?: string }[]>(value);
  });
});
// object(properties, options): builds a `type: 'object'` schema with
// `additionalProperties: false` by default, collects `$required` children
// into the schema's `required` list, and types required keys as non-optional.
describe('object', () => {
  test('default', () => {
    const value = types.object({
      bar: types.number({ required: true }),
      foo: types.boolean(),
    });
    // objectContaining: internal bookkeeping keys (if any) are ignored here.
    expect(value).toEqual(
      expect.objectContaining({
        additionalProperties: false,
        properties: {
          bar: {
            $required: true,
            type: 'number',
          },
          foo: {
            type: 'boolean',
          },
        },
        required: ['bar'],
        type: 'object',
      })
    );
    // `bar` is mandatory, `foo` optional, the object itself optional.
    expectType<
      | {
          foo?: boolean;
          bar: number;
        }
      | undefined
    >(value);
    expectType<boolean | undefined>(value?.foo);
    expectType<typeof value>({
      bar: 123,
      foo: true,
    });
    expectType<typeof value>({
      bar: 123,
    });
    expectType<typeof value>(undefined);
  });
  test('required', () => {
    const value = types.object(
      {
        foo: types.boolean(),
      },
      { required: true }
    );
    expect(value).toEqual(
      expect.objectContaining({
        $required: true,
        additionalProperties: false,
        properties: {
          foo: {
            type: 'boolean',
          },
        },
        type: 'object',
      })
    );
    expectType<{ foo?: boolean }>(value);
    // @ts-expect-error `value` should not be undefined
    expectType<typeof value>(undefined);
  });
  test('options', () => {
    // additionalProperties/maxProperties/minProperties pass through verbatim.
    const value = types.object(
      {
        foo: types.boolean(),
      },
      {
        additionalProperties: true,
        maxProperties: 3,
        minProperties: 1,
      }
    );
    expect(value).toEqual(
      expect.objectContaining({
        additionalProperties: true,
        maxProperties: 3,
        minProperties: 1,
        properties: {
          foo: {
            type: 'boolean',
          },
        },
        type: 'object',
      })
    );
    expectType<
      | {
          foo?: boolean;
        }
      | undefined
    >(value);
  });
});
// objectGeneric(valueSchema, pattern?, options?): dictionary-style object —
// the value schema is nested under `patternProperties[pattern]` (default
// pattern '.+') and the value type is an index signature over it.
describe('generic object', () => {
  test('default', () => {
    const value = types.objectGeneric(
      types.object(
        {
          bar: types.number({ required: true }),
          foo: types.boolean(),
        },
        { required: true }
      )
    );
    expect(value).toEqual(
      expect.objectContaining({
        additionalProperties: false,
        patternProperties: {
          '.+': {
            $required: true,
            additionalProperties: false,
            properties: {
              bar: {
                $required: true,
                type: 'number',
              },
              foo: {
                type: 'boolean',
              },
            },
            required: ['bar'],
            type: 'object',
          },
        },
        type: 'object',
      })
    );
    expectType<
      | {
          [key: string]: {
            foo?: boolean;
            bar: number;
          };
        }
      | undefined
    >(value);
    // Arbitrary keys resolve to the nested object type (or undefined).
    const item = value?.item;
    expectType<boolean | undefined>(item?.foo);
    expectType<number | undefined>(item?.bar);
    expectType<typeof item>({
      bar: 123,
      foo: true,
    });
    expectType<typeof item>({
      bar: 123,
    });
    expectType<typeof item>(undefined);
  });
  test('required', () => {
    // A custom key pattern replaces the default '.+'.
    const value = types.objectGeneric(types.number(), '^(foo|bar)$', { required: true });
    expect(value).toEqual(
      expect.objectContaining({
        $required: true,
        additionalProperties: false,
        patternProperties: {
          '^(foo|bar)$': {
            type: 'number',
          },
        },
        type: 'object',
      })
    );
    expectType<{ [key: string]: number | undefined }>(value);
    // @ts-expect-error `value` should not be undefined
    expectType<typeof value>(undefined);
  });
  test('options', () => {
    const value = types.objectGeneric(types.boolean(), '.+', {
      additionalProperties: true,
      maxProperties: 3,
      minProperties: 1,
    });
    expect(value).toEqual(
      expect.objectContaining({
        additionalProperties: true,
        maxProperties: 3,
        minProperties: 1,
        patternProperties: {
          '.+': {
            type: 'boolean',
          },
        },
        type: 'object',
      })
    );
    expectType<
      | {
          [key: string]: boolean | undefined;
        }
      | undefined
    >(value);
  });
});
});
}); | the_stack |
import { cellWidth, ICell, Table, TableBody, TableHeader, wrappable } from '@patternfly/react-table';
import { Criteria, Host, initCriteria, Iter8Info, NameValuePair } from '../../../../types/Iter8';
import * as React from 'react';
import {
Button,
Checkbox,
Form,
FormGroup,
FormSelect,
FormSelectOption,
TextInputBase as TextInput
} from '@patternfly/react-core';
import { style } from 'typestyle';
import { PFColors } from '../../../../components/Pf/PfColors';
import history from '../../../../app/History';
import { OnRemoveFromListOptions } from './ExperimentCreatePage';
// Column definitions for the success-criteria table. The trailing
// empty-title column hosts the per-row "Add this Criteria" button.
const headerCells: ICell[] = [
  {
    title: 'Metric Name',
    // `as any`: cellWidth's return type doesn't line up with ICell.transforms
    // in this PatternFly version — TODO confirm on upgrade.
    transforms: [cellWidth(30) as any],
    props: {}
  },
  {
    title: 'Threshold',
    transforms: [wrappable, cellWidth(10) as any],
    props: {}
  },
  {
    title: 'Threshold Type',
    transforms: [wrappable, cellWidth(15) as any],
    props: {}
  },
  {
    title: 'Stop on Failure',
    transforms: [wrappable],
    props: {}
  },
  {
    title: 'Reward',
    transforms: [wrappable],
    props: {}
  },
  {
    title: '',
    props: {}
  }
];
// Choices for the threshold-type dropdown; `name` is the stored value and
// `value` the displayed label (identical for both entries).
const toleranceType: NameValuePair[] = [
  {
    name: 'absolute',
    value: 'absolute'
  },
  {
    name: 'relative',
    value: 'relative'
  }
];
// Style for the "(At least one criteria is needed)" hint shown while the
// criteria list is empty.
const noCriteriaStyle = style({
  marginTop: 15,
  marginBottom: 15,
  color: PFColors.Blue400,
  fontSize: '1.2em'
});
type Props = {
  // Iter8 controller info — not read in this component; presumably consumed
  // by the parent page. TODO confirm whether it can be dropped from Props.
  iter8Info: Iter8Info;
  // Criteria already added to the experiment (rendered as table rows).
  criterias: Criteria[];
  // Metric names offered in the "Metric" dropdown.
  metricNames: string[];
  // Invoked with the draft criteria when "Add this Criteria" is clicked;
  // the Host and match arguments are placeholders for criteria additions.
  onAdd: (criteria: Criteria, host: Host, match: any) => void;
  // Invoked with the list kind and row index when a row's remove action fires.
  onRemove: (type: OnRemoveFromListOptions, index: number) => void;
};
type State = {
  // Draft criteria currently being edited in the add-row / add-form.
  addCriteria: Criteria;
  // Set to true by every field handler once the draft has been edited.
  validName: boolean;
};
// Success-criteria editor. Renders the criteria table and, depending on the
// route, either an inline "add" row at the bottom of the table (on the
// new-experiment page, path ending in '/new') or a standalone add-form
// below the table. Multiple criteria may share a metric but differ in
// tolerance, type, etc.
class ExperimentCriteriaForm extends React.Component<Props, State> {
  constructor(props: Props) {
    super(props);
    this.state = {
      addCriteria: initCriteria(),
      validName: false
    };
  }

  // Row actions: only persisted criteria rows (index within props.criterias)
  // expose "Remove Criteria"; the trailing inline add-row gets none.
  // @ts-ignore
  actionResolver = (rowData, { rowIndex }) => {
    const removeAction = {
      title: 'Remove Criteria',
      onClick: (_, rowIndex) => {
        this.props.onRemove(OnRemoveFromListOptions.Criteria, rowIndex);
      }
    };
    return rowIndex < this.props.criterias.length ? [removeAction] : [];
  };

  // --- Draft-field handlers. Each patches a single field of the draft
  // (spreading the previous draft preserves every other field, instead of
  // hand-copying the full field list) and flags the form as edited.

  onAddMetricName = (value: string) => {
    this.setState(prevState => ({
      addCriteria: { ...prevState.addCriteria, metric: value.trim() },
      validName: true
    }));
  };

  onAddTolerance = (value: string, _: unknown) => {
    this.setState(prevState => ({
      addCriteria: { ...prevState.addCriteria, tolerance: parseFloat(value.trim()) },
      validName: true
    }));
  };

  onAddToleranceType = (value: string, _: unknown) => {
    this.setState(prevState => ({
      addCriteria: { ...prevState.addCriteria, toleranceType: value.trim() },
      validName: true
    }));
  };

  onAddStopOnFailure = () => {
    this.setState(prevState => ({
      addCriteria: { ...prevState.addCriteria, stopOnFailure: !prevState.addCriteria.stopOnFailure },
      validName: true
    }));
  };

  onIsReward = () => {
    this.setState(prevState => ({
      addCriteria: { ...prevState.addCriteria, isReward: !prevState.addCriteria.isReward },
      validName: true
    }));
  };

  // Hands the finished draft to the parent and resets the draft for the next
  // entry. Host/match are not meaningful for criteria, so placeholders go up.
  onAddCriteria = () => {
    this.props.onAdd(this.state.addCriteria, { name: '', gateway: '' }, null);
    this.setState({
      addCriteria: initCriteria()
    });
  };

  // Builds the table rows. On '/new' the last row is an inline editor for the
  // draft; otherwise only read-only rows are produced.
  rows() {
    if (history.location.pathname.endsWith('/new')) {
      return this.props.criterias
        .map((criteria, i) => ({
          key: 'criteria' + i,
          cells: [
            <>{criteria.metric}</>,
            <>{criteria.tolerance}</>,
            <>{criteria.toleranceType}</>,
            <>{criteria.stopOnFailure ? 'true' : 'false'}</>,
            <>{criteria.isReward ? 'true' : 'false'}</>
          ]
        }))
        .concat([
          {
            key: 'gwNew',
            cells: [
              <>
                <FormSelect
                  value={this.state.addCriteria.metric}
                  id="addMetricName"
                  name="addMetricName"
                  onChange={this.onAddMetricName}
                >
                  {this.props.metricNames.map((option, index) => (
                    <FormSelectOption isDisabled={false} key={'p' + index} value={option} label={option} />
                  ))}
                </FormSelect>
              </>,
              <FormGroup fieldId="addTolerance" isValid={this.state.addCriteria.tolerance > 0}>
                <TextInput
                  id="addTolerance"
                  type="number"
                  value={this.state.addCriteria.tolerance}
                  placeholder="Tolerance"
                  onChange={value => this.onAddTolerance(value, '')}
                  isValid={!isNaN(Number(this.state.addCriteria.tolerance))}
                />
              </FormGroup>,
              <>
                <FormSelect
                  value={this.state.addCriteria.toleranceType}
                  id="addPortProtocol"
                  name="addPortProtocol"
                  onChange={this.onAddToleranceType}
                >
                  {toleranceType.map((option, index) => (
                    <FormSelectOption isDisabled={false} key={'p' + index} value={option.name} label={option.value} />
                  ))}
                </FormSelect>
              </>,
              <>
                <Checkbox
                  label="YES"
                  id="stopOnFailure"
                  name="stopOnFailure"
                  aria-label="Stop On Failure"
                  isChecked={this.state.addCriteria.stopOnFailure}
                  onChange={this.onAddStopOnFailure}
                />
              </>,
              <>
                <Checkbox
                  label="YES"
                  id="isReward"
                  name="isReward"
                  aria-label="Reward"
                  isChecked={this.state.addCriteria.isReward}
                  onChange={this.onIsReward}
                />
              </>,
              <>
                <Button
                  id="addServerBtn"
                  variant="secondary"
                  isDisabled={this.state.addCriteria.metric.length === 0}
                  onClick={this.onAddCriteria}
                >
                  Add this Criteria
                </Button>
              </>
            ]
          }
        ]);
    } else {
      // NOTE(review): these rows omit the Reward cell, so they fill only 4 of
      // the 6 header columns — confirm whether that is intentional.
      return this.props.criterias.map((criteria, i) => ({
        key: 'criteria' + i,
        cells: [
          <>{criteria.metric}</>,
          <>{criteria.tolerance}</>,
          <>{criteria.toleranceType}</>,
          <>{criteria.stopOnFailure ? 'true' : 'false'}</>
        ]
      }));
    }
  }

  // Standalone add-form shown on non-'/new' routes (the '/new' route uses the
  // inline table row instead).
  renderCriteriaForm() {
    return (
      <>
        <div>
          {this.props.criterias.length === 0 && (
            <span className={noCriteriaStyle}>(At least one criteria is needed)</span>
          )}
          <p>&nbsp;</p>
          <Form isHorizontal={true}>
            <FormGroup fieldId="name" label="Metric" isRequired={true}>
              <FormSelect
                value={this.state.addCriteria.metric}
                id="addMetricName"
                name="addMetricName"
                onChange={this.onAddMetricName}
                style={{ width: 'auto' }}
              >
                {this.props.metricNames.map((option, index) => (
                  <FormSelectOption isDisabled={false} key={'p' + index} value={option} label={option} />
                ))}
              </FormSelect>
            </FormGroup>
            {/* fieldId fixed from 'addTolerancef' typo to match the input id */}
            <FormGroup label="Tolerance" fieldId="addTolerance" isValid={this.state.addCriteria.tolerance > 0}>
              <TextInput
                id="addTolerance"
                type="number"
                value={this.state.addCriteria.tolerance}
                placeholder="Tolerance"
                onChange={value => this.onAddTolerance(value, '')}
                isValid={!isNaN(Number(this.state.addCriteria.tolerance))}
                style={{ width: 'auto' }}
              />
            </FormGroup>
            {/* label fixed: this group holds the threshold-type select, not a protocol */}
            <FormGroup label="Threshold Type" fieldId="addPortProtocol">
              <FormSelect
                value={this.state.addCriteria.toleranceType}
                id="addPortProtocol"
                name="addPortProtocol"
                onChange={this.onAddToleranceType}
                style={{ width: 'auto' }}
              >
                {toleranceType.map((option, index) => (
                  <FormSelectOption isDisabled={false} key={'p' + index} value={option.name} label={option.value} />
                ))}
              </FormSelect>
            </FormGroup>
            {/* label/fieldId fixed: this group is the stop-on-failure checkbox,
                and must not reuse the threshold-type fieldId */}
            <FormGroup label="Stop on Failure" fieldId="stopOnFailure">
              <Checkbox
                label="Stop On Failure"
                id="stopOnFailure"
                name="stopOnFailure"
                aria-label="Stop On Failure"
                isChecked={this.state.addCriteria.stopOnFailure}
                onChange={this.onAddStopOnFailure}
              />
            </FormGroup>
            <>
              <Button
                style={{ paddingLeft: '6px' }}
                id="addServerBtn"
                variant="primary"
                isDisabled={this.state.addCriteria.metric.length === 0}
                onClick={this.onAddCriteria}
              >
                Add this Criteria
              </Button>
            </>
          </Form>
        </div>
      </>
    );
  }

  render() {
    return (
      <>
        <Table
          aria-label="Success Criterias"
          cells={headerCells}
          rows={this.rows()}
          // @ts-ignore
          actionResolver={this.actionResolver}
        >
          <TableHeader />
          <TableBody />
        </Table>
        {history.location.pathname.endsWith('/new') ? '' : this.renderCriteriaForm()}
      </>
    );
  }
}
export default ExperimentCriteriaForm; | the_stack |
* This file was automatically generated by https://github.com/Bolisov/google-api-typings-generator. Please do not edit it manually.
* In case of any problems please post issue to https://github.com/Bolisov/google-api-typings-generator
**/
gapi.load('client', () => {
/** now we can use gapi.client */
gapi.client.load('youtube', 'v3', () => {
/** now we can use gapi.client.youtube */
/** don't forget to authenticate your client before sending any request to resources: */
/** declare client_id registered in Google Developers Console */
const client_id = '<<PUT YOUR CLIENT ID HERE>>';
const scope = [
/** Manage your YouTube account */
'https://www.googleapis.com/auth/youtube',
/** Manage your YouTube account */
'https://www.googleapis.com/auth/youtube.force-ssl',
/** View your YouTube account */
'https://www.googleapis.com/auth/youtube.readonly',
/** Manage your YouTube videos */
'https://www.googleapis.com/auth/youtube.upload',
/** View and manage your assets and associated content on YouTube */
'https://www.googleapis.com/auth/youtubepartner',
/** View private information of your YouTube channel relevant during the audit process with a YouTube partner */
'https://www.googleapis.com/auth/youtubepartner-channel-audit',
];
const immediate = true;
gapi.auth.authorize({ client_id, scope, immediate }, authResult => {
if (authResult && !authResult.error) {
/** handle successful authorization */
run();
} else {
/** handle authorization error */
}
});
run();
});
async function run() {
/**
* Posts a bulletin for a specific channel. (The user submitting the request must be authorized to act on the channel's behalf.)
*
* Note: Even though an activity resource can contain information about actions like a user rating a video or marking a video as a favorite, you need to
* use other API methods to generate those activity resources. For example, you would use the API's videos.rate() method to rate a video and the
* playlistItems.insert() method to mark a video as a favorite.
*/
await gapi.client.activities.insert({
part: "part",
});
/**
* Returns a list of channel activity events that match the request criteria. For example, you can retrieve events associated with a particular channel,
* events associated with the user's subscriptions and Google+ friends, or the YouTube home page feed, which is customized for each user.
*/
await gapi.client.activities.list({
channelId: "channelId",
home: true,
maxResults: 3,
mine: true,
pageToken: "pageToken",
part: "part",
publishedAfter: "publishedAfter",
publishedBefore: "publishedBefore",
regionCode: "regionCode",
});
/** Deletes a specified caption track. */
await gapi.client.captions.delete({
id: "id",
onBehalfOf: "onBehalfOf",
onBehalfOfContentOwner: "onBehalfOfContentOwner",
});
/**
* Downloads a caption track. The caption track is returned in its original format unless the request specifies a value for the tfmt parameter and in its
* original language unless the request specifies a value for the tlang parameter.
*/
await gapi.client.captions.download({
id: "id",
onBehalfOf: "onBehalfOf",
onBehalfOfContentOwner: "onBehalfOfContentOwner",
tfmt: "tfmt",
tlang: "tlang",
});
/** Uploads a caption track. */
await gapi.client.captions.insert({
onBehalfOf: "onBehalfOf",
onBehalfOfContentOwner: "onBehalfOfContentOwner",
part: "part",
sync: true,
});
/**
* Returns a list of caption tracks that are associated with a specified video. Note that the API response does not contain the actual captions and that
* the captions.download method provides the ability to retrieve a caption track.
*/
await gapi.client.captions.list({
id: "id",
onBehalfOf: "onBehalfOf",
onBehalfOfContentOwner: "onBehalfOfContentOwner",
part: "part",
videoId: "videoId",
});
/** Updates a caption track. When updating a caption track, you can change the track's draft status, upload a new caption file for the track, or both. */
await gapi.client.captions.update({
onBehalfOf: "onBehalfOf",
onBehalfOfContentOwner: "onBehalfOfContentOwner",
part: "part",
sync: true,
});
/**
* Uploads a channel banner image to YouTube. This method represents the first two steps in a three-step process to update the banner image for a channel:
*
* - Call the channelBanners.insert method to upload the binary image data to YouTube. The image must have a 16:9 aspect ratio and be at least 2120x1192
* pixels.
* - Extract the url property's value from the response that the API returns for step 1.
* - Call the channels.update method to update the channel's branding settings. Set the brandingSettings.image.bannerExternalUrl property's value to the
* URL obtained in step 2.
*/
await gapi.client.channelBanners.insert({
channelId: "channelId",
onBehalfOfContentOwner: "onBehalfOfContentOwner",
});
/** Deletes a channelSection. */
await gapi.client.channelSections.delete({
id: "id",
onBehalfOfContentOwner: "onBehalfOfContentOwner",
});
/** Adds a channelSection for the authenticated user's channel. */
await gapi.client.channelSections.insert({
onBehalfOfContentOwner: "onBehalfOfContentOwner",
onBehalfOfContentOwnerChannel: "onBehalfOfContentOwnerChannel",
part: "part",
});
/** Returns channelSection resources that match the API request criteria. */
await gapi.client.channelSections.list({
channelId: "channelId",
hl: "hl",
id: "id",
mine: true,
onBehalfOfContentOwner: "onBehalfOfContentOwner",
part: "part",
});
/** Update a channelSection. */
await gapi.client.channelSections.update({
onBehalfOfContentOwner: "onBehalfOfContentOwner",
part: "part",
});
/** Returns a collection of zero or more channel resources that match the request criteria. */
await gapi.client.channels.list({
categoryId: "categoryId",
forUsername: "forUsername",
hl: "hl",
id: "id",
managedByMe: true,
maxResults: 6,
mine: true,
mySubscribers: true,
onBehalfOfContentOwner: "onBehalfOfContentOwner",
pageToken: "pageToken",
part: "part",
});
/**
* Updates a channel's metadata. Note that this method currently only supports updates to the channel resource's brandingSettings and invideoPromotion
* objects and their child properties.
*/
await gapi.client.channels.update({
onBehalfOfContentOwner: "onBehalfOfContentOwner",
part: "part",
});
/** Creates a new top-level comment. To add a reply to an existing comment, use the comments.insert method instead. */
await gapi.client.commentThreads.insert({
part: "part",
});
/** Returns a list of comment threads that match the API request parameters. */
await gapi.client.commentThreads.list({
allThreadsRelatedToChannelId: "allThreadsRelatedToChannelId",
channelId: "channelId",
id: "id",
maxResults: 4,
moderationStatus: "moderationStatus",
order: "order",
pageToken: "pageToken",
part: "part",
searchTerms: "searchTerms",
textFormat: "textFormat",
videoId: "videoId",
});
/** Modifies the top-level comment in a comment thread. */
await gapi.client.commentThreads.update({
part: "part",
});
/** Deletes a comment. */
await gapi.client.comments.delete({
id: "id",
});
/** Creates a reply to an existing comment. Note: To create a top-level comment, use the commentThreads.insert method. */
await gapi.client.comments.insert({
part: "part",
});
/** Returns a list of comments that match the API request parameters. */
await gapi.client.comments.list({
id: "id",
maxResults: 2,
pageToken: "pageToken",
parentId: "parentId",
part: "part",
textFormat: "textFormat",
});
/** Expresses the caller's opinion that one or more comments should be flagged as spam. */
await gapi.client.comments.markAsSpam({
id: "id",
});
/**
* Sets the moderation status of one or more comments. The API request must be authorized by the owner of the channel or video associated with the
* comments.
*/
await gapi.client.comments.setModerationStatus({
banAuthor: true,
id: "id",
moderationStatus: "moderationStatus",
});
/** Modifies a comment. */
await gapi.client.comments.update({
part: "part",
});
/** Lists fan funding events for a channel. */
await gapi.client.fanFundingEvents.list({
hl: "hl",
maxResults: 2,
pageToken: "pageToken",
part: "part",
});
/** Returns a list of categories that can be associated with YouTube channels. */
await gapi.client.guideCategories.list({
hl: "hl",
id: "id",
part: "part",
regionCode: "regionCode",
});
/** Returns a list of application languages that the YouTube website supports. */
await gapi.client.i18nLanguages.list({
hl: "hl",
part: "part",
});
/** Returns a list of content regions that the YouTube website supports. */
await gapi.client.i18nRegions.list({
hl: "hl",
part: "part",
});
/**
* Binds a YouTube broadcast to a stream or removes an existing binding between a broadcast and a stream. A broadcast can only be bound to one video
* stream, though a video stream may be bound to more than one broadcast.
*/
await gapi.client.liveBroadcasts.bind({
id: "id",
onBehalfOfContentOwner: "onBehalfOfContentOwner",
onBehalfOfContentOwnerChannel: "onBehalfOfContentOwnerChannel",
part: "part",
streamId: "streamId",
});
/** Controls the settings for a slate that can be displayed in the broadcast stream. */
await gapi.client.liveBroadcasts.control({
displaySlate: true,
id: "id",
offsetTimeMs: "offsetTimeMs",
onBehalfOfContentOwner: "onBehalfOfContentOwner",
onBehalfOfContentOwnerChannel: "onBehalfOfContentOwnerChannel",
part: "part",
walltime: "walltime",
});
/** Deletes a broadcast. */
await gapi.client.liveBroadcasts.delete({
id: "id",
onBehalfOfContentOwner: "onBehalfOfContentOwner",
onBehalfOfContentOwnerChannel: "onBehalfOfContentOwnerChannel",
});
/** Creates a broadcast. */
await gapi.client.liveBroadcasts.insert({
onBehalfOfContentOwner: "onBehalfOfContentOwner",
onBehalfOfContentOwnerChannel: "onBehalfOfContentOwnerChannel",
part: "part",
});
/** Returns a list of YouTube broadcasts that match the API request parameters. */
await gapi.client.liveBroadcasts.list({
broadcastStatus: "broadcastStatus",
broadcastType: "broadcastType",
id: "id",
maxResults: 4,
mine: true,
onBehalfOfContentOwner: "onBehalfOfContentOwner",
onBehalfOfContentOwnerChannel: "onBehalfOfContentOwnerChannel",
pageToken: "pageToken",
part: "part",
});
/**
* Changes the status of a YouTube live broadcast and initiates any processes associated with the new status. For example, when you transition a
* broadcast's status to testing, YouTube starts to transmit video to that broadcast's monitor stream. Before calling this method, you should confirm that
* the value of the status.streamStatus property for the stream bound to your broadcast is active.
*/
await gapi.client.liveBroadcasts.transition({
broadcastStatus: "broadcastStatus",
id: "id",
onBehalfOfContentOwner: "onBehalfOfContentOwner",
onBehalfOfContentOwnerChannel: "onBehalfOfContentOwnerChannel",
part: "part",
});
/** Updates a broadcast. For example, you could modify the broadcast settings defined in the liveBroadcast resource's contentDetails object. */
await gapi.client.liveBroadcasts.update({
onBehalfOfContentOwner: "onBehalfOfContentOwner",
onBehalfOfContentOwnerChannel: "onBehalfOfContentOwnerChannel",
part: "part",
});
/** Removes a chat ban. */
await gapi.client.liveChatBans.delete({
id: "id",
});
/** Adds a new ban to the chat. */
await gapi.client.liveChatBans.insert({
part: "part",
});
/** Deletes a chat message. */
await gapi.client.liveChatMessages.delete({
id: "id",
});
/** Adds a message to a live chat. */
await gapi.client.liveChatMessages.insert({
part: "part",
});
/** Lists live chat messages for a specific chat. */
await gapi.client.liveChatMessages.list({
hl: "hl",
liveChatId: "liveChatId",
maxResults: 3,
pageToken: "pageToken",
part: "part",
profileImageSize: 6,
});
/** Removes a chat moderator. */
await gapi.client.liveChatModerators.delete({
id: "id",
});
/** Adds a new moderator for the chat. */
await gapi.client.liveChatModerators.insert({
part: "part",
});
/** Lists moderators for a live chat. */
await gapi.client.liveChatModerators.list({
liveChatId: "liveChatId",
maxResults: 2,
pageToken: "pageToken",
part: "part",
});
/** Deletes a video stream. */
await gapi.client.liveStreams.delete({
id: "id",
onBehalfOfContentOwner: "onBehalfOfContentOwner",
onBehalfOfContentOwnerChannel: "onBehalfOfContentOwnerChannel",
});
/** Creates a video stream. The stream enables you to send your video to YouTube, which can then broadcast the video to your audience. */
await gapi.client.liveStreams.insert({
onBehalfOfContentOwner: "onBehalfOfContentOwner",
onBehalfOfContentOwnerChannel: "onBehalfOfContentOwnerChannel",
part: "part",
});
/** Returns a list of video streams that match the API request parameters. */
await gapi.client.liveStreams.list({
id: "id",
maxResults: 2,
mine: true,
onBehalfOfContentOwner: "onBehalfOfContentOwner",
onBehalfOfContentOwnerChannel: "onBehalfOfContentOwnerChannel",
pageToken: "pageToken",
part: "part",
});
/** Updates a video stream. If the properties that you want to change cannot be updated, then you need to create a new stream with the proper settings. */
await gapi.client.liveStreams.update({
onBehalfOfContentOwner: "onBehalfOfContentOwner",
onBehalfOfContentOwnerChannel: "onBehalfOfContentOwnerChannel",
part: "part",
});
/** Deletes a playlist item. */
await gapi.client.playlistItems.delete({
id: "id",
onBehalfOfContentOwner: "onBehalfOfContentOwner",
});
/** Adds a resource to a playlist. */
await gapi.client.playlistItems.insert({
onBehalfOfContentOwner: "onBehalfOfContentOwner",
part: "part",
});
/**
* Returns a collection of playlist items that match the API request parameters. You can retrieve all of the playlist items in a specified playlist or
* retrieve one or more playlist items by their unique IDs.
*/
await gapi.client.playlistItems.list({
id: "id",
maxResults: 2,
onBehalfOfContentOwner: "onBehalfOfContentOwner",
pageToken: "pageToken",
part: "part",
playlistId: "playlistId",
videoId: "videoId",
});
/** Modifies a playlist item. For example, you could update the item's position in the playlist. */
await gapi.client.playlistItems.update({
onBehalfOfContentOwner: "onBehalfOfContentOwner",
part: "part",
});
/** Deletes a playlist. */
await gapi.client.playlists.delete({
id: "id",
onBehalfOfContentOwner: "onBehalfOfContentOwner",
});
/** Creates a playlist. */
await gapi.client.playlists.insert({
onBehalfOfContentOwner: "onBehalfOfContentOwner",
onBehalfOfContentOwnerChannel: "onBehalfOfContentOwnerChannel",
part: "part",
});
/**
* Returns a collection of playlists that match the API request parameters. For example, you can retrieve all playlists that the authenticated user owns,
* or you can retrieve one or more playlists by their unique IDs.
*/
await gapi.client.playlists.list({
channelId: "channelId",
hl: "hl",
id: "id",
maxResults: 4,
mine: true,
onBehalfOfContentOwner: "onBehalfOfContentOwner",
onBehalfOfContentOwnerChannel: "onBehalfOfContentOwnerChannel",
pageToken: "pageToken",
part: "part",
});
/** Modifies a playlist. For example, you could change a playlist's title, description, or privacy status. */
await gapi.client.playlists.update({
onBehalfOfContentOwner: "onBehalfOfContentOwner",
part: "part",
});
/**
* Returns a collection of search results that match the query parameters specified in the API request. By default, a search result set identifies
* matching video, channel, and playlist resources, but you can also configure queries to only retrieve a specific type of resource.
*/
await gapi.client.search.list({
channelId: "channelId",
channelType: "channelType",
eventType: "eventType",
forContentOwner: true,
forDeveloper: true,
forMine: true,
location: "location",
locationRadius: "locationRadius",
maxResults: 9,
onBehalfOfContentOwner: "onBehalfOfContentOwner",
order: "order",
pageToken: "pageToken",
part: "part",
publishedAfter: "publishedAfter",
publishedBefore: "publishedBefore",
q: "q",
regionCode: "regionCode",
relatedToVideoId: "relatedToVideoId",
relevanceLanguage: "relevanceLanguage",
safeSearch: "safeSearch",
topicId: "topicId",
type: "type",
videoCaption: "videoCaption",
videoCategoryId: "videoCategoryId",
videoDefinition: "videoDefinition",
videoDimension: "videoDimension",
videoDuration: "videoDuration",
videoEmbeddable: "videoEmbeddable",
videoLicense: "videoLicense",
videoSyndicated: "videoSyndicated",
videoType: "videoType",
});
/** Lists sponsors for a channel. */
await gapi.client.sponsors.list({
filter: "filter",
maxResults: 2,
pageToken: "pageToken",
part: "part",
});
/** Deletes a subscription. */
await gapi.client.subscriptions.delete({
id: "id",
});
/** Adds a subscription for the authenticated user's channel. */
await gapi.client.subscriptions.insert({
part: "part",
});
/** Returns subscription resources that match the API request criteria. */
await gapi.client.subscriptions.list({
channelId: "channelId",
forChannelId: "forChannelId",
id: "id",
maxResults: 4,
mine: true,
myRecentSubscribers: true,
mySubscribers: true,
onBehalfOfContentOwner: "onBehalfOfContentOwner",
onBehalfOfContentOwnerChannel: "onBehalfOfContentOwnerChannel",
order: "order",
pageToken: "pageToken",
part: "part",
});
/** Lists Super Chat events for a channel. */
await gapi.client.superChatEvents.list({
hl: "hl",
maxResults: 2,
pageToken: "pageToken",
part: "part",
});
/** Uploads a custom video thumbnail to YouTube and sets it for a video. */
await gapi.client.thumbnails.set({
onBehalfOfContentOwner: "onBehalfOfContentOwner",
videoId: "videoId",
});
/** Returns a list of abuse reasons that can be used for reporting abusive videos. */
await gapi.client.videoAbuseReportReasons.list({
hl: "hl",
part: "part",
});
/** Returns a list of categories that can be associated with YouTube videos. */
await gapi.client.videoCategories.list({
hl: "hl",
id: "id",
part: "part",
regionCode: "regionCode",
});
/** Deletes a YouTube video. */
await gapi.client.videos.delete({
id: "id",
onBehalfOfContentOwner: "onBehalfOfContentOwner",
});
/** Retrieves the ratings that the authorized user gave to a list of specified videos. */
await gapi.client.videos.getRating({
id: "id",
onBehalfOfContentOwner: "onBehalfOfContentOwner",
});
/** Uploads a video to YouTube and optionally sets the video's metadata. */
await gapi.client.videos.insert({
autoLevels: true,
notifySubscribers: true,
onBehalfOfContentOwner: "onBehalfOfContentOwner",
onBehalfOfContentOwnerChannel: "onBehalfOfContentOwnerChannel",
part: "part",
stabilize: true,
});
/** Returns a list of videos that match the API request parameters. */
await gapi.client.videos.list({
chart: "chart",
hl: "hl",
id: "id",
locale: "locale",
maxHeight: 5,
maxResults: 6,
maxWidth: 7,
myRating: "myRating",
onBehalfOfContentOwner: "onBehalfOfContentOwner",
pageToken: "pageToken",
part: "part",
regionCode: "regionCode",
videoCategoryId: "videoCategoryId",
});
/** Add a like or dislike rating to a video or remove a rating from a video. */
await gapi.client.videos.rate({
id: "id",
rating: "rating",
});
/** Report abuse for a video. */
await gapi.client.videos.reportAbuse({
onBehalfOfContentOwner: "onBehalfOfContentOwner",
});
/** Updates a video's metadata. */
await gapi.client.videos.update({
onBehalfOfContentOwner: "onBehalfOfContentOwner",
part: "part",
});
/** Uploads a watermark image to YouTube and sets it for a channel. */
await gapi.client.watermarks.set({
channelId: "channelId",
onBehalfOfContentOwner: "onBehalfOfContentOwner",
});
/** Deletes a channel's watermark image. */
await gapi.client.watermarks.unset({
channelId: "channelId",
onBehalfOfContentOwner: "onBehalfOfContentOwner",
});
}
}); | the_stack |
import { ContainerAdapterClient } from '../../container_adapter_client'
import { MasterNodeRegTestContainer } from '@defichain/testcontainers'
import {
ExtHTLC,
HTLC,
ICXClaimDFCHTLCInfo,
ICXDFCHTLCInfo,
ICXEXTHTLCInfo,
ICXGenericResult,
ICXHTLCType,
ICXListHTLCOptions,
ICXOffer,
ICXOfferInfo,
ICXOrder,
ICXOrderInfo,
ICXOrderStatus
} from '../../../src/category/icxorderbook'
import BigNumber from 'bignumber.js'
import { accountBTC, accountDFI, ICXSetup, idDFI, symbolDFI } from './icx_setup'
import { RpcApiError } from '@defichain/jellyfish-api-core'
describe('ICXOrderBook.submitDFCHTLC', () => {
const container = new MasterNodeRegTestContainer()
const client = new ContainerAdapterClient(container)
const icxSetup = new ICXSetup(container, client)
// One-time suite setup: boot a regtest masternode container and prepare the
// ICX trading environment (accounts, BTC token, DEX pool liquidity, taker fee).
// The steps are order-dependent: accounts/tokens must exist before funding,
// and the pool must exist before liquidity is added.
beforeAll(async () => {
  await container.start()
  await container.waitForReady()
  // Wait for coinbase maturity so the wallet has spendable funds for setup.
  await container.waitForWalletCoinbaseMaturity()
  await icxSetup.createAccounts()
  await icxSetup.createBTCToken()
  await icxSetup.initializeTokensIds()
  await icxSetup.mintBTCtoken(100)
  await icxSetup.fundAccount(accountDFI, symbolDFI, 500)
  await icxSetup.fundAccount(accountBTC, symbolDFI, 10) // for fee
  await icxSetup.createBTCDFIPool()
  await icxSetup.addLiquidityToBTCDFIPool(1, 100)
  // 0.001 BTC per BTC taker fee; the fee assertions below derive from this.
  await icxSetup.setTakerFee(0.001)
})
// Tear down the container once the whole suite has run.
afterAll(async () => {
  await container.stop()
})

// Close any offers a test left open so tests stay independent of each other.
afterEach(async () => {
  await icxSetup.closeAllOpenOffers()
})
// Happy path: maker sells 15 DFI for BTC, taker offers 0.10 BTC (= 10 DFI),
// then the maker locks 10 DFI in a DFC HTLC. Verifies both the taker fee on
// the offer and the maker fee on the HTLC, plus the stored HTLC fields.
it('should submit DFC HTLC for a DFC buy offer', async () => {
  // create order - maker
  const order: ICXOrder = {
    tokenFrom: idDFI,
    chainTo: 'BTC',
    ownerAddress: accountDFI,
    receivePubkey: '037f9563f30c609b19fd435a19b8bde7d6db703012ba1aba72e9f42a87366d1941',
    amountFrom: new BigNumber(15),
    orderPrice: new BigNumber(0.01)
  }
  const createOrderResult: ICXGenericResult = await client.icxorderbook.createOrder(order, [])
  const createOrderTxId = createOrderResult.txid
  await container.generate(1)

  // list ICX orders
  const ordersAfterCreateOrder: Record<string, ICXOrderInfo | ICXOfferInfo> = await client.icxorderbook.listOrders()
  expect((ordersAfterCreateOrder as Record<string, ICXOrderInfo>)[createOrderTxId].status).toStrictEqual(ICXOrderStatus.OPEN)

  const accountBTCBeforeOffer: Record<string, BigNumber> = await client.call('getaccount', [accountBTC, {}, true], 'bignumber')

  // make Offer to partial amount 10 DFI - taker
  const offer: ICXOffer = {
    orderTx: createOrderTxId,
    amount: new BigNumber(0.10), // 0.10 BTC = 10 DFI
    ownerAddress: accountBTC
  }
  const makeOfferResult = await client.icxorderbook.makeOffer(offer, [])
  const makeOfferTxId = makeOfferResult.txid
  await container.generate(1)

  const accountBTCAfterOffer: Record<string, BigNumber> = await client.call('getaccount', [accountBTC, {}, true], 'bignumber')
  // check fee of 0.01 DFI has been reduced from the accountBTCBeforeOffer[idDFI]
  // Fee = takerFeePerBTC(inBTC) * amount(inBTC) * DEX DFI per BTC rate
  expect(accountBTCAfterOffer[idDFI]).toStrictEqual(accountBTCBeforeOffer[idDFI].minus(0.01))

  // List the ICX offers for orderTx = createOrderTxId and check
  const offersForOrder1: Record<string, ICXOrderInfo | ICXOfferInfo> = await client.icxorderbook.listOrders({ orderTx: createOrderTxId })
  expect(Object.keys(offersForOrder1).length).toStrictEqual(2) // extra entry for the warning text returned by the RPC atm.
  expect((offersForOrder1 as Record<string, ICXOfferInfo>)[makeOfferTxId].status).toStrictEqual(ICXOrderStatus.OPEN)

  const accountDFIBeforeDFCHTLC: Record<string, BigNumber> = await client.call('getaccount', [accountDFI, {}, true], 'bignumber')

  // create DFCHTLC - maker
  const DFCHTLC: HTLC = {
    offerTx: makeOfferTxId,
    amount: new BigNumber(10), // in DFC
    hash: '957fc0fd643f605b2938e0631a61529fd70bd35b2162a21d978c41e5241a5220',
    timeout: 1440
  }
  const DFCHTLCTxId = (await client.icxorderbook.submitDFCHTLC(DFCHTLC)).txid
  await container.generate(1)

  const accountDFIAfterDFCHTLC: Record<string, BigNumber> = await client.call('getaccount', [accountDFI, {}, true], 'bignumber')
  // maker fee should be reduced from accountDFIBeforeDFCHTLC
  expect(accountDFIAfterDFCHTLC[idDFI]).toStrictEqual(accountDFIBeforeDFCHTLC[idDFI].minus(0.01))

  // List htlc
  const listHTLCOptions: ICXListHTLCOptions = {
    offerTx: makeOfferTxId
  }
  const HTLCs: Record<string, ICXDFCHTLCInfo | ICXEXTHTLCInfo | ICXClaimDFCHTLCInfo> = await client.call('icx_listhtlcs', [listHTLCOptions], 'bignumber')
  expect(Object.keys(HTLCs).length).toStrictEqual(2) // extra entry for the warning text returned by the RPC atm.
  // The stored HTLC must reflect exactly what was submitted; amountInEXTAsset
  // is derived by the node as amount * orderPrice.
  expect(HTLCs[DFCHTLCTxId] as ICXDFCHTLCInfo).toStrictEqual(
    {
      type: ICXHTLCType.DFC,
      status: ICXOrderStatus.OPEN,
      offerTx: makeOfferTxId,
      amount: DFCHTLC.amount,
      amountInEXTAsset: DFCHTLC.amount.multipliedBy(order.orderPrice),
      hash: DFCHTLC.hash,
      timeout: new BigNumber(DFCHTLC.timeout as number),
      height: expect.any(BigNumber),
      refundHeight: expect.any(BigNumber)
    }
  )
})
// Same flow as the previous test, but the maker's HTLC locks only 5 DFI while
// the offer is for 10 DFI. A lower amount is allowed; the maker fee scales
// with the HTLC amount (0.005 instead of 0.01).
it('should submit DFC HTLC for a lower amount than the amount in DFC buy offer', async () => {
  // create order - maker
  const order: ICXOrder = {
    tokenFrom: idDFI,
    chainTo: 'BTC',
    ownerAddress: accountDFI,
    receivePubkey: '037f9563f30c609b19fd435a19b8bde7d6db703012ba1aba72e9f42a87366d1941',
    amountFrom: new BigNumber(15),
    orderPrice: new BigNumber(0.01)
  }
  const createOrderResult: ICXGenericResult = await client.icxorderbook.createOrder(order, [])
  const createOrderTxId = createOrderResult.txid
  await container.generate(1)

  // list ICX orders
  const ordersAfterCreateOrder: Record<string, ICXOrderInfo | ICXOfferInfo> = await client.icxorderbook.listOrders()
  expect((ordersAfterCreateOrder as Record<string, ICXOrderInfo>)[createOrderTxId].status).toStrictEqual(ICXOrderStatus.OPEN)

  const accountBTCBeforeOffer: Record<string, BigNumber> = await client.call('getaccount', [accountBTC, {}, true], 'bignumber')

  // make Offer to partial amount 10 DFI - taker
  const offer: ICXOffer = {
    orderTx: createOrderTxId,
    amount: new BigNumber(0.10), // 0.10 BTC = 10 DFI
    ownerAddress: accountBTC
  }
  const makeOfferResult = await client.icxorderbook.makeOffer(offer, [])
  const makeOfferTxId = makeOfferResult.txid
  await container.generate(1)

  const accountBTCAfterOffer: Record<string, BigNumber> = await client.call('getaccount', [accountBTC, {}, true], 'bignumber')
  // check fee of 0.01 DFI has been reduced from the accountBTCBeforeOffer[idDFI]
  // Fee = takerFeePerBTC(inBTC) * amount(inBTC) * DEX DFI per BTC rate
  expect(accountBTCAfterOffer[idDFI]).toStrictEqual(accountBTCBeforeOffer[idDFI].minus(0.01))

  // List the ICX offers for orderTx = createOrderTxId and check
  const offersForOrder1: Record<string, ICXOrderInfo | ICXOfferInfo> = await client.icxorderbook.listOrders({ orderTx: createOrderTxId })
  expect(Object.keys(offersForOrder1).length).toStrictEqual(2) // extra entry for the warning text returned by the RPC atm.
  expect((offersForOrder1 as Record<string, ICXOfferInfo>)[makeOfferTxId].status).toStrictEqual(ICXOrderStatus.OPEN)

  const accountDFIBeforeDFCHTLC: Record<string, BigNumber> = await client.call('getaccount', [accountDFI, {}, true], 'bignumber')

  // create DFCHTLC for 5 DFI - maker
  const DFCHTLC: HTLC = {
    offerTx: makeOfferTxId,
    amount: new BigNumber(5), // lower than the amout in offer
    hash: '957fc0fd643f605b2938e0631a61529fd70bd35b2162a21d978c41e5241a5220',
    timeout: 1440
  }
  const DFCHTLCTxId = (await client.icxorderbook.submitDFCHTLC(DFCHTLC)).txid
  await container.generate(1)

  const accountDFIAfterDFCHTLC: Record<string, BigNumber> = await client.call('getaccount', [accountDFI, {}, true], 'bignumber')
  // maker fee should be reduced from accountDFIBeforeDFCHTLC
  // (half the fee of the full-amount test, since only 5 DFI are locked)
  expect(accountDFIAfterDFCHTLC[idDFI]).toStrictEqual(accountDFIBeforeDFCHTLC[idDFI].minus(0.005))

  // List htlc
  const listHTLCOptions: ICXListHTLCOptions = {
    offerTx: makeOfferTxId
  }
  const HTLCs: Record<string, ICXDFCHTLCInfo | ICXEXTHTLCInfo | ICXClaimDFCHTLCInfo> = await client.call('icx_listhtlcs', [listHTLCOptions], 'bignumber')
  expect(Object.keys(HTLCs).length).toStrictEqual(2) // extra entry for the warning text returned by the RPC atm.
  expect(HTLCs[DFCHTLCTxId] as ICXDFCHTLCInfo).toStrictEqual(
    {
      type: ICXHTLCType.DFC,
      status: ICXOrderStatus.OPEN,
      offerTx: makeOfferTxId,
      amount: DFCHTLC.amount,
      amountInEXTAsset: DFCHTLC.amount.multipliedBy(order.orderPrice),
      hash: DFCHTLC.hash,
      timeout: new BigNumber(DFCHTLC.timeout as number),
      height: expect.any(BigNumber),
      refundHeight: expect.any(BigNumber)
    }
  )
})
// Reverse direction: maker sells 2 BTC for DFI (orderPrice 100 DFI/BTC), taker
// offers 100 DFI. The maker submits an external (BTC-side) HTLC first, then the
// taker locks 100 DFI in a DFC HTLC. Also verifies the node-supplied default
// DFC HTLC timeout (480) when none is given.
it('should submit DFC HTLC for a BTC buy offer', async () => {
  const order: ICXOrder = {
    chainFrom: 'BTC',
    tokenTo: idDFI,
    ownerAddress: accountDFI,
    amountFrom: new BigNumber(2),
    orderPrice: new BigNumber(100)
  }
  // create order - maker
  const createOrderResult: ICXGenericResult = await client.icxorderbook.createOrder(order, [])
  const createOrderTxId = createOrderResult.txid
  await container.generate(1)

  // list ICX orders
  const ordersAfterCreateOrder: Record<string, ICXOrderInfo | ICXOfferInfo> = await client.icxorderbook.listOrders()
  expect((ordersAfterCreateOrder as Record<string, ICXOrderInfo>)[createOrderTxId].status).toStrictEqual(ICXOrderStatus.OPEN)

  const accountBTCBeforeOffer: Record<string, BigNumber> = await client.call('getaccount', [accountBTC, {}, true], 'bignumber')

  // create Offer to partial amount 1 BTC - taker
  const offer: ICXOffer = {
    orderTx: createOrderTxId,
    amount: new BigNumber(100), // 100 DFI = 1 BTC
    ownerAddress: accountBTC,
    receivePubkey: '0348790cb93b203a8ea5ce07279cb209d807b535b2ca8b0988a6f7a6578e41f7a5'
  }
  const makeOfferResult = await client.icxorderbook.makeOffer(offer, [])
  const makeOfferTxId = makeOfferResult.txid
  await container.generate(1)

  const accountBTCAfterOffer: Record<string, BigNumber> = await client.call('getaccount', [accountBTC, {}, true], 'bignumber')
  // check fee of 0.1 DFI has been reduced from the accountBTCBeforeOffer[idDFI]
  // Fee = takerFeePerBTC(inBTC) * amount(inBTC) * DEX DFI per BTC rate
  expect(accountBTCAfterOffer[idDFI]).toStrictEqual(accountBTCBeforeOffer[idDFI].minus(0.1))

  // List the ICX offers for orderTx = createOrderTxId and check
  const offersForOrder1: Record<string, ICXOrderInfo | ICXOfferInfo> = await client.icxorderbook.listOrders({ orderTx: createOrderTxId })
  expect(Object.keys(offersForOrder1).length).toStrictEqual(2) // extra entry for the warning text returned by the RPC atm.
  expect((offersForOrder1 as Record<string, ICXOfferInfo>)[makeOfferTxId].status).toStrictEqual(ICXOrderStatus.OPEN)

  const accountDFIBeforeExtHTLC: Record<string, BigNumber> = await client.call('getaccount', [accountDFI, {}, true], 'bignumber')

  // submit EXT HTLC - maker
  const ExtHTLC: ExtHTLC = {
    offerTx: makeOfferTxId,
    amount: new BigNumber(1),
    hash: '957fc0fd643f605b2938e0631a61529fd70bd35b2162a21d978c41e5241a5220',
    htlcScriptAddress: '13sJQ9wBWh8ssihHUgAaCmNWJbBAG5Hr9N',
    ownerPubkey: '036494e7c9467c8c7ff3bf29e841907fb0fa24241866569944ea422479ec0e6252',
    timeout: 100
  }
  // NOTE(review): calls the raw RPC via container.call instead of the typed
  // client.icxorderbook wrapper used elsewhere — presumably submitExtHTLC was
  // not wrapped yet; confirm and migrate when available.
  const ExtHTLCTxId = (await container.call('icx_submitexthtlc', [ExtHTLC])).txid
  await container.generate(1)

  // List htlc
  const listHTLCOptions = {
    offerTx: makeOfferTxId
  }
  const HTLCs: Record<string, ICXDFCHTLCInfo | ICXEXTHTLCInfo | ICXClaimDFCHTLCInfo> = await client.call('icx_listhtlcs', [listHTLCOptions], 'bignumber')
  expect(Object.keys(HTLCs).length).toStrictEqual(2) // extra entry for the warning text returned by the RPC atm.
  expect(HTLCs[ExtHTLCTxId] as ICXEXTHTLCInfo).toStrictEqual(
    {
      type: ICXHTLCType.EXTERNAL,
      status: ICXOrderStatus.OPEN,
      offerTx: makeOfferTxId,
      amount: ExtHTLC.amount,
      amountInDFCAsset: ExtHTLC.amount.multipliedBy(order.orderPrice),
      hash: ExtHTLC.hash,
      htlcScriptAddress: ExtHTLC.htlcScriptAddress,
      ownerPubkey: ExtHTLC.ownerPubkey,
      timeout: new BigNumber(ExtHTLC.timeout),
      height: expect.any(BigNumber)
    }
  )

  const accountDFIAfterExtHTLC: Record<string, BigNumber> = await client.call('getaccount', [accountDFI, {}, true], 'bignumber')
  // maker deposit should be reduced from accountDFI
  expect(accountDFIAfterExtHTLC[idDFI]).toStrictEqual(accountDFIBeforeExtHTLC[idDFI].minus(0.1))

  const accountBTCBeforeDFCHTLC: Record<string, BigNumber> = await client.call('getaccount', [accountBTC, {}, true], 'bignumber')

  // create DFCHTLC - taker
  // timeout is intentionally omitted; the node applies its default,
  // asserted as 480 below.
  const DFCHTLC: HTLC = {
    offerTx: makeOfferTxId,
    amount: new BigNumber(100), // in DFC
    hash: '957fc0fd643f605b2938e0631a61529fd70bd35b2162a21d978c41e5241a5220'
  }
  const DFCHTLCTxId = (await client.icxorderbook.submitDFCHTLC(DFCHTLC)).txid
  await container.generate(1)

  const accountBTCAfterDFCHTLC: Record<string, BigNumber> = await client.call('getaccount', [accountBTC, {}, true], 'bignumber')
  // the taker's 100 DFI are now locked in the HTLC
  expect(accountBTCAfterDFCHTLC[idDFI]).toStrictEqual(accountBTCBeforeDFCHTLC[idDFI].minus(100))

  // List htlc
  const listHTLCOptionsAfterDFCHTLC = {
    offerTx: makeOfferTxId
  }
  const HTLCsAfterDFCHTLC: Record<string, ICXDFCHTLCInfo | ICXEXTHTLCInfo | ICXClaimDFCHTLCInfo> = await client.call('icx_listhtlcs', [listHTLCOptionsAfterDFCHTLC], 'bignumber')
  expect(Object.keys(HTLCsAfterDFCHTLC).length).toStrictEqual(3) // extra entry for the warning text returned by the RPC atm.
  expect(HTLCsAfterDFCHTLC[DFCHTLCTxId] as ICXDFCHTLCInfo).toStrictEqual(
    {
      type: ICXHTLCType.DFC,
      status: ICXOrderStatus.OPEN,
      offerTx: makeOfferTxId,
      amount: DFCHTLC.amount,
      amountInEXTAsset: DFCHTLC.amount.dividedBy(order.orderPrice),
      hash: DFCHTLC.hash,
      timeout: new BigNumber(480),
      height: expect.any(BigNumber),
      refundHeight: expect.any(BigNumber)
    }
  )
})
// Like the happy-path test, but funds the HTLC transaction from an explicit
// utxo and verifies via getrawtransaction that exactly that utxo was spent.
// Order/offer creation is factored into ICXSetup helpers here.
it('should submit DFC HTLC with input utxos for a DFC buy offer', async () => {
  const { order, createOrderTxId } = await icxSetup.createDFISellOrder('BTC', accountDFI, '037f9563f30c609b19fd435a19b8bde7d6db703012ba1aba72e9f42a87366d1941', new BigNumber(15), new BigNumber(0.01))
  const { makeOfferTxId } = await icxSetup.createDFIBuyOffer(createOrderTxId, new BigNumber(0.10), accountBTC)

  const accountDFIBeforeDFCHTLC: Record<string, BigNumber> = await client.call('getaccount', [accountDFI, {}, true], 'bignumber')

  // create DFCHTLC - maker
  const DFCHTLC: HTLC = {
    offerTx: makeOfferTxId,
    amount: new BigNumber(10), // in DFC
    hash: '957fc0fd643f605b2938e0631a61529fd70bd35b2162a21d978c41e5241a5220',
    timeout: 1440
  }
  // input utxos
  const inputUTXOs = await container.fundAddress(accountDFI, 10)
  const DFCHTLCTxId = (await client.icxorderbook.submitDFCHTLC(DFCHTLC, [inputUTXOs])).txid
  await container.generate(1)

  // the HTLC transaction must spend exactly the supplied utxo
  const rawtx = await container.call('getrawtransaction', [DFCHTLCTxId, true])
  expect(rawtx.vin[0].txid).toStrictEqual(inputUTXOs.txid)
  expect(rawtx.vin[0].vout).toStrictEqual(inputUTXOs.vout)

  const accountDFIAfterDFCHTLC: Record<string, BigNumber> = await client.call('getaccount', [accountDFI, {}, true], 'bignumber')
  // maker fee should be reduced from accountDFIBeforeDFCHTLC
  expect(accountDFIAfterDFCHTLC[idDFI]).toStrictEqual(accountDFIBeforeDFCHTLC[idDFI].minus(0.01))

  // List htlc
  const listHTLCOptions: ICXListHTLCOptions = {
    offerTx: makeOfferTxId
  }
  const HTLCs: Record<string, ICXDFCHTLCInfo | ICXEXTHTLCInfo | ICXClaimDFCHTLCInfo> = await client.call('icx_listhtlcs', [listHTLCOptions], 'bignumber')
  expect(Object.keys(HTLCs).length).toStrictEqual(2) // extra entry for the warning text returned by the RPC atm.
  expect(HTLCs[DFCHTLCTxId] as ICXDFCHTLCInfo).toStrictEqual(
    {
      type: ICXHTLCType.DFC,
      status: ICXOrderStatus.OPEN,
      offerTx: makeOfferTxId,
      amount: DFCHTLC.amount,
      amountInEXTAsset: DFCHTLC.amount.multipliedBy(order.orderPrice),
      hash: DFCHTLC.hash,
      timeout: new BigNumber(DFCHTLC.timeout as number),
      height: expect.any(BigNumber),
      refundHeight: expect.any(BigNumber)
    }
  )
})
// Negative path: an HTLC referencing a non-existent offer must be rejected
// and must not move any maker funds. Malformed hex ids are normalized by the
// node into zero-padded 32-byte txids before the existence check, which is
// why the error messages report the padded ids.
// (Fixed typo in the test title: "submiting" -> "submitting".)
it('should return an error when submitting DFC HTLC with invalid HTLC.offerTx', async () => {
  const { createOrderTxId } = await icxSetup.createDFISellOrder('BTC', accountDFI, '037f9563f30c609b19fd435a19b8bde7d6db703012ba1aba72e9f42a87366d1941', new BigNumber(15), new BigNumber(0.01))
  const { makeOfferTxId } = await icxSetup.createDFIBuyOffer(createOrderTxId, new BigNumber(0.10), accountBTC)

  const accountDFIBeforeDFCHTLC: Record<string, BigNumber> = await client.call('getaccount', [accountDFI, {}, true], 'bignumber')

  // create DFCHTLC with invalid offerTx "INVALID_OFFER_TX_ID" - maker
  const DFCHTLC: HTLC = {
    offerTx: 'INVALID_OFFER_TX_ID',
    amount: new BigNumber(10), // in DFC
    hash: '957fc0fd643f605b2938e0631a61529fd70bd35b2162a21d978c41e5241a5220',
    timeout: 1440
  }
  const promise = client.icxorderbook.submitDFCHTLC(DFCHTLC)
  await expect(promise).rejects.toThrow(RpcApiError)
  await expect(promise).rejects.toThrow('RpcApiError: \'offerTx (0000000000000000000000000000000000000000000000000000000000000000) does not exist\', code: -8, method: icx_submitdfchtlc')

  // create DFCHTLC with invalid offerTx "123" - maker
  const DFCHTLC2: HTLC = {
    offerTx: '123',
    amount: new BigNumber(10), // in DFC
    hash: '957fc0fd643f605b2938e0631a61529fd70bd35b2162a21d978c41e5241a5220',
    timeout: 1440
  }
  const promise2 = client.icxorderbook.submitDFCHTLC(DFCHTLC2)
  await expect(promise2).rejects.toThrow(RpcApiError)
  await expect(promise2).rejects.toThrow('RpcApiError: \'offerTx (0000000000000000000000000000000000000000000000000000000000000123) does not exist\', code: -8, method: icx_submitdfchtlc')

  const accountDFIAfterDFCHTLC: Record<string, BigNumber> = await client.call('getaccount', [accountDFI, {}, true], 'bignumber')
  // should be the same balance as accountDFIBeforeDFCHTLC
  expect(accountDFIAfterDFCHTLC[idDFI]).toStrictEqual(accountDFIBeforeDFCHTLC[idDFI])

  // List htlc
  const listHTLCOptions: ICXListHTLCOptions = {
    offerTx: makeOfferTxId
  }
  const HTLCs: Record<string, ICXDFCHTLCInfo | ICXEXTHTLCInfo | ICXClaimDFCHTLCInfo> = await client.call('icx_listhtlcs', [listHTLCOptions], 'bignumber')
  expect(Object.keys(HTLCs).length).toStrictEqual(1) // extra entry for the warning text returned by the RPC atm.
})
// Negative path: an HTLC for more DFI than the offer covers must be rejected
// by tx validation and must not move any maker funds.
// (Fixed typos in the test title: "submiting" -> "submitting",
// "relavant" -> "relevant".)
it('should return an error when submitting DFC HTLC with incorrect HTLC.amount from the amount in the relevant offer', async () => {
  const { createOrderTxId } = await icxSetup.createDFISellOrder('BTC', accountDFI, '037f9563f30c609b19fd435a19b8bde7d6db703012ba1aba72e9f42a87366d1941', new BigNumber(15), new BigNumber(0.01))
  const { makeOfferTxId } = await icxSetup.createDFIBuyOffer(createOrderTxId, new BigNumber(0.10), accountBTC)

  const accountDFIBeforeDFCHTLC: Record<string, BigNumber> = await client.call('getaccount', [accountDFI, {}, true], 'bignumber')

  // create DFCHTLC with incorrect amount 15
  const DFCHTLC: HTLC = {
    offerTx: makeOfferTxId,
    amount: new BigNumber(15), // here the amount is 15 DFI, but in the offer the amount is 10 DFI
    hash: '957fc0fd643f605b2938e0631a61529fd70bd35b2162a21d978c41e5241a5220',
    timeout: 1440
  }
  const promise = client.icxorderbook.submitDFCHTLC(DFCHTLC)
  await expect(promise).rejects.toThrow(RpcApiError)
  await expect(promise).rejects.toThrow('RpcApiError: \'Test ICXSubmitDFCHTLCTx execution failed:\namount must be lower or equal the offer one\', code: -32600, method: icx_submitdfchtlc')

  const accountDFIAfterDFCHTLC: Record<string, BigNumber> = await client.call('getaccount', [accountDFI, {}, true], 'bignumber')
  // should be the same balance as accountDFIBeforeDFCHTLC
  expect(accountDFIAfterDFCHTLC[idDFI]).toStrictEqual(accountDFIBeforeDFCHTLC[idDFI])

  // List htlc
  const listHTLCOptions: ICXListHTLCOptions = {
    offerTx: makeOfferTxId
  }
  const HTLCs: Record<string, ICXDFCHTLCInfo | ICXEXTHTLCInfo | ICXClaimDFCHTLCInfo> = await client.call('icx_listhtlcs', [listHTLCOptions], 'bignumber')
  expect(Object.keys(HTLCs).length).toStrictEqual(1) // extra entry for the warning text returned by the RPC atm.
})
}) | the_stack |
require('./data-cube-edit.css');
import * as React from 'react';
import { List } from 'immutable';
import { AttributeInfo } from 'swiv-plywood';
import { classNames } from '../../../utils/dom/dom';
import { generateUniqueName } from '../../../../common/utils/string/string';
import { ImmutableUtils } from "../../../../common/utils/immutable-utils/immutable-utils";
import { Duration, Timezone } from 'chronoshift';
import { DATA_CUBES_STRATEGIES_LABELS, STRINGS } from '../../../config/constants';
import { SvgIcon, FormLabel, Button, SimpleList, ImmutableInput, ImmutableList, ImmutableDropdown } from '../../../components/index';
import { DimensionModal, MeasureModal, SuggestionModal } from '../../../modals/index';
import { AppSettings, ListItem, Cluster, DataCube, Dimension, DimensionJS, Measure, MeasureJS } from '../../../../common/models/index';
import { DATA_CUBE as LABELS } from '../../../../common/models/labels';
import { ImmutableFormDelegate, ImmutableFormState } from '../../../utils/immutable-form-delegate/immutable-form-delegate';
// Props for the data cube edit view.
export interface DataCubeEditProps extends React.Props<any> {
  isNewDataCube?: boolean;                  // true when creating a cube rather than editing an existing one
  dataCube?: DataCube;                      // the cube being edited; seeds the form state
  clusters?: Cluster[];                     // clusters selectable as the cube's source
  tab?: string;                             // active tab value (taken from the URL hash for existing cubes)
  onSave: (newDataCube: DataCube) => void;  // invoked with the edited cube when the user saves
  onCancel?: () => void;                    // invoked when a new-cube edit is abandoned
}
// Component state; ImmutableFormState contributes newInstance (the cube being
// edited), canSave and errors.
export interface DataCubeEditState extends ImmutableFormState<DataCube> {
  tab?: any;                          // currently active Tab descriptor
  showDimensionsSuggestion?: boolean; // whether the dimension-suggestion modal is open
  showMeasuresSuggestion?: boolean;   // whether the measure-suggestion modal is open
}
// Descriptor for one tab of the edit view.
export interface Tab {
  label: string;             // human-readable tab caption
  value: string;             // identifier used in state and the URL hash
  render: () => JSX.Element; // renders the tab's body
}
export class DataCubeEdit extends React.Component<DataCubeEditProps, DataCubeEditState> {
private tabs: Tab[] = [
{label: 'General', value: 'general', render: this.renderGeneral},
{label: 'Attributes', value: 'attributes', render: this.renderAttributes},
{label: 'Dimensions', value: 'dimensions', render: this.renderDimensions},
{label: 'Measures', value: 'measures', render: this.renderMeasures}
];
private delegate: ImmutableFormDelegate<DataCube>;
constructor() {
super();
this.delegate = new ImmutableFormDelegate<DataCube>(this);
}
// Re-seed the form whenever a data cube arrives via props.
componentWillReceiveProps(nextProps: DataCubeEditProps) {
  if (!nextProps.dataCube) return;
  this.initFromProps(nextProps);
}
// Seed the form on first mount when a cube is already available.
componentDidMount() {
  const { dataCube } = this.props;
  if (dataCube) {
    this.initFromProps(this.props);
  }
}
// Reset the whole form state from props: clone the cube so edits never touch
// the original, clear validation errors and close the suggestion modals.
initFromProps(props: DataCubeEditProps) {
  // New cubes always start on the first tab; existing ones restore the tab
  // named in props.tab. Fall back to the first tab when props.tab names an
  // unknown tab, so state.tab is never undefined (the original indexed
  // filter(...)[0] directly, which yields undefined on a bad hash).
  const activeTab = props.isNewDataCube
    ? this.tabs[0]
    : this.tabs.filter((tab) => tab.value === props.tab)[0] || this.tabs[0];

  this.setState({
    newInstance: new DataCube(props.dataCube.valueOf()),
    canSave: true,
    errors: {},
    tab: activeTab,
    showDimensionsSuggestion: false,
    showMeasuresSuggestion: false
  });
}
// Activate a tab. Existing cubes encode the active tab as the last segment of
// the URL hash (navigation then flows back through props); brand-new cubes
// keep it purely in local state.
selectTab(tab: Tab) {
  if (!this.props.isNewDataCube) {
    const segments = window.location.hash.split('/');
    segments.splice(-1);
    window.location.hash = `${segments.join('/')}/${tab.value}`;
  } else {
    this.setState({tab});
  }
}
// Render one button per tab; the button for the active tab carries the
// `active` class.
renderTabs(activeTab: Tab): JSX.Element[] {
  return this.tabs.map((tab) => {
    const isActive = tab.value === activeTab.value;
    return <button
      className={classNames({active: isActive})}
      key={tab.value}
      onClick={this.selectTab.bind(this, tab)}
    >{tab.label}</button>;
  });
}
// Abandon the current edits. A brand-new cube delegates to the parent's
// cancel handler; an existing cube resets the form back to its props.
cancel() {
  if (this.props.isNewDataCube) {
    this.props.onCancel();
    return;
  }

  // Setting newInstance to undefined resets the inputs; the callback then
  // re-seeds the form from the untouched props.
  this.setState({newInstance: undefined}, () => this.initFromProps(this.props));
}
// Hand the edited cube to the parent's save handler, if one was provided.
save() {
  const { onSave } = this.props;
  if (onSave) onSave(this.state.newInstance);
}
// Navigate back to the cube list by stripping the trailing
// "/<cubeName>/<tab>" segment from the location hash.
goBack() {
  const { dataCube, tab } = this.props;
  const currentHash = window.location.hash;
  window.location.hash = currentHash.replace(`/${dataCube.name}/${tab}`, '');
}
// Build the dropdown items for the introspection strategy: a leading entry
// with value undefined stands for "use the default strategy", followed by
// every explicit strategy DataCube supports.
getIntrospectionStrategies(): ListItem[] {
  const labels = DATA_CUBES_STRATEGIES_LABELS as any;

  const defaultItem: ListItem = {
    label: `Default (${labels[DataCube.DEFAULT_INTROSPECTION]})`,
    value: undefined
  };

  const explicitItems = DataCube.INTROSPECTION_VALUES.map((value) => {
    return {value, label: labels[value]};
  });

  return [defaultItem].concat(explicitItems);
}
// Renders the "General" tab: title, description, cluster, source and default
// timezone inputs, all bound to state.newInstance through the form delegate.
renderGeneral(): JSX.Element {
  const { clusters } = this.props;
  const { newInstance, errors } = this.state;
  // Generators produce labeled/validated inputs bound to newInstance fields.
  var makeLabel = FormLabel.simpleGenerator(LABELS, errors);
  var makeTextInput = ImmutableInput.simpleGenerator(newInstance, this.delegate.onChange);
  var makeDropDownInput = ImmutableDropdown.simpleGenerator(newInstance, this.delegate.onChange);

  // The 'native' pseudo-cluster means the cube is served from a local file;
  // all real clusters from props are appended after it.
  var possibleClusters = [
    { value: 'native', label: 'Load a file and serve it natively' }
  ].concat(clusters.map((cluster) => {
    return { value: cluster.name, label: cluster.name };
  }));

  // The timezone input round-trips Timezone instances through their string
  // form (valueToString / stringToValue) so it can be edited as text.
  return <form className="general vertical">
    {makeLabel('title')}
    {makeTextInput('title', /.*/, true)}
    {makeLabel('description')}
    {makeTextInput('description')}
    {makeLabel('clusterName')}
    {makeDropDownInput('clusterName', possibleClusters)}
    {makeLabel('source')}
    {makeTextInput('source')}
    {makeLabel('defaultTimezone')}
    <ImmutableInput
      instance={newInstance}
      path={'defaultTimezone'}
      onChange={this.delegate.onChange}
      valueToString={(value: Timezone) => value ? value.toJS() : undefined}
      stringToValue={(str: string) => str ? Timezone.fromJS(str) : undefined}
    />
  </form>;
}
// Renders the "Attributes" tab: a single textarea that edits the cube's
// attribute overrides as a JSON document, converted to/from AttributeInfo[].
renderAttributes(): JSX.Element {
  const { newInstance, errors } = this.state;
  var makeLabel = FormLabel.simpleGenerator(LABELS, errors);

  // valueToString pretty-prints the overrides as JSON; stringToValue parses
  // the JSON back (JSON.parse throws on invalid input — presumably handled
  // by ImmutableInput's error path; confirm).
  return <form className="general vertical">
    {makeLabel('attributeOverrides')}
    <ImmutableInput
      instance={newInstance}
      path={'attributeOverrides'}
      onChange={this.delegate.onChange}
      valueToString={(value: AttributeInfo[]) => value ? JSON.stringify(AttributeInfo.toJSs(value), null, 2) : undefined}
      stringToValue={(str: string) => str ? AttributeInfo.fromJSs(JSON.parse(str)) : undefined}
      type="textarea"
    />
  </form>;
}
// Renders the "Dimensions" tab: an editable list of the cube's dimensions
// with add/edit modals and a toggle for auto-suggested dimensions.
renderDimensions(): JSX.Element {
  const { newInstance } = this.state;

  // Persist an edited dimension list back onto the cube instance.
  const onChange = (newDimensions: List<Dimension>) => {
    const newCube = newInstance.changeDimensions(newDimensions);
    this.setState({
      newInstance: newCube
    });
  };

  const getModal = (item: Dimension) => <DimensionModal dimension={item}/>;

  // New dimensions get a generated name ('d…') that does not collide with
  // any existing dimension name.
  const getNewItem = () => Dimension.fromJS({
    name: generateUniqueName('d', name => !newInstance.dimensions.find(m => m.name === name)),
    title: 'New dimension'
  });

  // Row presentation: title, formula text and a kind-specific icon.
  const getRows = (items: List<Dimension>) => items.toArray().map((dimension) => {
    return {
      title: dimension.title,
      description: dimension.expression.toString(),
      icon: `dim-${dimension.kind}`
    };
  });

  const DimensionsList = ImmutableList.specialize<Dimension>();

  return <DimensionsList
    label={STRINGS.dimensions}
    items={newInstance.dimensions}
    onChange={onChange.bind(this)}
    getModal={getModal}
    getNewItem={getNewItem}
    getRows={getRows}
    toggleSuggestions={this.toggleDimensionsSuggestions.bind(this)}
  />;
}
// Show/hide the dimension-suggestion modal.
toggleDimensionsSuggestions() {
  // Functional updater: toggling from this.state directly can read a stale
  // value when React batches several setState calls.
  this.setState(({ showDimensionsSuggestion }) => ({
    showDimensionsSuggestion: !showDimensionsSuggestion
  }));
}
// Prepend suggested items to the cube's list named by `property`
// ('dimensions' or 'measures') and store the updated instance.
addToCube(property: string, additionalValues: (Dimension | Measure)[]) {
  const { newInstance } = this.state;
  const existing = (newInstance as any)[property].toArray();
  const combined = additionalValues.concat(existing);
  this.setState({
    newInstance: ImmutableUtils.setProperty(newInstance, property, List(combined))
  });
}
// Modal offering auto-suggested dimensions; accepted suggestions are
// prepended to the cube's dimension list via addToCube.
renderDimensionSuggestions() {
  const { newInstance } = this.state;

  return <SuggestionModal
    onAdd={this.addToCube.bind(this, 'dimensions')}
    onClose={this.toggleDimensionsSuggestions.bind(this)}
    getLabel={(d: Dimension) => `${d.title} (${d.formula})`}
    getOptions={newInstance.getSuggestedDimensions.bind(newInstance)}
    title={`${STRINGS.dimension} ${STRINGS.suggestion}`}
  />;
}
/**
 * Shows/hides the measure-suggestions modal.
 * Uses the functional form of setState: the new value depends on the previous
 * state, and React may batch updates, so reading `this.state` directly could
 * toggle from a stale value.
 */
toggleMeasuresSuggestions() {
    this.setState(({ showMeasuresSuggestion }) => ({
        showMeasuresSuggestion: !showMeasuresSuggestion
    }));
}
/**
 * Renders the "Measures" tab as an editable immutable list.
 * If editing removes the measure used as the default sort, the first remaining
 * measure becomes the new default sort measure.
 */
renderMeasures(): JSX.Element {
    let { newInstance } = this.state;

    // Arrow function: lexically bound, so no `.bind(this)` is needed below.
    const onChange = (newMeasures: List<Measure>) => {
        const { defaultSortMeasure } = newInstance;
        if (defaultSortMeasure) {
            if (!newMeasures.find((measure) => measure.name === defaultSortMeasure)) {
                // The default sort measure was removed: fall back to the first
                // remaining measure. Guard against an empty list, which would
                // previously have thrown on `.get(0).name`.
                const firstMeasure = newMeasures.get(0);
                if (firstMeasure) {
                    newInstance = newInstance.changeDefaultSortMeasure(firstMeasure.name);
                }
            }
        }

        const newCube = newInstance.changeMeasures(newMeasures);
        this.setState({
            newInstance: newCube
        });
    };

    const getModal = (item: Measure) => <MeasureModal measure={item}/>;

    // New measures get a generated unique name ('m0', 'm1', ...).
    const getNewItem = () => Measure.fromJS({
        name: generateUniqueName('m', name => !newInstance.measures.find(m => m.name === name)),
        title: 'New measure'
    });

    const getRows = (items: List<Measure>) => items.toArray().map((measure) => {
        return {
            title: measure.title,
            description: measure.expression.toString(),
            icon: `measure`
        };
    });

    // NOTE(review): specializing on every render creates a new component type
    // each time (forces a remount); consider hoisting to a module constant.
    const MeasuresList = ImmutableList.specialize<Measure>();

    return <MeasuresList
        label={STRINGS.measures}
        items={newInstance.measures}
        onChange={onChange}
        getModal={getModal}
        getNewItem={getNewItem}
        getRows={getRows}
        toggleSuggestions={this.toggleMeasuresSuggestions.bind(this)}
    />;
}
/**
 * Renders the modal offering measure suggestions for the current cube.
 */
renderMeasureSuggestions() {
    const { newInstance } = this.state;
    const modalTitle = `${STRINGS.measure} ${STRINGS.suggestion}`;
    const describeMeasure = (m: Measure) => `${m.title} (${m.formula})`;
    const handleAdd = this.addToCube.bind(this, 'measures');
    const handleClose = this.toggleMeasuresSuggestions.bind(this);

    return <SuggestionModal
        onAdd={handleAdd}
        onClose={handleClose}
        getLabel={describeMeasure}
        getOptions={newInstance.getSuggestedMeasures.bind(newInstance)}
        title={modalTitle}
    />;
}
/**
 * Renders the action buttons of the title bar.
 * For an unchanged existing cube only a disabled save button is shown;
 * otherwise both cancel/revert and save are rendered.
 */
renderButtons(): JSX.Element {
    const { dataCube, isNewDataCube } = this.props;
    const { canSave, newInstance } = this.state;

    const hasChanged = !dataCube.equals(newInstance);
    const pristineExistingCube = !isNewDataCube && !hasChanged;

    const saveButton = <Button
        className={classNames("save", {disabled: !canSave || pristineExistingCube})}
        title={isNewDataCube ? "Create cube" : "Save"}
        type="primary"
        onClick={this.save.bind(this)}
    />;

    if (pristineExistingCube) {
        return <div className="button-group">
            {saveButton}
        </div>;
    }

    const cancelButton = <Button
        className="cancel"
        title={isNewDataCube ? "Cancel" : "Revert changes"}
        type="secondary"
        onClick={this.cancel.bind(this)}
    />;

    return <div className="button-group">
        {cancelButton}
        {saveButton}
    </div>;
}
/**
 * Builds the modal title: the create/edit label, suffixed with the cube's
 * title when it has one.
 */
getTitle(): string {
    const { isNewDataCube } = this.props;
    const { newInstance } = this.state;

    const prefix = isNewDataCube ? STRINGS.createDataCube : STRINGS.editDataCube;
    if (!newInstance.title) return prefix;
    return `${prefix}: ${newInstance.title}`;
}
render() {
    const { dataCube, isNewDataCube } = this.props;
    const { tab, newInstance, showDimensionsSuggestion, showMeasuresSuggestion } = this.state;

    // Nothing to render until the editing state is fully initialized.
    if (!newInstance || !tab || !dataCube) return null;

    // Layout: title bar (back button for existing cubes, title, action buttons),
    // then the tab strip and the active tab's content, with suggestion modals
    // overlaid when toggled on. The active tab carries its own render function,
    // which is bound to this component so it can read this.state/props.
    return <div className="data-cube-edit">
        <div className="title-bar">
            {isNewDataCube
                ? null
                : <Button
                    className="button back"
                    type="secondary"
                    svg={require('../../../icons/full-back.svg')}
                    onClick={this.goBack.bind(this)}
                />
            }
            <div className="title">{this.getTitle()}</div>
            {this.renderButtons()}
        </div>
        <div className="content">
            <div className="tabs">
                {this.renderTabs(tab)}
            </div>
            <div className="tab-content">
                {tab.render.bind(this)()}
            </div>
        </div>
        {showDimensionsSuggestion ? this.renderDimensionSuggestions() : null}
        {showMeasuresSuggestion ? this.renderMeasureSuggestions() : null}
    </div>;
}
} | the_stack |
import { join, normalize, basename, dirname, extname } from "path";
import { pathExists, copy, writeFile, writeJSON, readFile, readJSON, readdir, remove } from "fs-extra";
import {
SceneSerializer, ShaderMaterial, Mesh, Tools as BabylonTools, RenderTargetTexture, DynamicTexture,
MultiMaterial,
} from "babylonjs";
import { LGraph } from "litegraph.js";
import filenamify from "filenamify";
import { GraphAssets } from "../assets/graphs";
import { MeshesAssets } from "../assets/meshes";
import { PrefabAssets } from "../assets/prefabs";
import { ScriptAssets } from "../assets/scripts";
import { Editor } from "../editor";
import { FSTools } from "../tools/fs";
import { Tools } from "../tools/tools";
import { KTXTools, KTXToolsType } from "../tools/ktx";
import { MaterialTools } from "../tools/components/material";
import { Assets } from "../components/assets";
import { SceneSettings } from "../scene/settings";
import { SceneExportOptimzer } from "../scene/export-optimizer";
import { GraphCode } from "../graph/graph";
import { GraphCodeGenerator } from "../graph/generate";
import { Project } from "./project";
import { FilesStore } from "./files";
import { IProject } from "./typings";
import { WorkSpace } from "./workspace";
import { ProjectHelpers } from "./helpers";
import { MeshExporter } from "../export/mesh";
import { GeometryExporter } from "../export/geometry";
export interface IExportFinalSceneOptions {
    /**
     * Defines the optional path where to save the final scene.
     */
    destPath?: string;
    /**
     * Defines the root path applied on geometries in .babylon file in case of incremental loading.
     */
    geometryRootPath?: string;
    /**
     * Defines whether or not files are forced to be re-generated.
     */
    forceRegenerateFiles?: boolean;
    /**
     * Defines whether or not all compressed texture formats should be generated.
     * Typically used when exporting final scene version.
     */
    generateAllCompressedTextureFormats?: boolean;
}
export class ProjectExporter {
// Re-entrancy guard: true while a project save is in progress (see Save()).
private static _IsSaving: boolean = false;
// Re-entrancy guard: true while a final-scene export is in progress (see ExportFinalScene()).
private static _IsExporting: boolean = false;
/**
* Asks the user where to export the project and exports the project in the selected folder.
* @param editor the editor reference.
*/
public static async SaveAs(editor: Editor): Promise<void> {
const path = await Tools.ShowSaveDialog(Project.Path);
Project.Path = join(path, "scene.editorproject");
Project.DirPath = path;
await Project.SetOpeningProject(Project.Path!);
await this.Save(editor);
}
/**
 * Saves the project in the current location. If no path provided, a dialog will prompt to select
 * the folder where to export the project.
 * @param editor the editor reference.
 * @param skipGenerateScene defines whether or not the generation of the scene should be skipped.
 */
public static async Save(editor: Editor, skipGenerateScene: boolean = false): Promise<void> {
    // Re-entrancy guard: ignore save requests while a save is already running.
    if (this._IsSaving) { return; }

    this._IsSaving = true;
    try {
        await this._Save(editor, skipGenerateScene);
    } catch (e) {
        console.error(e);
        editor.console.logError(e.message);
    } finally {
        // Always release the guard, even if the error logging above throws;
        // otherwise saving would be permanently disabled for this session.
        this._IsSaving = false;
    }
}
/**
 * Exports the final scene.
 * @param editor the editor reference.
 * @param task defines the already existing task feedback to reuse.
 * @param options defines the optional export options (destination path, etc.).
 */
public static async ExportFinalScene(editor: Editor, task?: string, options?: IExportFinalSceneOptions): Promise<void> {
    // Re-entrancy guard: ignore export requests while an export is already running.
    if (this._IsExporting) {
        return;
    }

    this._IsExporting = true;
    try {
        await this._ExportFinalScene(editor, task, options);
    } catch (e) {
        console.error(e);
        editor.console.logError(e.message);
    } finally {
        // Always release the guard, even if the error logging above throws;
        // otherwise exporting would be permanently disabled for this session.
        this._IsExporting = false;
    }
}
/**
 * Saves the project: copies all resource files and writes the serialized cameras,
 * textures, materials, meshes, lights, transform nodes, particle systems, sounds
 * and morph target managers into the project folder, then writes the
 * "scene.editorproject" descriptor and cleans stale output files.
 * @param editor the editor reference.
 * @param skipGenerateScene defines whether or not the generation of the final scene should be skipped.
 */
private static async _Save(editor: Editor, skipGenerateScene: boolean): Promise<void> {
    // No project location yet: delegate to SaveAs which prompts for one.
    if (!Project.Path) { return this.SaveAs(editor); }

    // Check is isolated mode
    if (editor.preview.state.isIsolatedMode) {
        return editor.notifyMessage("Can't save when Isolated Mode is enabled.", 2000, "error");
    }

    editor.console.logSection("Exporting Project");
    editor.console.logInfo(`Exporting project to: ${Project.DirPath}`);
    editor.beforeSaveProjectObservable.notifyObservers(Project.DirPath!);

    const task = editor.addTaskFeedback(0, "Saving Files...");
    await Tools.Wait(500);

    // Create project
    const project: IProject = {
        filesList: [],
        cameras: [],
        materials: [],
        textures: [],
        meshes: [],
        transformNodes: [],
        particleSystems: [],
        lights: [],
        sounds: [],
        morphTargetManagers: [],
        scene: ProjectHelpers.ExportSceneSettings(editor.scene!),
        assets: {
            meshes: MeshesAssets.Meshes.map((m) => m.name),
            prefabs: PrefabAssets.Prefabs.map((p) => p.name),
            graphs: GraphAssets.Graphs.map((g) => g.name),
        },
        project: {
            camera: SceneSettings.Camera!.serialize(),
        },
        postProcesses: {
            ssao: { enabled: SceneSettings.IsSSAOEnabled(), json: SceneSettings.SSAOPipeline?.serialize() },
            screenSpaceReflections: { enabled: SceneSettings.IsScreenSpaceReflectionsEnabled(), json: SceneSettings.ScreenSpaceReflectionsPostProcess?.serialize() },
            default: { enabled: SceneSettings.IsDefaultPipelineEnabled(), json: SceneSettings.DefaultPipeline?.serialize() },
            motionBlur: { enabled: SceneSettings.IsMotionBlurEnabled(), json: SceneSettings.MotionBlurPostProcess?.serialize() },
        },
        physicsEnabled: Project.Project?.physicsEnabled ?? true,
    };

    // Names of the files written during this save; used at the end to remove
    // stale files from the output directories.
    const exportedMeshes: string[] = [];
    const exportedLights: string[] = [];
    const exportedShadows: string[] = [];
    const exportedCameras: string[] = [];
    const exportedMaterials: string[] = [];
    const exportedTextures: string[] = [];
    const exportedGeometries: string[] = [];
    const exportedSounds: string[] = [];
    const exportedTransformNodes: string[] = [];
    const exportedParticleSystems: string[] = [];
    const exportedMorphTargets: string[] = [];

    let savePromises: Promise<void>[] = [];

    // Write all files
    const filesDir = join(Project.DirPath!, "files");
    await FSTools.CreateDirectory(filesDir);

    let progressValue = 0;
    let progressCount = 100 / FilesStore.GetFilesCount();

    for (const f in FilesStore.List) {
        const file = FilesStore.List[f];

        // Check if file still exists.
        if (!await pathExists(file.path)) {
            FilesStore.RemoveFileFromPath(file.path);
            continue;
        }

        const dest = join(filesDir, file.name);
        project.filesList.push(file.name);

        // Already copied during a previous save.
        if ((await pathExists(dest))) {
            continue;
        }

        try {
            await copy(file.path, dest);
            editor.console.logInfo(`Copied resource file "${dest}"`);
        } catch (e) {
            editor.console.logError(`Failed to copy resource file "${file.path}" to "${dest}"`);
            throw e;
        }

        editor.updateTaskFeedback(task, progressValue += progressCount);
    }

    // Write all morph target managers
    const morphTargets: any[] = [];
    const morphTargetsDir = join(Project.DirPath!, "morphTargets");

    for (const mesh of editor.scene!.meshes) {
        if (!(mesh instanceof Mesh)) { continue; }
        const manager = mesh.morphTargetManager;
        if (manager) {
            morphTargets.push(manager.serialize());
        }
    }

    if (morphTargets.length) {
        editor.updateTaskFeedback(task, 0, "Saving Morph Target Managers");
        progressValue = 0;
        // Fixed: was "100 / morphTargets.length ?? 1". "??" binds looser than "/",
        // so it applied to the (never nullish) quotient and was dead code. The
        // length is always >= 1 inside this branch.
        progressCount = 100 / morphTargets.length;

        await FSTools.CreateDirectory(morphTargetsDir);

        for (const mtm of morphTargets) {
            const morphTargetManagerDest = `${mtm.id}.json`;

            savePromises.push(new Promise<void>(async (resolve) => {
                await writeJSON(join(morphTargetsDir, morphTargetManagerDest), mtm, { encoding: "utf-8" });

                project.morphTargetManagers!.push(morphTargetManagerDest);
                exportedMorphTargets.push(morphTargetManagerDest);

                editor.updateTaskFeedback(task, progressValue += progressCount);
                editor.console.logInfo(`Saved morph target manager configuration "${mtm.id}"`);

                resolve();
            }));
        }

        await Promise.all(savePromises);
        savePromises = [];
    }

    // Write all cameras
    editor.updateTaskFeedback(task, 0, "Saving Cameras");
    progressValue = 0;
    progressCount = 100 / editor.scene!.cameras.length;

    const camerasDir = join(Project.DirPath!, "cameras");
    await FSTools.CreateDirectory(camerasDir);

    for (const camera of editor.scene!.cameras) {
        if (camera.doNotSerialize) { continue; }

        const json = camera.serialize();
        const dest = `${normalize(`${basename(filenamify(camera.name))}-${camera.id}`)}.json`;

        await writeFile(join(camerasDir, dest), JSON.stringify(json, null, "\t"), { encoding: "utf-8" });

        project.cameras.push(dest);
        exportedCameras.push(dest);

        editor.updateTaskFeedback(task, progressValue += progressCount);
        editor.console.logInfo(`Saved camera configuration "${camera.name}"`);
    }

    // Write all textures
    editor.updateTaskFeedback(task, 0, "Saving Textures");
    progressValue = 0;
    progressCount = 100 / editor.scene!.textures.length;

    const texturesDir = join(Project.DirPath!, "textures");
    await FSTools.CreateDirectory(texturesDir);

    for (const texture of editor.scene!.textures) {
        // Render targets, dynamic textures, data-url textures and the internal
        // BRDF texture are runtime-only and are not persisted.
        if (texture instanceof RenderTargetTexture || texture instanceof DynamicTexture) { continue; }
        if (texture.name.indexOf("data:") === 0 || texture === editor.scene!.environmentBRDFTexture) { continue; }

        savePromises.push(new Promise<void>(async (resolve) => {
            const json = texture.serialize();
            if (!json) { return resolve(); }

            if (json.isCube && !json.isRenderTarget && json.files && json.metadata?.isPureCube) {
                // Replace Urls
                json.files = json.files.map((f) => join("files", basename(f)));
            }

            // Rebase the texture onto the project's "files" folder.
            json.name = join("./", "files", basename(texture.name));
            json.url = join("./", "files", basename(texture.name));

            const dest = `${normalize(`${filenamify(basename(texture.name))}-${texture.uniqueId.toString()}`)}.json`;
            await writeFile(join(texturesDir, dest), JSON.stringify(json, null, "\t"), { encoding: "utf-8" });

            project.textures.push(dest);
            exportedTextures.push(dest);

            editor.updateTaskFeedback(task, progressValue += progressCount);
            editor.console.logInfo(`Saved texture configuration "${texture.name}"`);

            resolve();
        }));
    }

    await Promise.all(savePromises);
    savePromises = [];

    // Write all materials
    editor.updateTaskFeedback(task, 0, "Saving Materials");
    progressValue = 0;
    progressCount = 100 / (editor.scene!.materials.length + editor.scene!.multiMaterials.length);

    const materialsDir = join(Project.DirPath!, "materials");
    await FSTools.CreateDirectory(materialsDir);

    const materials = editor.scene!.materials.concat(editor.scene!.multiMaterials);
    for (const material of materials) {
        if (material instanceof ShaderMaterial || material === editor.scene!.defaultMaterial || material.doNotSerialize) { continue; }

        savePromises.push(new Promise<void>(async (resolve) => {
            const json = material.serialize();

            // The BRDF texture is internal to the engine: don't persist it.
            if (json.customType === "BABYLON.PBRMaterial" && json.environmentBRDFTexture) {
                delete json.environmentBRDFTexture;
            }

            if (material.metadata) {
                try {
                    json.metadata = Tools.CloneObject(material.metadata);
                } catch (e) {
                    // Catch silently.
                }
            }

            const dest = `${normalize(`${basename(filenamify(material.name))}-${material.id}`)}.json`;
            await writeFile(join(materialsDir, dest), JSON.stringify(json, null, "\t"), { encoding: "utf-8" });

            project.materials.push({
                bindedMeshes: material.getBindedMeshes().map((m) => m.id),
                json: dest,
                isMultiMaterial: material instanceof MultiMaterial,
            });
            exportedMaterials.push(dest);

            editor.updateTaskFeedback(task, progressValue += progressCount);
            editor.console.logInfo(`Saved material configuration "${material.name}"`);

            resolve();
        }));
    }

    await Promise.all(savePromises);
    savePromises = [];

    // Reorder material to have multi materials at the end.
    project.materials = project.materials.filter((m) => !m.isMultiMaterial).concat(project.materials.filter((m) => m.isMultiMaterial));

    // Write all meshes
    editor.updateTaskFeedback(task, 0, "Saving Meshes");
    progressValue = 0;
    progressCount = 100 / editor.scene!.meshes.length;

    const meshesDir = join(Project.DirPath!, "meshes");
    await FSTools.CreateDirectory(meshesDir);

    const geometriesDir = join(Project.DirPath!, "geometries");
    await FSTools.CreateDirectory(geometriesDir);

    for (const mesh of editor.scene!.meshes) {
        if (!(mesh instanceof Mesh) || mesh._masterMesh || mesh.doNotSerialize) { continue; }

        savePromises.push(new Promise<void>(async (resolve) => {
            const json = MeshExporter.ExportMesh(mesh);

            // Geometries (including per-LOD geometries) are written separately
            // to support incremental loading.
            exportedGeometries.push.apply(exportedGeometries, await GeometryExporter.ExportIncrementalGeometries(editor, geometriesDir, json, false));

            for (const lod of json.lods) {
                if (lod.mesh) {
                    exportedGeometries.push.apply(exportedGeometries, await GeometryExporter.ExportIncrementalGeometries(editor, geometriesDir, lod.mesh, false));
                }
            }

            const dest = `${normalize(`${basename(filenamify(mesh.name))}-${mesh.id}`)}.json`;
            await writeFile(join(meshesDir, dest), JSON.stringify(json, null, "\t"), { encoding: "utf-8" });

            project.meshes.push(dest);
            exportedMeshes.push(dest);

            editor.updateTaskFeedback(task, progressValue += progressCount);
            editor.console.logInfo(`Saved mesh configuration "${mesh.name}"`);

            resolve();
        }));
    }

    await Promise.all(savePromises);
    savePromises = [];

    // Write all lights
    editor.updateTaskFeedback(task, 0, "Saving Lights");
    progressValue = 0;
    progressCount = 100 / editor.scene!.lights.length;

    const lightsDir = join(Project.DirPath!, "lights");
    await FSTools.CreateDirectory(lightsDir);

    const shadowsDir = join(Project.DirPath!, "shadows");
    await FSTools.CreateDirectory(shadowsDir);

    for (const light of editor.scene!.lights) {
        const lightJson = light.serialize();
        const lightDest = `${normalize(`${basename(filenamify(light.name))}-${light.id}`)}.json`;

        await writeFile(join(lightsDir, lightDest), JSON.stringify(lightJson, null, "\t"), { encoding: "utf-8" });

        const shadowJson = light.getShadowGenerator()?.serialize();
        if (shadowJson) {
            // Same file name as the light, but written into the "shadows" folder.
            const shadowDest = `${normalize(`${basename(filenamify(light.name))}-${light.id}`)}.json`;
            await writeFile(join(shadowsDir, shadowDest), JSON.stringify(shadowJson, null, "\t"), { encoding: "utf-8" });

            project.lights.push({ json: lightDest, shadowGenerator: shadowDest });
            exportedShadows.push(shadowDest);
        } else {
            project.lights.push({ json: lightDest, shadowGenerator: undefined });
        }

        exportedLights.push(lightDest);

        editor.updateTaskFeedback(task, progressValue += progressCount);
        editor.console.logInfo(`Saved light configuration "${light.name}"`);
    }

    // Write all transform nodes
    editor.updateTaskFeedback(task, 0, "Saving Transform Nodes");
    progressValue = 0;
    progressCount = 100 / editor.scene!.transformNodes.length;

    const transformNodesDir = join(Project.DirPath!, "transform");
    await FSTools.CreateDirectory(transformNodesDir);

    for (const transform of editor.scene!.transformNodes) {
        savePromises.push(new Promise<void>(async (resolve) => {
            const json = transform.serialize();
            const dest = `${normalize(`${filenamify(basename(transform.name))}-${transform.id}`)}.json`;

            await writeFile(join(transformNodesDir, dest), JSON.stringify(json, null, "\t"), { encoding: "utf-8" });

            project.transformNodes.push(dest);
            exportedTransformNodes.push(dest);

            editor.updateTaskFeedback(task, progressValue += progressCount);
            editor.console.logInfo(`Saved transform node configuration "${transform.name}"`);

            resolve();
        }));
    }

    await Promise.all(savePromises);
    savePromises = [];

    // Write all particle systems
    editor.updateTaskFeedback(task, 0, "Saving Particle Systems");
    progressValue = 0;
    progressCount = 100 / editor.scene!.particleSystems.length;

    const particleSystemsDir = join(Project.DirPath!, "particleSystems");
    await FSTools.CreateDirectory(particleSystemsDir);

    for (const ps of editor.scene!.particleSystems) {
        const json = ps.serialize(true);
        const dest = `${normalize(`${basename(filenamify(ps.name))}-${ps.id}`)}.json`;

        await writeFile(join(particleSystemsDir, dest), JSON.stringify(json, null, "\t"), { encoding: "utf-8" });

        project.particleSystems!.push(dest);
        exportedParticleSystems.push(dest);

        editor.updateTaskFeedback(task, progressValue += progressCount);
        editor.console.logInfo(`Saved particle system configuration "${ps.name}"`);
    }

    // Write all sounds
    editor.updateTaskFeedback(task, 0, "Saving Sounds");
    progressValue = 0;
    progressCount = 100 / editor.scene!.mainSoundTrack.soundCollection.length;

    const soundsDir = join(Project.DirPath!, "sounds");
    await FSTools.CreateDirectory(soundsDir);

    for (const s of editor.scene!.mainSoundTrack.soundCollection) {
        const json = s.serialize();
        json.url = basename(json.name);

        const dest = `${normalize(`${basename(filenamify(s.name))}`)}.json`;
        await writeFile(join(soundsDir, dest), JSON.stringify(json, null, "\t"), { encoding: "utf-8" });

        project.sounds!.push(dest);
        exportedSounds.push(dest);

        editor.updateTaskFeedback(task, progressValue += progressCount);
        editor.console.logInfo(`Saved sound configuration "${s.name}"`);
    }

    // Write assets cache
    const assetsPath = join(Project.DirPath!, "assets");
    await FSTools.CreateDirectory(assetsPath);
    await writeFile(join(Project.DirPath!, "assets", "cache.json"), JSON.stringify(Assets.GetCachedData(), null, "\t"), { encoding: "utf-8" });

    // Write project!
    await writeFile(join(Project.DirPath!, "scene.editorproject"), JSON.stringify(project, null, "\t"), { encoding: "utf-8" });

    // Update workspace
    if (WorkSpace.HasWorkspace()) {
        await WorkSpace.WriteWorkspaceFile(Project.Path);

        if (!skipGenerateScene && WorkSpace.Workspace!.generateSceneOnSave) {
            await this.ExportFinalScene(editor, task);
        }
    }

    // Save project configuration
    Project.Project = project;

    // Done!
    editor.updateTaskFeedback(task, 100, "Done!");
    editor.closeTaskFeedback(task, 500);

    editor.console.logInfo(`Successfully saved project to: ${Project.DirPath!}`);
    editor.afterSaveProjectObservable.notifyObservers(Project.DirPath!);

    // Save editor config
    editor._saveEditorConfig();

    // Run clean in background
    this._CleanOutputDir(camerasDir, exportedCameras);
    this._CleanOutputDir(geometriesDir, exportedGeometries);
    this._CleanOutputDir(lightsDir, exportedLights);
    this._CleanOutputDir(materialsDir, exportedMaterials);
    this._CleanOutputDir(meshesDir, exportedMeshes);
    this._CleanOutputDir(shadowsDir, exportedShadows);
    this._CleanOutputDir(texturesDir, exportedTextures);
    this._CleanOutputDir(soundsDir, exportedSounds);
    this._CleanOutputDir(particleSystemsDir, exportedParticleSystems);
    this._CleanOutputDir(transformNodesDir, exportedTransformNodes);
    this._CleanOutputDir(morphTargetsDir, exportedMorphTargets);

    // Update recent projects to be shown in welcome wizard
    this._UpdateWelcomeRecentProjects(editor);
}
/**
 * Returns the final scene in its JSON representation.
 * Serializes the whole scene, attaches editor metadata (post-processes, producer,
 * LODs, physics), then restores the live scene's state before returning.
 * @param editor defines the reference to the editor.
 */
public static GetFinalSceneJson(editor: Editor): any {
    // Sounds: make sure the main sound track is part of the serialized tracks.
    if (editor.scene!.soundTracks?.indexOf(editor.scene!.mainSoundTrack) === -1) {
        editor.scene!.soundTracks.push(editor.scene!.mainSoundTrack);
    }

    // Optimize
    const optimizer = new SceneExportOptimzer(editor.scene!);
    optimizer.optimize();

    // Configure nodes that are not serializable.
    Tools.getAllSceneNodes(editor.scene!).forEach((n) => {
        if (n.metadata?.doNotExport === true) {
            n.doNotSerialize = true;
        }
    });

    const scene = SceneSerializer.Serialize(editor.scene!);
    scene.metadata = scene.metadata ?? {};
    scene.metadata.postProcesses = {
        ssao: { enabled: SceneSettings.IsSSAOEnabled(), json: SceneSettings.SSAOPipeline?.serialize() },
        screenSpaceReflections: { enabled: SceneSettings.IsScreenSpaceReflectionsEnabled(), json: SceneSettings.ScreenSpaceReflectionsPostProcess?.serialize() },
        default: { enabled: SceneSettings.IsDefaultPipelineEnabled(), json: SceneSettings.DefaultPipeline?.serialize() },
        motionBlur: { enabled: SceneSettings.IsMotionBlurEnabled(), json: SceneSettings.MotionBlurPostProcess?.serialize() },
    };

    // Set producer
    scene.producer = {
        file: "scene.babylon",
        name: "Babylon.JS Editor",
        version: `v${editor._packageJson.version}`,
        exporter_version: `v${editor._packageJson.dependencies.babylonjs}`,
    };

    // Active camera
    scene.activeCameraID = scene.cameras[0]?.id;

    // LODs
    scene.meshes?.forEach((m) => {
        if (!m) { return; }

        // Editor-only overlay flag: don't ship it.
        delete m.renderOverlay;

        // Temporarily detach "_waitingUpdatedReferences" from the metadata so it
        // is not serialized, then re-attach it to the live object afterwards.
        const exportedMeshMetadata = m.metadata;
        const waitingUpdatedReferences = exportedMeshMetadata?._waitingUpdatedReferences;
        if (waitingUpdatedReferences) {
            delete m.metadata._waitingUpdatedReferences;
            m.metadata = Tools.CloneObject(m.metadata);
            exportedMeshMetadata._waitingUpdatedReferences = waitingUpdatedReferences;
        }

        const mesh = editor.scene!.getMeshByID(m.id);
        if (!mesh || !(mesh instanceof Mesh)) { return; }

        const lods = mesh.getLODLevels();
        if (!lods.length) { return; }

        m.lodMeshIds = lods.map((lod) => lod.mesh?.id);
        m.lodDistances = lods.map((lod) => lod.distance);
        // NOTE(review): coverages are also filled from "lod.distance" — confirm
        // this is intended (it may be correct when screen-coverage mode stores
        // its value in "distance", but it looks like a copy/paste).
        m.lodCoverages = lods.map((lod) => lod.distance);
    });

    // Physics
    scene.physicsEnabled = Project.Project?.physicsEnabled ?? true;
    if (scene.physicsEngine && WorkSpace.Workspace?.physicsEngine) {
        scene.physicsEngine = WorkSpace.Workspace?.physicsEngine;
    }

    // Physics values: keep restitution for enabled physics, otherwise strip all
    // impostor data from meshes and their instances.
    scene.meshes?.forEach((m) => {
        const existingMesh = editor.scene!.getMeshByID(m.id);
        if (!existingMesh) { return; }

        if (scene.physicsEnabled) {
            if (existingMesh.physicsImpostor) {
                m.physicsRestitution = existingMesh.physicsImpostor.getParam("restitution");
            }
        } else {
            delete m.physicsImpostor;
            delete m.physicsMass;
            delete m.physicsFriction;
            delete m.physicsRestitution;
        }

        m.instances?.forEach((i) => {
            const instance = existingMesh._scene.getMeshByID(i.id);
            if (!instance?.physicsImpostor) { return; }

            if (scene.physicsEnabled) {
                i.physicsRestitution = instance.physicsImpostor.getParam("restitution");
            } else {
                delete i.physicsImpostor;
                delete i.physicsMass;
                delete i.physicsFriction;
                delete i.physicsRestitution;
            }
        });
    });

    // Skeletons: restore the original bone ids kept in metadata.
    scene.skeletons?.forEach((s) => {
        s.bones?.forEach((b) => {
            if (!b.metadata) { return; }
            b.id = b.metadata.originalId;
        });
    });

    // PBR materials: strip the internal environment BRDF texture (stripped the
    // same way when saving individual materials).
    scene.materials?.forEach((m) => {
        if (m.customType === "BABYLON.PBRMaterial" && m.environmentBRDFTexture) {
            delete m.environmentBRDFTexture;
        }
    });

    // Clean
    optimizer.clean();

    // Restore nodes that are not serialized.
    Tools.getAllSceneNodes(editor.scene!).forEach((n) => {
        if (n.metadata?.doNotExport === true) {
            n.doNotSerialize = false;
        }
    });

    return scene;
}
/**
 * Returns the location of the exported scene on the file system:
 * "<workspace>/scenes/<name of the folder containing the last opened scene>".
 */
public static GetExportedSceneLocation(): string {
    const sceneFolderName = basename(dirname(WorkSpace.Workspace!.lastOpenedScene));
    return join(WorkSpace.DirPath!, "scenes", sceneFolderName);
}
/**
 * Returns the list of ALL exported files located in the scene's output folder.
 * @returns "scenePath" (absolute path to the .babylon file), "filesPaths" (all
 * resource files of the scene) and "incrementalFiles" (incremental geometry
 * files, empty when incremental loading is disabled).
 */
public static async ListExportedFiles(): Promise<{ scenePath: string; filesPaths: string[]; incrementalFiles: string[]; }> {
    const sceneFolderName = basename(dirname(WorkSpace.Workspace!.lastOpenedScene));
    const projectDir = join(WorkSpace.DirPath!, "scenes", sceneFolderName);

    // Helper turning an entry of a sub-folder into its absolute path.
    const absolute = (folder: string) => (f: string) => join(projectDir, folder, f);

    const filesPaths = (await readdir(join(projectDir, "files"))).map(absolute("files"));

    const incrementalFiles = WorkSpace.Workspace!.useIncrementalLoading
        ? (await readdir(join(projectDir, "geometries"))).map(absolute("geometries"))
        : [];

    return { scenePath: join(projectDir, "scene.babylon"), filesPaths, incrementalFiles };
}
/**
* Exports the current scene into .babylon format including only geometries.
* @param editor defines the editor reference.
*/
public static async ExportFinalSceneOnlyGeometries(editor: Editor): Promise<void> {
// Generate scene
const scene = this.GetFinalSceneJson(editor);
if (!scene) { return; }
scene.materials = [];
scene.lights = [];
scene.cameras = [];
scene.shadowGenerators = [];
scene.particleSystems = [];
scene.meshes?.forEach((m) => m.materialId = null);
// Save
let destPath = await Tools.ShowSaveFileDialog("Save Scene (Only Geometries)");
if (!destPath) { return; }
if (extname(destPath).toLowerCase() !== ".babylon") {
destPath += ".babylon";
}
await writeFile(destPath, JSON.stringify(scene), { encoding: "utf-8" });
}
/**
 * Exports the final scene after asking the user for the destination folder.
 * Does nothing when the dialog is cancelled.
 * @param editor defines the reference to the editor.
 */
public static async ExportFinalSceneAs(editor: Editor): Promise<void> {
    const destPath = await Tools.ShowSaveDialog();
    if (destPath) {
        await this.ExportFinalScene(editor, undefined, { destPath });
    }
}
/**
 * Exports the final scene into the workspace's "scenes" folder (or the given
 * destination): writes the .babylon file, copies resource files, optionally
 * generates incremental geometries and KTX-compressed textures, then removes
 * unused files from the output folder.
 * @param editor the editor reference.
 * @param task defines the already existing task feedback to reuse.
 * @param options defines the optional export options (destination, forced regeneration, etc.).
 */
private static async _ExportFinalScene(editor: Editor, task?: string, options?: IExportFinalSceneOptions): Promise<void> {
    if (!WorkSpace.HasWorkspace()) { return; }

    // Check is isolated mode
    if (editor.preview.state.isIsolatedMode) {
        return editor.notifyMessage("Can't export when Isolated Mode is enabled.", 2000, "error");
    }

    editor.console.logSection("Exporting Final Scene");

    task = task ?? editor.addTaskFeedback(0, "Generating Final Scene");
    editor.updateTaskFeedback(task, 0, "Generating Final Scene");

    editor.console.logInfo("Serializing scene...");
    const scene = this.GetFinalSceneJson(editor);

    await FSTools.CreateDirectory(join(WorkSpace.DirPath!, "scenes"));

    const scenePath = options?.destPath ?? this.GetExportedSceneLocation();
    await FSTools.CreateDirectory(scenePath);

    const destFilesDir = join(scenePath, "files");

    editor.updateTaskFeedback(task, 50);
    editor.beforeGenerateSceneObservable.notifyObservers(scenePath);

    // Handle incremental loading
    const geometriesPath = join(scenePath, "geometries");
    const incrementalFolderExists = await pathExists(geometriesPath);

    if (incrementalFolderExists) {
        const incrementalFiles = await readdir(geometriesPath);

        try {
            await Promise.all(incrementalFiles.map((f) => remove(join(geometriesPath, f))));
        } catch (e) {
            editor.console.logError("Failed to remove incremental geometry file");
        }
    }

    if (!WorkSpace.Workspace?.useIncrementalLoading) {
        try {
            await remove(geometriesPath);
        } catch (e) {
            editor.console.logError("Failed to remove geometries output folder.");
        }
    } else {
        if (!incrementalFolderExists) {
            await FSTools.CreateDirectory(geometriesPath);
        }

        await GeometryExporter.ExportIncrementalGeometries(editor, geometriesPath, scene, true, options?.geometryRootPath, task);
    }

    await FSTools.CreateDirectory(destFilesDir);

    // Handle node material textures
    editor.updateTaskFeedback(task, 70, "Generating Node Material textures...");

    const copiedFiles: string[] = [
        ...await MaterialTools.ExportSerializedNodeMaterialsTextures(editor, scene.materials, scenePath),
    ];

    // Write scene
    // NOTE(review): the progress value regresses from 70 back to 50 here — confirm intended.
    editor.updateTaskFeedback(task, 50, "Writing scene...");
    await writeJSON(join(scenePath, "scene.babylon"), scene);

    // Copy files
    // Fixed: the step was computed as "count / 50", which makes the overall
    // progress grow quadratically with the number of files. "50 / count"
    // advances the bar from 50 to 100 over all files; Math.max guards against
    // a division by zero when there are no files.
    let step = 50 / Math.max(1, FilesStore.GetFilesCount());
    let progress = 50;

    for (const f in FilesStore.List) {
        const file = FilesStore.List[f];
        const dest = join(destFilesDir, file.name);

        copiedFiles.push(file.name);

        if (!options?.forceRegenerateFiles && await pathExists(dest)) {
            continue;
        }

        try {
            await copy(file.path, dest);
            editor.console.logInfo(`Copied resource file to: ${dest}`);
        } catch (e) {
            editor.console.logError(`Failed to copy resource file "${file.path}" to "${dest}"`);
        }

        editor.updateTaskFeedback(task, progress += step);
    }

    // Create ktx compressed textures
    const ktx2CompressedTextures = WorkSpace.Workspace!.ktx2CompressedTextures;

    const forcedFormat = ktx2CompressedTextures?.forcedFormat ?? "automatic";
    const supportedTextureFormat = (forcedFormat !== "automatic" ? forcedFormat : editor.engine!.texturesSupported[0]) as KTXToolsType;

    if (supportedTextureFormat && ktx2CompressedTextures?.enabled && ktx2CompressedTextures.pvrTexToolCliPath) {
        const filesToCompress = copiedFiles.slice();
        const promises: Promise<void | void[]>[] = [];

        // Fixed: same inverted division as above ("length / 100" -> "100 / length"),
        // so the bar goes from 0 to 100 over all compressed textures.
        step = 100 / Math.max(1, filesToCompress.length);
        progress = 0;

        editor.updateTaskFeedback(task, 0, "Compressing Textures...");

        for (const f of filesToCompress) {
            const dest = join(destFilesDir, f);

            // Register every possible KTX output so the cleanup pass below keeps them.
            copiedFiles.push(basename(KTXTools.GetKtxFileName(dest, "-astc.ktx")));
            copiedFiles.push(basename(KTXTools.GetKtxFileName(dest, "-dxt.ktx")));
            copiedFiles.push(basename(KTXTools.GetKtxFileName(dest, "-pvrtc.ktx")));
            copiedFiles.push(basename(KTXTools.GetKtxFileName(dest, "-etc1.ktx")));
            copiedFiles.push(basename(KTXTools.GetKtxFileName(dest, "-etc2.ktx")));

            if (options?.generateAllCompressedTextureFormats) {
                // Throttle: at most 5 textures are compressed concurrently.
                if (promises.length >= 5) {
                    await Promise.all(promises);
                    promises.splice(0);
                }

                promises.push(Promise.all([
                    KTXTools.CompressTexture(editor, dest, destFilesDir, "-astc.ktx"),
                    KTXTools.CompressTexture(editor, dest, destFilesDir, "-dxt.ktx"),
                    KTXTools.CompressTexture(editor, dest, destFilesDir, "-pvrtc.ktx"),
                    KTXTools.CompressTexture(editor, dest, destFilesDir, "-etc1.ktx"),
                    KTXTools.CompressTexture(editor, dest, destFilesDir, "-etc2.ktx"),
                ]).then(() => {
                    editor.updateTaskFeedback(task!, progress += step);
                }));
            } else {
                const ktxFilename = KTXTools.GetKtxFileName(dest, supportedTextureFormat);
                if (!options?.forceRegenerateFiles && await pathExists(ktxFilename)) {
                    editor.updateTaskFeedback(task!, progress += step);
                    continue;
                }

                promises.push(KTXTools.CompressTexture(editor, dest, destFilesDir, supportedTextureFormat).then(() => {
                    editor.updateTaskFeedback(task!, progress += step);
                }));
            }
        }

        if (promises.length) {
            await Promise.all(promises);
        }
    }

    // Clean unused files
    try {
        const existingFiles = await readdir(destFilesDir);

        for (const existingFile of existingFiles) {
            if (copiedFiles.indexOf(existingFile) !== -1) {
                continue;
            }

            const removePath = join(destFilesDir, existingFile);

            try {
                await remove(removePath);
                editor.console.logInfo(`Removed unused resource file ${removePath}`);
            } catch (e) {
                // Catch silently.
                editor.console.logError(`Failed to remove unused resource file ${removePath}`);
            }
        }
    } catch (e) {
        // Catch silently.
    }

    // Tools
    await this.GenerateScripts(editor);

    editor.updateTaskFeedback(task, 100);
    editor.closeTaskFeedback(task, 1000);
    editor.afterGenerateSceneObservable.notifyObservers(scenePath);

    editor.console.logInfo(`Successfully generated scene at ${scenePath}`);
}
/**
 * Generates the scripts for the project. Will write the "tools.ts" file and all index.ts files.
 * @param editor defines the reference to the editor.
 */
public static async GenerateScripts(editor: Editor): Promise<void> {
    // Copy tools (fixed log typo: "Copyging" -> "Copying").
    editor.console.logInfo("Copying tools...");

    const decorators = await readFile(join(Tools.GetAppPath(), "assets", "scripts", "decorators.ts"), { encoding: "utf-8" });
    await writeFile(join(WorkSpace.DirPath!, "src", "scenes", "decorators.ts"), decorators, { encoding: "utf-8" });

    const tools = await readFile(join(Tools.GetAppPath(), "assets", "scripts", "tools.ts"), { encoding: "utf-8" });
    // Note: decorators are written as a separate file above and are no longer
    // inlined into tools.ts (the "${decorators}" replacement was disabled).
    const finalTools = tools.replace("${editor-version}", editor._packageJson.version);
    await writeFile(join(WorkSpace.DirPath!, "src", "scenes", "tools.ts"), finalTools, { encoding: "utf-8" });

    // Export scripts: rewrite the scripts map with the current set of project scripts.
    editor.console.logInfo("Configuring scripts...");
    const scriptsMap = await readFile(join(Tools.GetAppPath(), "assets", "scripts", "scripts-map.ts"), { encoding: "utf-8" });
    const newScriptsMap = await this._UpdateScriptContent(editor, scriptsMap);
    await writeFile(join(WorkSpace.DirPath!, "src", "scenes", "scripts-map.ts"), newScriptsMap, { encoding: "utf-8" });

    // Export scene content: copy the scene entry point into the project's scene folder.
    editor.console.logInfo("Configuring scene entry point...");
    const scriptsContent = await readFile(join(Tools.GetAppPath(), "assets", "scripts", "scene", "index.ts"), { encoding: "utf-8" });

    const indexPath = join(WorkSpace.DirPath!, "src", "scenes", WorkSpace.GetProjectName());

    await FSTools.CreateDirectory(indexPath);
    await writeFile(join(indexPath, "index.ts"), scriptsContent, { encoding: "utf-8" });
}
/**
* Exports all available graphs in the scene.
* @param editor defines the reference to the editor.
*/
public static async ExportGraphs(editor: Editor): Promise<void> {
// Write all graphs
const destGraphs = join(WorkSpace.DirPath!, "src", "scenes", WorkSpace.GetProjectName(), "graphs");
await FSTools.CreateDirectory(destGraphs);
const graphs = editor.assets.getAssetsOf(GraphAssets);
if (graphs?.length) {
GraphCode.Init();
await GraphCodeGenerator.Init();
}
for (const g of graphs ?? []) {
const extension = extname(g.id);
const name = g.id.replace(extension, "");
const json = await readJSON(g.key);
try {
const code = GraphCodeGenerator.GenerateCode(new LGraph(json))?.replace("${editor-version}", editor._packageJson.version);
await writeFile(join(destGraphs, `${name}.ts`), code);
} catch (e) {
console.error(e);
}
}
}
/**
 * Cleans the given output dir: removes every file that is not listed in the
 * exported files (comparison is done on base names only).
 * @param directory defines the absolute path to the directory to clean.
 * @param exportedFiles defines the paths of all files that must be kept.
 */
public static async _CleanOutputDir(directory: string, exportedFiles: string[]): Promise<void> {
    try {
        const outputFiles = await readdir(directory);

        for (const outputFile of outputFiles) {
            if (exportedFiles.some((ef) => basename(ef) === outputFile)) {
                continue;
            }

            // Await the removal so failures surface here instead of becoming
            // unhandled promise rejections (the previous code fired-and-forgot).
            try {
                await remove(join(directory, outputFile));
            } catch (e) {
                // Catch silently: a single failed removal must not abort the cleanup.
            }
        }
    } catch (e) {
        // Catch silently: directory may simply not exist yet.
    }
}
/**
 * Updates the script content to be written.
 */
private static async _UpdateScriptContent(editor: Editor, scriptsContent: string): Promise<string> {
    // Write all graphs first so their generated sources are part of the scripts map.
    await this.ExportGraphs(editor);

    // Collect every project script path.
    const scriptPaths = await ScriptAssets.GetAllScripts();

    // Build the "require" map entries: strip the "src/scenes/" prefix and
    // the file extension to obtain the relative module id.
    const requireEntries = scriptPaths.map((scriptPath) => {
        const moduleId = scriptPath.replace(`src/scenes/`, "").replace(extname(scriptPath), "");
        return `\t"${scriptPath}": require("./${moduleId}"),`;
    }).join("\n");

    // Build the matching interface entries.
    const interfaceEntries = scriptPaths.map((scriptPath) => `\t"${scriptPath}": any;`).join("\n");

    return scriptsContent
        .replace("${editor-version}", editor._packageJson.version)
        .replace("// ${scripts}", requireEntries)
        .replace("// ${scriptsInterface}", interfaceEntries);
}
/**
 * Updates the list of recent projects (path + preview screenshot) stored in
 * local storage for the welcome screen.
 */
private static async _UpdateWelcomeRecentProjects(editor: Editor): Promise<void> {
    // Render once so the screenshot reflects the current scene state.
    editor.scene!.render();
    const preview = await BabylonTools.CreateScreenshotAsync(editor.engine!, editor.scene!.activeCamera!, {
        width: 3840,
        height: 2160
    });

    const storedData = localStorage.getItem("babylonjs-editor-welcome");
    const recentProjects = storedData ? JSON.parse(storedData) : [];
    const projectPath = WorkSpace.Path ?? Project.Path;

    // Refresh the preview of an existing entry, otherwise prepend a new one.
    const existingEntry = recentProjects.find((w) => w.path === projectPath);
    if (existingEntry) {
        existingEntry.preview = preview;
    } else {
        recentProjects.unshift({ path: projectPath, preview });
    }

    // Keep at most 3 recent projects.
    if (recentProjects.length > 3) {
        recentProjects.pop();
    }

    localStorage.setItem("babylonjs-editor-welcome", JSON.stringify(recentProjects));
}
}
import _ from 'lodash';
import { FileTypes, Folder, IndexContent, ResourceAbstract } from '@foxpage/foxpage-server-types';
import { config } from '../../app.config';
import { PRE, TAG, TYPE } from '../../config/constant';
import * as Service from '../services';
import { ContentPath } from '../types/component-types';
import { NewResourceDetail } from '../types/file-types';
import { FoxCtx, IdVersion } from '../types/index-types';
import { VersionNumber } from '../types/content-types';
import { formatToPath, generationId } from '../utils/tools';
import { PluginService } from './plugin-services';
/** Aggregated version info for one resource folder (see getGroupResourceMaxVersion). */
interface ResourceVersionDetail {
  id: string;      // resource folder id
  name: string;    // resource folder name
  version: string; // highest version found locally, '' when none exists
}
/**
 * Service handling resource groups: fetches remote resource lists through the
 * configured plugins, compares them against locally saved versions, and
 * persists new resources (folders, files, contents and versions) via the
 * other services. Singleton — use ResourceService.getInstance().
 */
export class ResourceService {
  private static _instance: ResourceService;
  // Resource plugin implementations resolved once from the plugin service.
  private plugins: ResourceAbstract;

  constructor() {
    const pluginInstance = PluginService.getInstance();
    this.plugins = pluginInstance.plugins;
  }

  /**
   * Single instance
   * @returns ResourceService
   */
  public static getInstance (): ResourceService {
    this._instance || (this._instance = new ResourceService());
    return this._instance;
  }

  /**
   * Resolve the remote url for the given resource type through the plugin,
   * merging the app-level resource config with the group config.
   * @returns the matching url, or '' when the plugin yields no match.
   */
  async getResourceRemoteUrl (type: string, groupConfig: Record<string, string>): Promise<string> {
    const remoteUrls = await this.plugins.resourceRemoteUrl(Object.assign(
      { resourceConfig: config.resourceConfig },
      {
        groupConfig,
        type
      }
    ));

    if (_.isArray(remoteUrls)) {
      const typeUrl = _.find(remoteUrls, { type });
      return typeUrl?.url || '';
    }

    return '';
  }

  /**
   * Get resource list
   * @param {any} options
   * @returns Promise
   */
  async getResourceList (options: any): Promise<IndexContent> {
    if (_.has(this.plugins, 'resourceList')) {
      const resourceData = await this.plugins.resourceList(options);
      // The plugin may return results for several types; keep the requested one.
      let typeResource: IndexContent | undefined = undefined;
      if (_.isArray(resourceData)) {
        typeResource = _.find(resourceData, { type: options.type });
      }

      return typeResource || { group: '', packages: [] };
    } else {
      // Plugin does not implement resourceList: fall back to an empty result.
      return { group: '', packages: [] };
    }
  }

  /**
   * Get the special resource group latest version from remote
   * @param {string} groupFolderId
   * @param {any} options
   * @returns Promise
   */
  async getResourceGroupLatestVersion (
    groupFolderId: string,
    options?: { name?: string; packageName?: string, proxy?: string },
  ): Promise<NewResourceDetail[]> {
    // NOTE(review): this local 'config' shadows the imported app config used
    // elsewhere in this class — rename would improve clarity.
    const config = await this.getGroupConfig(groupFolderId);
    const [remoteGroupResources, localeGroupResources] = await Promise.all([
      this.getResourceList(Object.assign(config, options)),
      this.getGroupResourceMaxVersion(groupFolderId),
    ]);

    // Check if resource has new version: a remote package is "new" when it is
    // unknown locally or its version differs from the local max version.
    let isNewVersion: boolean = false;
    let resourceList: NewResourceDetail[] = [];
    for (const resource of remoteGroupResources.packages || []) {
      isNewVersion = false;
      if (
        resource.foxpage?.resourceName &&
        (!localeGroupResources[resource.foxpage.resourceName] ||
          localeGroupResources[resource.foxpage.resourceName].version !== resource.foxpage?.version)
      ) {
        isNewVersion = true;
      }

      resourceList.push(
        Object.assign(
          _.pick(resource.foxpage, ['name', 'version', 'resourceName', 'meta', 'schema']),
          {
            id: localeGroupResources[resource.foxpage?.resourceName || '']?.id || undefined,
            latestVersion: localeGroupResources[resource.foxpage?.resourceName || '']?.version || '',
            files: resource.files || {},
            isNew: isNewVersion,
          }),
      );
    }

    // New resources first, then alphabetical by name.
    return _.orderBy(resourceList, ['isNew', 'name'], ['desc', 'asc']);
  }

  /**
   * Get the resource group configs, include app config, group config and common resource config
   * @param {string} groupFolderId
   * @returns Promise
   */
  async getGroupConfig (groupFolderId: string): Promise<Record<string, any>> {
    const folderDetail = await Service.folder.info.getDetailById(groupFolderId);
    let resourceId = '';
    let groupConfig = {};
    (folderDetail?.tags || []).forEach((tag) => {
      if (tag.type === TAG.RESOURCE_GROUP) {
        resourceId = tag.resourceId;
      } else if (tag.type === TAG.RESOURCE_CONFIG) {
        groupConfig = _.omit(tag, ['type']);
      }
    });

    let groupType = '';
    let appConfig = {};
    // NOTE(review): folderDetail is optional-chained above but accessed
    // directly here — a missing folder would throw; confirm callers always
    // pass a valid group folder id.
    const groupName = folderDetail.name;
    if (resourceId && folderDetail.applicationId) {
      const appDetail = await Service.application.getDetailById(folderDetail.applicationId);
      const appResourceDetail = _.filter(appDetail.resources || [], { id: resourceId })?.[0] || {};
      appConfig = appResourceDetail.detail;
      groupType = appResourceDetail.name || '';
    }

    return {
      type: groupType.toLowerCase(),
      name: groupName,
      resourceConfig: config.resourceConfig,
      groupConfig,
      appConfig,
    };
  }

  /**
   * Get the special resource group child max version info
   * response {'resourceId': {id,name,version}}
   * @param {string} groupFolderId
   * @returns Promise
   */
  async getGroupResourceMaxVersion (groupFolderId: string): Promise<Record<string, ResourceVersionDetail>> {
    const resourceList = await Service.folder.list.find({ parentFolderId: groupFolderId, deleted: false });
    const resourceIds = _.map(resourceList, 'id');
    const resourceMaxVersion = await this.getResourceMaxVersion(resourceIds);

    // Key the result by resource NAME (despite the doc above saying id).
    let resourceVersionObject: Record<string, ResourceVersionDetail> = {};
    for (const resource of resourceList) {
      resourceVersionObject[resource.name] = {
        id: resource.id,
        name: resource.name,
        version: resourceMaxVersion[resource.id]?.version || '',
      };
    }

    return resourceVersionObject;
  }

  /**
   * Check if remote resource exist in db, include check version [folder level]
   * Return codes: 1 = an id+version pair already exists (contentPath maps
   * resource id -> { path: contentId }), 2 = a resource name already exists,
   * 0 = nothing exists yet.
   * @param {NewResourceDetail[]} resourceList
   * @param {{applicationId:string;id:string}} options
   * @returns Promise
   */
  async checkRemoteResourceExist (
    resourceList: NewResourceDetail[],
    options: { applicationId: string; id: string },
  ): Promise<Record<string, any>> {
    // Split incoming resources: with id -> version check, without -> name check.
    let idVersions: { id: string; version: string; resourceName: string }[] = [];
    let resourceNames: string[] = [];
    (resourceList || []).forEach((resource) => {
      resource.id
        ? idVersions.push(_.pick(<Required<NewResourceDetail>>resource, ['id', 'version', 'resourceName']))
        : resourceNames.push(resource.resourceName);
    });

    // Check resource with id, version
    let checkResourceVersionParams = [];
    for (const item of idVersions) {
      checkResourceVersionParams.push({ parentFolderId: item.id, name: item.version });
    }

    let existIdVersions: Folder[] = [];
    if (checkResourceVersionParams.length > 0) {
      existIdVersions = await Service.folder.list.find({
        applicationId: options.applicationId,
        $or: checkResourceVersionParams,
        deleted: false
      });
    }

    if (existIdVersions.length > 0) {
      // Get all children content ids
      const versionFolderIds = _.map(existIdVersions, 'id');
      const childrenData = await Service.folder.list.getAllChildrenRecursive({
        folderIds: versionFolderIds,
        depth: 5,
        hasContent: true,
      });

      // Invert the {contentId: path} map so callers can look up ids by path.
      let versionContentIdObject: Record<string, any> = {};
      for (const existVersion of existIdVersions) {
        const contentPathObject = this.formatRecursiveToPath(childrenData[existVersion.id]);
        versionContentIdObject[existVersion.parentFolderId] = _.invert(contentPathObject);
      }

      return { code: 1, data: _.map(existIdVersions, 'name'), contentPath: versionContentIdObject };
    }

    // Check resource with name
    let existResourceNames: Folder[] = [];
    if (resourceNames.length > 0) {
      existResourceNames = await Service.folder.list.find({
        applicationId: options.applicationId,
        parentFolderId: options.id,
        name: { $in: resourceNames },
        deleted: false,
      });
    }

    if (existResourceNames.length > 0) {
      return { code: 2, data: _.map(existResourceNames, 'name') };
    }

    return { code: 0 };
  }

  /**
   * Bulk save resources, include resources all children details
   * @param {any[]} resourceList
   * @param {{ctx:FoxCtx;applicationId:string;folderId:string}} options
   * @returns void
   */
  saveResources (
    resourceList: NewResourceDetail[],
    options: { ctx: FoxCtx; applicationId: string; folderId: string },
  ): Record<string, any> {
    let resourceContentIdMap: Record<string, any> = {};
    for (const resource of resourceList) {
      let resourceId = resource.id || '';
      if (!resourceId) {
        // Create resource
        const resourceDetail = Service.folder.info.create(
          {
            name: resource.resourceName || '',
            intro: resource.name || '',
            applicationId: options.applicationId || '',
            parentFolderId: options.folderId || '',
          },
          { ctx: options.ctx },
        );
        resourceId = resourceDetail.id;
      }

      // Create resource version
      const versionFolderDetail = Service.folder.info.create(
        {
          name: resource.version || '',
          applicationId: options.applicationId || '',
          parentFolderId: resourceId,
        },
        { ctx: options.ctx },
      );

      // Create resource file and version
      resourceContentIdMap[resource.resourceName] = this.addResourceChildrenRecursive(resource.files, {
        ctx: options.ctx,
        folderId: versionFolderDetail.id,
        applicationId: options.applicationId,
      });
    }

    return resourceContentIdMap;
  }

  /**
   * Create resource children details, include folder, file, content and version infos
   * @param {any} resourceChildren
   * @param {{ctx:FoxCtx;applicationId:string;folderId:string}} options
   * @returns response content id and path mapping object, eg
   * {umd:{'style.css':'cont_xxxx'},cjs:{'production.js':'cont_xxxx'}}
   */
  addResourceChildrenRecursive (
    resourceChildren: any,
    options: { ctx: FoxCtx; applicationId: string; folderId: string },
  ): Record<string, any> {
    let contentIdMap: Record<string, any> = {};
    for (const name in resourceChildren) {
      // A string value is a leaf file whose value is its real path;
      // an object value is a sub folder to recurse into.
      if (_.isString(resourceChildren[name])) {
        const fileDetail = Service.file.info.create(
          {
            name: name,
            applicationId: options.applicationId,
            folderId: options.folderId || '',
            type: TYPE.RESOURCE as FileTypes,
          },
          { ctx: options.ctx },
        );

        // Create content
        const contentId = generationId(PRE.CONTENT);
        contentIdMap[name] = contentId;
        Service.content.info.create(
          { id: contentId, title: name, fileId: fileDetail.id },
          { ctx: options.ctx },
        );

        // Create version
        Service.version.info.create(
          {
            contentId: contentId,
            content: { id: contentId, realPath: resourceChildren[name] },
          },
          { ctx: options.ctx },
        );
      } else {
        const folderDetail = Service.folder.info.create(
          {
            name: name,
            applicationId: options.applicationId || '',
            folderPath: formatToPath(<string>name),
            parentFolderId: options.folderId,
          },
          { ctx: options.ctx },
        );

        // Recursive create children
        const childrenContentIdMap = this.addResourceChildrenRecursive(resourceChildren[name], {
          ctx: options.ctx,
          folderId: folderDetail.id,
          applicationId: options.applicationId,
        });
        contentIdMap[name] = childrenContentIdMap;
      }
    }

    return contentIdMap;
  }

  /**
   * Get the special resource folder data, include version folder, file
   * then format to path, eg.
   * fold_xxx:[
   *  {id:'cont_xxx1', path: 'resourceGroup/bg-banner-container/0.2.0/schema.json'},
   *  {id:'cont_xxx2', path: 'resourceGroup/bg-banner-container/0.2.0/cjs/production.js'}
   *  {id:'cont_xxx3', path: 'resourceGroup/bg-banner-container/0.2.0/umd/style.css'}
   * ]
   * @param {string} groupId
   * @param {{id:string;version:string}[]} idVersions
   * @param {} Record<string
   * @returns Promise
   */
  async getResourceVersionDetail (
    groupId: string,
    idVersions: IdVersion[],
  ): Promise<Record<string, ContentPath[]>> {
    const searchParams = _.map(idVersions, (item) => {
      return { parentFolderId: item.id, name: item.version };
    });

    const [resourceGroupInfo, resourceFolderList, resourceParentFolderList] = await Promise.all([
      Service.folder.info.getDetailById(groupId),
      Service.folder.list.find({ $or: searchParams, deleted: false }),
      Service.folder.list.getDetailByIds(_.map(idVersions, 'id')),
    ]);

    const groupPath = resourceGroupInfo?.folderPath || '';
    const resourceVersionIds = _.map(resourceFolderList, 'id');
    const resourceFolderObject = _.keyBy(resourceFolderList, 'id');
    const resourceParentFolderObject = _.keyBy(resourceParentFolderList, 'id');

    // Get resource version all children folder, file, content and version info
    const childrenInfo = await Service.folder.list.getAllChildrenRecursive({
      folderIds: resourceVersionIds,
      depth: 5,
      hasContent: true,
    });

    // Build the full path: group / resource / version / relative file path.
    let resourcePathObject: Record<string, ContentPath[]> = {};
    for (const folderId in childrenInfo) {
      const parentFolderId = resourceFolderObject[folderId]?.parentFolderId || '';
      if (parentFolderId) {
        const resourcePath = this.formatRecursiveToPath(childrenInfo[folderId]);
        resourcePathObject[parentFolderId] = [];
        _.forIn(resourcePath, (path, key) => {
          resourcePathObject[parentFolderId].push({
            contentId: key,
            path: [
              groupPath,
              resourceParentFolderObject?.[parentFolderId]?.folderPath || '',
              resourceFolderObject[folderId].folderPath,
              path,
            ].join('/'),
          });
        });
      }
    }

    return resourcePathObject;
  }

  /**
   * Flatten a recursive folder/file tree into a map of content id -> relative
   * file path (folder paths are prepended while unwinding the recursion).
   */
  formatRecursiveToPath (childrenInfo: any): Record<string, string> {
    let resourcePath: Record<string, string> = {};
    if ((childrenInfo?.files || []).length > 0) {
      childrenInfo.files.forEach((file: any) => {
        file.contents?.[0]?.id && (resourcePath[file.contents[0].id] = file.name);
      });
    }

    if ((childrenInfo?.folders || []).length > 0) {
      childrenInfo.folders.forEach((child: any) => {
        if (child?.children) {
          const childResourcePath = this.formatRecursiveToPath(child.children);
          _.forIn(childResourcePath, (item, key) => {
            childResourcePath[key] = child.folderPath + '/' + item;
          });
          resourcePath = _.merge(resourcePath, childResourcePath);
        }
      });
    }

    return resourcePath;
  }

  /**
   * Get resource content id by path
   * Walks folders for all but the last path segment, then the file, then the
   * content attached to that file.
   * NOTE(review): a missing intermediate folder/file would make getDetail
   * return null and the '.id' access throw — confirm inputs are validated upstream.
   * @param parentId
   * @param pathArr
   * @returns
   */
  async getContentIdByPath (parentId: string, pathArr?: string[]): Promise<string> {
    let contentId = '';
    if (pathArr && pathArr.length > 1) {
      const folderDetail = await Service.folder.info.getDetail({ parentFolderId: parentId, name: pathArr[0], deleted: false });
      contentId = await this.getContentIdByPath(folderDetail.id, _.drop(pathArr));
    } else if (pathArr && pathArr.length === 1) {
      const fileDetail = await Service.file.info.getDetail({ folderId: parentId, name: pathArr[0], deleted: false });
      contentId = await this.getContentIdByPath(fileDetail.id);
    } else {
      const contentDetail = await Service.content.info.getDetail({ fileId: parentId, deleted: false });
      contentId = contentDetail?.id || '';
    }

    return contentId;
  }

  /**
   * Get resource max version infos
   * @param resourceIds
   * @returns
   */
  async getResourceMaxVersion (resourceIds: string[]): Promise<Record<string, VersionNumber>> {
    const resourceVersions = await Service.folder.list.find({
      parentFolderId: { $in: resourceIds },
      deleted: false,
    });

    // Keep, per resource, the child folder whose parsed version number is highest.
    let maxResourceVersion: Record<string, VersionNumber> = {};
    resourceVersions.forEach(version => {
      const versionNumber = Service.version.number.createNumberFromVersion(_.trim(version.name));
      if (!maxResourceVersion[version.parentFolderId]) {
        maxResourceVersion[version.parentFolderId] = { version: version.name, versionNumber };
      } else if (maxResourceVersion[version.parentFolderId].versionNumber < versionNumber) {
        maxResourceVersion[version.parentFolderId] = { version: version.name, versionNumber };
      }
    });

    return maxResourceVersion;
  }
}
import {
Component,
Prop,
State,
Watch,
Element,
Event,
EventEmitter,
h,
} from '@stencil/core';
import throttle from 'lodash.throttle';
import { IResizeEvent } from './interfaces';
@Component({
  tag: 'split-me',
  styleUrl: 'split-me.css',
  shadow: true,
})
export class SplitMe {
  @Element()
  el: HTMLElement;

  /**
   * The number of slots in the splitter.
   */
  @Prop()
  n: number = 1;

  /**
   * The direction of the splitter.
   */
  @Prop()
  d: 'horizontal' | 'vertical' = 'horizontal';

  /**
   * Prevent the splitter from being resized.
   */
  @Prop()
  fixed: boolean = false;

  /**
   * The initial sizes of the slots.
   * Acceptable formats are: `sizes="0.33, 0.67"` or `sizes="50%, 25%, 25%"`
   */
  @Prop()
  sizes: string | number[] = '';

  /**
   * The minimum sizes of the slots.
   * Same format as `sizes`
   */
  @Prop()
  minSizes: string | number[] = '';

  /**
   * The maximum sizes of the slots.
   * Same format as `sizes`
   */
  @Prop()
  maxSizes: string | number[] = '';

  /**
   * The minimum time (in ms) between resize events while dragging.
   */
  @Prop()
  throttle: number = 0;

  // Cumulative end fractions of each slot in [0, 1]; slot i spans
  // (slotEnd[i-1], slotEnd[i]]. The last entry is always 1.
  @State()
  slotEnd: number[];

  /**
   * Emitted every time dragging causes the slots to resize
   */
  @Event()
  slotResized: EventEmitter<IResizeEvent>;

  // The watchers only set dirty flags; the actual revalidation happens
  // in componentWillUpdate so n/sizes changes can be reconciled together.
  @Watch('n')
  watchN() {
    this.nChanged = true;
  }

  @Watch('sizes')
  watchSizes() {
    this.sizesChanged = true;
  }

  @Watch('minSizes')
  watchMinSizes() {
    this.minSizesChanged = true;
  }

  @Watch('maxSizes')
  watchMaxSizes() {
    this.maxSizesChanged = true;
  }

  @Watch('throttle')
  watchThrottle(curr: number) {
    // Rebuild the throttled wrapper with the new interval.
    this.throttledResize = throttle(this.resize.bind(this), curr);
  }

  // Throttled wrapper around resize(); invoked from drag/touch handlers.
  throttledResize: Function;
  // Validated per-slot min/max size fractions (length n).
  minSizesArr: number[];
  maxSizesArr: number[];
  // Dirty flags set by the watchers above.
  nChanged: boolean = false;
  sizesChanged: boolean = false;
  minSizesChanged: boolean = false;
  maxSizesChanged: boolean = false;

  // Validate initial props; any attribute that does not parse to exactly
  // n entries falls back to its default.
  componentWillLoad() {
    this.throttledResize = throttle(this.resize.bind(this), this.throttle);

    // Validate the sizes attribute
    let sizes: number[] = this.parseSizes(this.sizes);
    if (sizes.length === this.n) {
      this.slotEnd = this.assignedSlotEnd(sizes);
    } else {
      this.slotEnd = this.defaultSlotEnd(this.n);
    }

    // Validate the minSize attribute
    let minSizes: number[] = this.parseSizes(this.minSizes);
    if (minSizes.length === this.n) {
      this.minSizesArr = minSizes;
    } else {
      this.minSizesArr = this.defaultMinSizes(this.n);
    }

    // Validate the maxSize attribute
    let maxSizes: number[] = this.parseSizes(this.maxSizes);
    if (maxSizes.length === this.n) {
      this.maxSizesArr = maxSizes;
    } else {
      this.maxSizesArr = this.defaultMaxSizes(this.n);
    }
  }

  // Reconcile pending prop changes before re-render.
  // NOTE(review): componentWillUpdate is deprecated in recent Stencil
  // releases — confirm the targeted Stencil version.
  componentWillUpdate() {
    // Validate the new sizes attribute
    let sizes: number[];
    if (this.sizesChanged) {
      sizes = this.parseSizes(this.sizes);
      if (sizes.length !== this.n) {
        this.sizesChanged = false;
      }
    }

    if (this.sizesChanged) {
      // If both sizes and n changed, size takes precedence to resize the splitter
      this.slotEnd = this.assignedSlotEnd(sizes);
      this.nChanged = false;
      this.sizesChanged = false;
    } else if (this.nChanged) {
      this.slotEnd = this.rescaleSlotEnd(this.n, this.slotEnd);
      this.nChanged = false;
    }

    if (this.minSizesChanged) {
      let minSizes: number[] = this.parseSizes(this.minSizes);
      if (minSizes.length === this.n) {
        this.minSizesArr = minSizes;
      } else {
        this.minSizesArr = this.defaultMinSizes(this.n);
      }
      this.minSizesChanged = false;
    }

    if (this.maxSizesChanged) {
      let maxSizes: number[] = this.parseSizes(this.maxSizes);
      if (maxSizes.length === this.n) {
        this.maxSizesArr = maxSizes;
      } else {
        this.maxSizesArr = this.defaultMaxSizes(this.n);
      }
      this.maxSizesChanged = false;
    }
  }

  // Evenly spaced slot ends: i-th slot ends at (i+1)/n.
  defaultSlotEnd(n: number): number[] {
    let slotEnd: number[] = [];
    for (let i = 0; i < n; ++i) {
      slotEnd.push((i + 1) / n);
    }
    return slotEnd;
  }

  // Convert per-slot size fractions into cumulative end positions, clamped to 1.
  assignedSlotEnd(sizes: number[]): number[] {
    let slotEnd: number[] = [];
    let currFrac = 0;
    for (let i = 0; i < sizes.length; ++i) {
      currFrac += sizes[i];
      slotEnd.push(Math.min(1, currFrac));
    }
    return slotEnd;
  }

  // Adapt existing slot ends to a new slot count n, preserving proportions
  // of the slots that survive.
  rescaleSlotEnd(n: number, oldEnd: number[]): number[] {
    let scale: number = oldEnd.length / n;
    let slotEnd: number[] = [];
    for (let i = 0; i < n - 1; ++i) {
      if (i < oldEnd.length) {
        slotEnd.push(oldEnd[i] * scale);
      } else {
        slotEnd.push((i + 1) / n);
      }
    }
    // The last slot should always expand to the end
    slotEnd.push(1);
    return slotEnd;
  }

  // Default minimum size: 0 (no minimum) for each slot.
  defaultMinSizes(n: number): number[] {
    let minSizes: number[] = [];
    for (let i = 0; i < n; ++i) {
      minSizes.push(0);
    }
    return minSizes;
  }

  // Default maximum size: 1 (whole splitter) for each slot.
  defaultMaxSizes(n: number): number[] {
    let maxSizes: number[] = [];
    for (let i = 0; i < n; ++i) {
      maxSizes.push(1);
    }
    return maxSizes;
  }

  /**
   * Parse a sizes attribute into an array of fractions in [0, 1].
   * Accepts a number array, a JSON-stringified array, or a comma-separated
   * string of percentages/fractions. Returns [] whenever the input does not
   * yield exactly n entries.
   */
  parseSizes(sizesStr: string | number[]): number[] {
    if (!sizesStr) {
      return [];
    }

    // If sizes prop is array
    if (Array.isArray(sizesStr)) {
      if (sizesStr.length === this.n) {
        return sizesStr;
      } else {
        return [];
      }
    }

    // If sizes prop is stringified array
    try {
      const parsed = JSON.parse(sizesStr);
      if (Array.isArray(parsed)) {
        if (parsed.length === this.n) {
          return parsed;
        } else {
          return [];
        }
      }
    } catch (e) {}

    // If sizes prop is freeform string such as
    // '0.5, 0.25, 0.25' or '50%, 50%'
    let sizesStrArr: string[] = sizesStr.split(',');
    if (sizesStrArr.length !== this.n) {
      return [];
    }
    let sizes: number[] = [];
    const percentRegex: RegExp = /^\s*\d+(\.\d*)?\%\s*$/;
    const fracRegex: RegExp = /^\s*(0|1)(\.\d*)?\s*$/;
    for (let i = 0; i < sizesStrArr.length; ++i) {
      let str: string = sizesStrArr[i];
      if (str.match(percentRegex)) {
        sizes.push(parseFloat(str) / 100);
      } else if (str.match(fracRegex)) {
        sizes.push(parseFloat(str));
      } else {
        return [];
      }
    }
    return sizes;
  }

  // Begin resizing divider i with the mouse.
  onDragStart(event: DragEvent, i: number) {
    // Resize on desktop
    // Firefox wouldn't allow using drag events for resizing purposes,
    // use this workaround instead.
    event.preventDefault();
    const mouseMoveListener = (e: MouseEvent) => {
      this.throttledResize(e.clientX, e.clientY, i, e);
    };
    const mouseUpListener = () => {
      // Drag finished: detach both temporary listeners.
      window.removeEventListener('mousemove', mouseMoveListener);
      window.removeEventListener('mouseup', mouseUpListener);
    };
    window.addEventListener('mousemove', mouseMoveListener);
    window.addEventListener('mouseup', mouseUpListener);
  }

  // Resize divider i from a touch gesture.
  onTouchMove = (event: TouchEvent, i: number) => {
    // Resize on mobile
    // Avoid scrolling the page
    event.preventDefault();
    if (event.touches.length > 0) {
      this.throttledResize(
        event.touches[0].clientX,
        event.touches[0].clientY,
        i,
        event
      );
    }
  };

  /**
   * Move divider i to the pointer position (x, y), clamped by the
   * min/max sizes of the two adjacent slots, then emit slotResized.
   */
  resize(x: number, y: number, i: number, e: MouseEvent | TouchEvent) {
    // Allowed range for the divider as a fraction of the whole splitter.
    let start = i > 0 ? this.slotEnd[i - 1] : 0;
    let min = start + this.minSizesArr[i];
    min = Math.max(min, this.slotEnd[i + 1] - this.maxSizesArr[i + 1]);
    let max = i < this.n - 1 ? this.slotEnd[i + 1] : 1;
    max -= this.minSizesArr[i + 1];
    max = Math.min(max, start + this.maxSizesArr[i]);

    // Convert pointer position to a fraction along the resize axis.
    let frac: number;
    let rect = this.el.getBoundingClientRect();
    if (this.d === 'vertical') {
      frac = (y - rect.top) / rect.height;
    } else {
      frac = (x - rect.left) / rect.width;
    }

    // Clamp to [min, max]; only resize when the position actually changes.
    let doResize: boolean = false;
    if (frac < min) {
      if (this.slotEnd[i] > min) {
        frac = min;
        doResize = true;
      }
    } else if (frac > max) {
      if (this.slotEnd[i] < max) {
        frac = max;
        doResize = true;
      }
    } else {
      doResize = true;
    }

    if (doResize) {
      // Replace the array (instead of mutating) so @State triggers a re-render.
      this.slotEnd = [
        ...this.slotEnd.slice(0, i),
        frac,
        ...this.slotEnd.slice(i + 1),
      ];
      this.slotResized.emit({
        divider: i,
        sizes: this.slotEndToSizes(this.slotEnd),
        originalEvent: e,
      });
    }
  }

  // Convert cumulative end positions back to individual slot sizes.
  slotEndToSizes(slotEnd: number[]): number[] {
    const sizes: number[] = [];
    for (let i = 0; i < slotEnd.length; ++i) {
      sizes.push(this.getSlotSize(i, slotEnd));
    }
    return sizes;
  }

  // Size of slot i is the distance between its end and the previous slot's end.
  getSlotSize(i: number, slotEnd: number[]): number {
    if (i === 0) {
      return slotEnd[i];
    } else {
      return slotEnd[i] - slotEnd[i - 1];
    }
  }

  render() {
    if (!this.slotEnd || this.slotEnd.length === 0) {
      return null;
    }

    // One container per slot, sized from slotEnd along the split axis.
    let slotContainers = [];
    // Visible divider lines, plus (when not fixed) wider invisible
    // "phantom" dividers that capture drag/touch input.
    let slotDividers = [];
    let phantomDividers = [];

    for (let i = 0; i < this.n; ++i) {
      let containerId = `container${i}`;
      let slotName = `${i}`;
      let size: number = this.getSlotSize(i, this.slotEnd);
      let style;
      if (this.d === 'vertical') {
        style = { width: '100%', height: `${size * 100}%` };
      } else {
        style = { width: `${size * 100}%`, height: '100%' };
      }
      slotContainers.push(
        <div id={containerId} style={style}>
          <slot name={slotName} />
        </div>
      );
    }

    for (let i = 0; i < this.n - 1; ++i) {
      let style;
      let displayClasses: string;
      let phantomClasses: string;
      if (this.d === 'vertical') {
        style = { top: `${100 * this.slotEnd[i]}%` };
        displayClasses = 'divider-v display-divider-v';
        phantomClasses = 'divider-v phantom-divider-v';
      } else {
        style = { left: `${100 * this.slotEnd[i]}%` };
        displayClasses = 'divider-h display-divider-h';
        phantomClasses = 'divider-h phantom-divider-h';
      }
      slotDividers.push(<div class={displayClasses} style={style} />);
      if (!this.fixed) {
        phantomDividers.push(
          <div
            class={phantomClasses}
            draggable={true}
            onDragStart={(e) => {
              this.onDragStart(e, i);
            }}
            onTouchMove={(e) => {
              this.onTouchMove(e, i);
            }}
            style={style}
          />
        );
      }
    }

    return (
      <div class="top-container">
        <div class="dividers-container">
          {slotDividers}
          {phantomDividers}
        </div>
        <div
          class={
            this.d === 'vertical' ? 'slots-container-v' : 'slots-container-h'
          }
        >
          {slotContainers}
        </div>
      </div>
    );
  }
}
import { TeliaPlayService } from '@/telia-play/TeliaPlayService';
import { ServiceApi } from '@apis/ServiceApi';
import { Requests } from '@common/Requests';
import { Utils } from '@common/Utils';
import { Item } from '@models/Item';
/** Response shape of the Telia "continue watching" list endpoint. */
export interface TeliaContinueWatchingList {
	list: TeliaContinueWatchingItem[];
}

/** One entry in the "continue watching" list. */
export interface TeliaContinueWatchingItem {
	mediaId: string;
	assetId: string;
	duration: number; // total media length — presumably seconds; TODO confirm unit
	position: number; // playback position, same unit as duration
	timestamp: number;
	hidden: boolean;
	serviceType: string; // e.g. 'SVOD' / 'VOD' (see CONTINUE_WATCHING_API_URL query)
	mediaObject: TeliaMediaObject;
}
/**
 * Media metadata object returned by several Telia endpoints
 * (continue-watching entries and search/videos hits).
 */
export interface TeliaMediaObject {
	loopId: string;
	seriesTitle: string;
	seasonNumber: number;
	episodeNumber: number;
	productionYear: string;
	validTo: string;
	validFrom: string;
	type: string;
	totalAssets: number;
	title: string;
	storeType: string;
	storeName: string;
	storeId: string;
	seriesId: string; // empty/absent for movies (loadHistoryItems filters on this)
	seasonId: string;
	renttime: number;
	length: number;
	contentproviderName: string;
	contentproviderId: string;
	titleId: string;
	start: string;
	categories: string[];
	seasons: TeliaSeason[];
	totalEpisodes: number;
	age: number;
	watched?: TeliaWatchedItem; // optional progress — presumably merged from the watched-items map; confirm
}
/** Response of the authentication endpoint (AUTH_URL). */
export interface TeliaAuth {
	message: string;
	geoblock: boolean;
	token: TeliaToken;
}

/** Access/refresh token pair with expiry. */
export interface TeliaToken {
	validTo: string;
	accessToken: string; // JWT stored in `jwt` by activate() and used for requests
	refreshToken: string;
}

/** Response of the "my list" (saved items) endpoint. */
export interface TeliaMyList {
	totalHits: number;
	type: string;
	items: TeliaMyListItem[];
}

/** One saved item in "my list". */
export interface TeliaMyListItem {
	id: string;
	type: string;
	decorations: { myList: { timestamp: string } };
}
/** Item returned by the explore endpoint: show/movie metadata plus id references. */
export interface TeliaExploreItem {
	id: string;
	title: string;
	description: string;
	ratingLevel: number;
	ratingName: string;
	parentalControl: boolean;
	references: TeliaExploreItemReferences;
}

/** Cross-system id references for an explore item. */
export interface TeliaExploreItemReferences {
	globalId: string;
	loopId: string;
	loopSeriesId: string;
	seriesId: string; // empty for movies (loadHistoryItems uses this to tell movies from shows)
	seasonId: string;
}

/** Wrapper for search/videos responses. */
export interface TeliaHits {
	Success: boolean;
	Data: { directHits: TeliaHit[]; totalHits: number };
}

/** One search/videos hit. */
export interface TeliaHit {
	type: string;
	object: TeliaMediaObject;
}

/** Season summary embedded in a media object. */
export interface TeliaSeason {
	loopId: string;
	title: string;
	seasonNumber: number;
	productionYear: string;
	totalEpisodes: number;
}

/** Map of media id -> watch progress (WATCHED_URL response). */
export interface TeliaWatched {
	// NOTE(review): JSON responses deserialize to plain objects, not Map —
	// confirm this type matches how the response is actually consumed.
	map: Map<string, TeliaWatchedItem>;
}

/** Watch progress for a single media item. */
export interface TeliaWatchedItem {
	duration: number;
	position: number;
	timestamp: number;
	hidden: boolean;
}
class _TeliaPlayApi extends ServiceApi {
OTTAPI_URL: string;
CONTINUE_WATCHING_API_URL: string;
MY_LIST_API_URL: string;
EXPLORE_URL: string;
IPTV_URL: string;
VIDEOS_URL: string;
AUTH_URL: string;
WATCHED_URL: string;
EPISODES_URL: string;
isActivated: boolean;
jwt: string;
constructor() {
super(TeliaPlayService.id);
this.AUTH_URL =
'https://www.teliaplay.se/rest/secure/users/authentication?deviceId=DASHJS_0245b511-4414-4b80-8ead-8e29da49075d&coreVersion=3.35.1&model=desktop_windows&nativeVersion=unknown_nativeVersion&uiVersion=6.24.0(578)';
this.OTTAPI_URL = 'https://ottapi.prod.telia.net/web/se';
this.IPTV_URL = 'https://iptvsearch-playplus-prod.han.telia.se/v1';
this.CONTINUE_WATCHING_API_URL = `${this.OTTAPI_URL}/continuewatching/rest/secure/v2/watchedItems/list/?fromIndex=0&toIndex=1000&serviceTypes=SVOD,VOD&protocols=dash&resolutions=sd`;
this.MY_LIST_API_URL = `${this.OTTAPI_URL}/mylistgateway/rest/secure/v1/savedItems/list?resolutions=sd&fromIndex=0&toIndex=5000`;
this.EXPLORE_URL = `${this.OTTAPI_URL}/exploregateway/rest/v3/explore/media`;
this.VIDEOS_URL = `${this.IPTV_URL}/videos?seriesIds={SERIES_IDS}&oneCoverIds={SERIES_IDS}&parental=false&fromIndex=0&toIndex=5000`;
this.EPISODES_URL = `${this.IPTV_URL}/series/{SHOW_ID}/season/{SEASON_ID}/episodes?sort=episode_asc&parental=false&fromIndex=0&toIndex=5000`;
this.WATCHED_URL = `${this.OTTAPI_URL}/continuewatching/rest/secure/v2/watchedItems/map/?serviceType=SVOD&mediaIds={EPISODE_IDS}`;
this.isActivated = false;
this.jwt = '';
}
async activate() {
const response = await Requests.send({
url: this.AUTH_URL,
method: 'GET',
});
const responseJson = JSON.parse(response) as TeliaAuth;
this.jwt = responseJson.token.accessToken;
this.session = {
profileName: '',
};
this.isActivated = true;
}
async checkLogin() {
if (!this.isActivated) {
await this.activate();
}
return !!this.session && this.session.profileName !== null;
}
async loadHistoryItems(): Promise<TeliaMediaObject[]> {
if (!this.isActivated) {
await this.activate();
}
// Get Continue Watching
const cwResponseText = await this.doGet(this.CONTINUE_WATCHING_API_URL);
const cwResponseJson = JSON.parse(cwResponseText) as TeliaContinueWatchingList;
const cwList = cwResponseJson.list;
// Get "my list"
const myListResponse = await this.doGet(this.MY_LIST_API_URL);
const myListJson = JSON.parse(myListResponse) as TeliaMyList;
const myListIds = myListJson.items.map((item) => item.id);
// Explore my list to find the ids of the shows
const exploreResponse = await this.doGet(`${this.EXPLORE_URL}/${myListIds.join()}`);
const exploreJson = JSON.parse(exploreResponse) as TeliaExploreItem[];
// Get videos to find season ids and movie metadata
const seriesIds: string[] = [];
// Add shows from explore & "Continue watching" list
seriesIds.push(...exploreJson.map((item) => item.references.loopSeriesId));
seriesIds.push(...cwList.map((cw) => cw.mediaObject.seriesId));
// Add movie IDs from both lists
const movieIds = exploreJson
.filter((e) => !e.references.seriesId)
.map((e) => e.references.loopId);
const movieIdsFromCw = cwList
.filter((cw) => !cw.mediaObject.seriesId)
.map((cw) => cw.mediaObject.loopId);
movieIds.push(...movieIdsFromCw);
seriesIds.push(...movieIds);
const ids = seriesIds.join();
// Must replace both ID lists for movies to work
const videosUrl = this.VIDEOS_URL.replace('{SERIES_IDS}', ids).replace('{SERIES_IDS}', ids);
const videosResponse = await this.doGet(videosUrl);
const videosJson = JSON.parse(videosResponse) as TeliaHits;
// Get episode info for each show / season and movies
const allEpisodes: TeliaMediaObject[] = [];
const movies: TeliaMediaObject[] = [];
for (const hit of videosJson.Data.directHits) {
if (this.parseType(hit.object) === 'show') {
const showId = hit.object.loopId;
const showUrl = this.EPISODES_URL.replace('{SHOW_ID}', showId);
for (const season of hit.object.seasons) {
const seasonUrl = showUrl.replace('{SEASON_ID}', season.loopId);
const episodesResponse = await this.doGet(seasonUrl);
const episodesJson = JSON.parse(episodesResponse) as TeliaHits;
const seasonEpisodes = episodesJson.Data.directHits.map((directHit) => directHit.object);
allEpisodes.push(...seasonEpisodes);
}
} else {
movies.push(hit.object);
}
}
// Get watched status of all episodes / movies
const episodeIds = allEpisodes.map((episode) => episode.loopId);
const allIds = [...movieIds, ...episodeIds];
const watchedUrl = this.WATCHED_URL.replace('{EPISODE_IDS}', allIds.join());
const watchedResponse = await this.doGet(watchedUrl);
const watchedJson = JSON.parse(watchedResponse) as TeliaWatched;
const watchedMap = new Map<string, TeliaWatchedItem>(Object.entries(watchedJson.map));
for (const episode of allEpisodes) {
episode.watched = watchedMap.get(episode.loopId);
}
for (const movie of movies) {
movie.watched = watchedMap.get(movie.loopId);
}
// Limit 10% watched
const watchedEpisodes = allEpisodes.filter(
(episode) => this.getPercentageWatched(episode.watched) > 10
);
const watchedMovies = movies.filter((movie) => this.getPercentageWatched(movie.watched));
// Skip season and episode-less shows (news shows and such)
const validEpisodes = watchedEpisodes.filter(
(episode) => episode.seasonNumber > 0 && episode.episodeNumber > 0
);
const historyItems: TeliaMediaObject[] = [];
historyItems.push(...validEpisodes);
historyItems.push(...watchedMovies);
this.hasReachedHistoryEnd = true;
return historyItems;
}
getHistoryItemId(historyItem: TeliaMediaObject) {
return historyItem.loopId;
}
convertHistoryItems(historyItems: TeliaMediaObject[]) {
const items = historyItems.map((historyItem) => this.parseHistoryItem(historyItem));
return items;
}
async doGet(url: string) {
const response = await Requests.send({
url: url,
method: 'GET',
headers: { Authorization: `Bearer ${this.jwt}` },
});
return response;
}
getPercentageWatched(watched: TeliaWatchedItem | undefined) {
if (typeof watched === 'undefined') {
return 0;
}
return Math.round(100 * (watched.position / watched.duration));
}
parseType(mediaObject: TeliaMediaObject) {
return mediaObject.categories.includes('Film') ? 'movie' : 'show';
}
parseHistoryItem(mediaObject: TeliaMediaObject): Item {
let item: Item;
const serviceId = this.id;
const id = mediaObject.loopId;
const type = this.parseType(mediaObject);
const year = parseInt(mediaObject.productionYear);
const progress = this.getPercentageWatched(mediaObject.watched);
const watchedAt = mediaObject.watched ? Utils.unix(mediaObject.watched.timestamp) : undefined;
if (type === 'show') {
const title = mediaObject.seriesTitle;
const season = mediaObject.seasonNumber;
const episode = mediaObject.episodeNumber;
// Cleanup HBO titles on the format "01:01 I Was Flying - Avenue 5"
let episodeTitle = mediaObject.title;
if (mediaObject.contentproviderName === 'HBO') {
if (episodeTitle.search(/^\d\d:\d\d\s/) > -1 && episodeTitle.search(` - ${title}`) > 0) {
episodeTitle = episodeTitle.replace(/^\d\d:\d\d\s/, '').replace(` - ${title}`, '');
}
}
item = new Item({
serviceId,
id,
type,
title,
year,
season,
episode,
episodeTitle,
progress,
watchedAt,
});
} else {
const title = mediaObject.title.trim();
item = new Item({
serviceId,
id,
type,
title,
year,
progress,
watchedAt,
});
}
return item;
}
}
export const TeliaPlayApi = new _TeliaPlayApi(); | the_stack |
import type { ObjectDictionary } from "@opticss/util";
import { AggregateRewriteData } from "./AggregateRewriteData";
/** The flat argument list the CSS Blocks rewriter hands to the runtime. */
export type ClassNameExpression = Array<string | number | boolean | null>;
/** Removes `undefined` from a type; used by the narrowing in `assert`. */
type Defined<T> = T extends undefined ? never : T;
/**
 * Throws an error if the value is null or undefined.
 * @param val a value that should not be null or undefined.
 * @param msg The error message
 */
function assert<T>(val: T, msg: string): asserts val is Defined<T> {
  // Reject both nullish values explicitly.
  if (val === null || val === undefined) {
    throw new Error(msg);
  }
}
// Discriminator values for the conditional encodings described in the large
// format comment below; `const enum` so the numbers are inlined at compile time.
const enum Condition {
  static = 1,
  toggle = 2,
  ternary = 3,
  switch = 4,
}
// How a switch conditional reacts when its runtime value is falsy:
// raise an error, leave the attribute unset, or fall back to a default.
const enum FalsySwitchBehavior {
  error = 0,
  unset = 1,
  default = 2,
}
/**
* Evaluates the runtime state of the style expression provided by the CSS
* Blocks rewriter.
*
* The result of evaluating a style expression is a set of global style ids
* that are currently enabled on this element by the author.
*
* Note: The full set of style ids that are enabled on the element is not known
* until after style resolution.
*
* The style expression is a list of values that consists of four sections:
* 1. Metadata
* 2. Block References
* 3. Style References
* 4. Conditionals
*
* ## Metadata
*
* The metadata consists of a single value, an integer that represents a schema
* version that describes the format of the style expression. If the rewrite
* changes, we'll use this to simultaneously support the old and new version(s).
*
* ## Block References
*
* The purpose of the block references is to enumerate the list of blocks that
* the styles come from. These blocks are later referred to in the style
* expression according to a number that represents a zero-based index into the
* order they appear in the block reference portion of the style expression.
*
* Format: `[blockCount: number, (sourceBlockGuid: string, runtimeBlockGuid: string | null)+]`
*
* - `blockCount` is the number of `sourceBlockGuid`/`runtimeBlockGuid` pairs that follow.
* - `sourceBlockGuid` is the guid of the block that was referenced in the source code.
* - `runtimeBlockGuid` is the guid of a block that implements the
* `sourceBlockGuid` interface and is meant to replace the source block at
* runtime. The `runtimeBlockGuid` is null when no runtime block is passed.
* This means the source block is used.
*
* ## Style References
*
* The purpose of Style References is to enumerate the list of styles that
* are referenced from the style conditionals that follow. Later references
* to these styles use the zero-based index into the order they appear in this
* section.
*
 * Format: `[styleCount: number, (blockIndex: number, styleName: string)+]`
* styleCount: The number of styles
*
* - `styleCount` is the number of styles that follow. Each style is a pair
* of values.
* - `blockIndex` is the index that references the block of the style.
* - `styleName` is a string that uniquely identifies the style in the block
* given as well as in any block that implements the same interface.
*
* ## Style Conditionals
*
* Style conditionals are how the possible authored styles are selected based
* on the runtime state of the application. There are 4 types of conditionals.
*
* The general form of the conditionals section of the style expression is
*
* Format: `[conditionalCount <conditional>+]`
*
* Where each `<conditional>` has a first argument that indicates the type
* of the conditional and the remaining arguments for each conditional are
* specific to that conditional type.
*
* The behavior of the conditional is to enable one or more of the possible
* styles on the element. All styles start out as disabled and require a conditional
* in order to cause that style to become enabled. A conditional never disables
* a style. Although rare, several conditionals might enable the same style.
*
* ### Static Conditional
*
* The static conditional enables a single style. No javascript-based state is
* taken into account.
*
* Type Value: `Condition.static` or `1`
 * Format: `[type: number, styleIndex: number]`
*
* - `styleIndex`: An index of one of the styles provided in the style
* reference portion of the style expression.
*
* ### Toggle Conditional
*
* A toggle conditional is used to enable one or more styles based on the
* "truthiness" of a javascript value.
*
* Format: `[type: number, value: boolean, styleCount: number, (styleIndex: number)+]`
*
* - `type`: `Condition.toggle` or `2`
* - `value`: A javascript value will be coerced to a boolean.
* - `styleCount`: The number of styles that are enabled when this condition is true.
* - `styleIndex`: An index of one of the styles provided in the style
* reference portion of the style expression.
*
* ### Ternary Conditional
*
* A ternary conditional is used to enable 0 or more styles when a variable is
* true or to enable a different set of 0 or more styles when the same variable
* is false.
*
* Format: `[type: number, value: boolean,
* trueCount: number, (trueStyleIndex: number)*,
* falseCount: number, (falseStyleIndex: number)*]`
*
* - `type`: `Condition.ternary` or `3`
* - `value`: A javascript value will be coerced to a boolean.
* - `trueCount`: The number of styles that are enabled when this condition is true.
* - `trueStyleIndex`: An index of one of the styles provided in the style
* reference portion of the style expression.
* - `falseCount`: The number of styles that are enabled when this condition is false.
* - `falseStyleIndex`: An index of one of the styles provided in the style
* reference portion of the style expression.
*
* ### Switch Conditional
*
* A switch conditional is used to enable one or more styles based on the value
* of a string matching one of several possible values. This conditional is
* different from the others in that it doesn't select styles from style
* reference section and it is the only conditional that can result in a
* runtime error.
*
* Format: `[type: number, blockIndex: number,
* attributeName: string, value: string | null]`
*
* - `type`: `Condition.switch` or `4`
* - `blockIndex`: An index that references a block from the block reference
* portion of the style expression.
* - `attributeName`: A string that uniquely identifies the attribute in the
* block given as well as in any block that implements the same interface.
* - `value`: A string that must match one of the attribute's values. If null
* is returned then no style is enabled (undefined is an error). If the value
* does not match one of the known attribute values then an error is raised.
*/
export class StyleEvaluator {
data: AggregateRewriteData;
args: ClassNameExpression;
index = 0;
// the values in blockStyleIndices map style strings to an index into the array
// stored at the same index of blockStyleIds. That means that
// `blockStyleIds[i][blockStyleIndices[i][":scope"]]` returns the globally
// unique id for the ":scope" style of the runtime selected block.
blockStyleIndices: Array<Record<string, number>> = [];
// When null, it indicates a missing style. This can happen when the block
// that implements an interface did not fully implement that interface
// because the block it implements has changed since the implementing block
// was precompiled and released.
blockStyleIds: Array<Array<number | null>> = [];
blockGroups: Array<ObjectDictionary<ObjectDictionary<string>>> = [];
styles: Array<number | null> = [];
constructor(data: AggregateRewriteData, args: ClassNameExpression) {
this.data = data;
this.args = args;
}
evaluateBlocks() {
let numBlocks = this.num();
while (numBlocks--) {
let sourceGuid = this.str();
let runtimeGuid = this.str(true); // this won't be non-null until we implement block passing.
let blockIndex = this.data.blockIds[sourceGuid];
assert(blockIndex, `unknown block ${sourceGuid}`);
let runtimeBlockIndex = runtimeGuid === null ? blockIndex : this.data.blockIds[runtimeGuid];
let blockInfo = this.data.blocks[blockIndex];
this.blockStyleIndices.push(blockInfo.blockInterfaceStyles);
let styleIds = blockInfo.implementations[runtimeBlockIndex];
assert(styleIds, "unknown implementation");
this.blockStyleIds.push(styleIds);
this.blockGroups.push(blockInfo.groups);
}
}
evaluateStyles() {
// Now we build a list of styles ids. these styles are referred to in the
// class name expression by using an index into the `styles` array that
// we're building.
let numStyles = this.num();
while (numStyles--) {
let block = this.num();
let style = this.str();
this.styles.push(this.blockStyleIds[block][this.blockStyleIndices[block][style]]);
}
}
evaluateToggleCondition(styleStates: Array<boolean>) {
// Can enable a single style
let b = this.bool();
let numStyles = this.num();
while (numStyles--) {
let s = this.num();
if (b) {
styleStates[s] = true;
}
}
}
evaluate(): Set<number> {
let rewriteVersion = this.num();
if (rewriteVersion > 0) throw new Error(`The rewrite schema is newer than expected. Please upgrade @css-blocks/ember-app.`);
this.evaluateBlocks();
this.evaluateStyles();
// Now we calculate the runtime javascript state of these styles.
// we start with all of the styles as "off" and can turn them on
// by setting the corresponding index of a `style` entry in `styleStates`
// to true.
let numConditions = this.num();
let styleStates = new Array<boolean>(this.styles.length);
let stylesApplied = new Set<number>();
while (numConditions--) {
let condition = this.num();
switch (condition) {
case Condition.static:
// static styles are always enabled.
styleStates[this.num()] = true;
break;
case Condition.toggle:
this.evaluateToggleCondition(styleStates);
break;
case Condition.ternary:
this.evaluateTernaryCondition(styleStates);
break;
case Condition.switch:
this.evaluateSwitchCondition(stylesApplied);
break;
default:
throw new Error(`Unknown condition type ${condition}`);
}
}
for (let i = 0; i < this.styles.length; i++) {
if (styleStates[i] && this.styles[i] !== null) {
stylesApplied.add(this.styles[i]!);
}
}
return stylesApplied;
}
evaluateSwitchCondition(stylesApplied: Set<number>) {
let falsyBehavior = this.num();
let blockIndex = this.num();
let attribute = this.str();
let currentValue = this.str(true, true);
if (!currentValue) {
if (currentValue === null || falsyBehavior === FalsySwitchBehavior.unset) {
return;
} else {
throw new Error(`A value is required for ${attribute}.`);
}
}
let attrValue = this.blockGroups[blockIndex][attribute][currentValue];
if (attrValue === undefined) {
let legal = Object.keys(this.blockGroups[blockIndex][attribute]);
throw new Error(`"${currentValue} is not a known attribute value. Expected one of: ${legal.join(", ")}`);
}
let style = this.blockStyleIds[blockIndex][this.blockStyleIndices[blockIndex][attrValue]];
if (style) {
stylesApplied.add(style);
}
}
evaluateTernaryCondition(styleStates: Array<boolean>) {
// Ternary supports multiple styles being enabled when true
// and multiple values being enabled when false.
let result = this.bool();
let numIfTrue = this.num();
while (numIfTrue--) {
let s = this.num();
if (result) styleStates[s] = true;
}
let numIfFalse = this.num();
while (numIfFalse--) {
let s = this.num();
if (!result) styleStates[s] = true;
}
}
nextVal(type: "number", allowNull: boolean, allowUndefined: false): number | null;
nextVal(type: "number", allowNull: boolean, allowUndefined: boolean): number | null | undefined;
nextVal(type: "string", allowNull: boolean, allowUndefined: boolean): string | null | undefined;
nextVal(type: "string" | "number", allowNull: boolean, allowUndefined: boolean): string | number | boolean | null | undefined {
if (this.args.length === 0) {
throw new Error("empty argument stack");
}
let v = this.args[this.index++];
if (v === undefined) {
if (allowUndefined) {
return undefined;
} else {
throw new Error(`Unexpected undefined value encountered.`);
}
}
if (v === null) {
if (allowNull) {
return v;
} else {
throw new Error(`Unexpected null value encountered.`);
}
}
if (typeof v === type) {
return v;
}
throw new Error(`Expected ${type} got ${v}`);
}
num(): number;
num(allowNull: false): number;
num(allowNull: true): number | null;
num(allowNull = false): number | null {
return this.nextVal("number", allowNull, false);
}
str(): string;
str(allowNull: false): string;
str(allowNull: true): string | null;
str(allowNull: false, allowUndefined: false): string;
str(allowNull: false, allowUndefined: true): string | undefined;
str(allowNull: true, allowUndefined: false): string | null;
str(allowNull: true, allowUndefined: true): string | null | undefined;
str(allowNull = false, allowUndefined = false): string | null | undefined {
return this.nextVal("string", allowNull, allowUndefined);
}
/**
* interprets the next value as a truthy and coerces it to a boolean.
*/
bool(): boolean {
return !!this.args[this.index++];
}
} | the_stack |
import { CompositeLayer } from '@deck.gl/core';
import { GeoJsonLayer } from '@deck.gl/layers';
import AnimatedFlowLinesLayer from './AnimatedFlowLinesLayer/AnimatedFlowLinesLayer';
import { Colors, DiffColors } from './colors';
import FlowCirclesLayer from './FlowCirclesLayer/FlowCirclesLayer';
import FlowLinesLayer from './FlowLinesLayer/FlowLinesLayer';
import Selectors from './Selectors';
import { LayerProps } from './LayerProps';
import {
DeckGLLayer,
Flow,
FlowAccessor,
FlowLayerPickingInfo,
isFeatureCollection,
Location,
LocationAccessor,
LocationCircle,
LocationCircleAccessor,
LocationCircleType,
Locations,
PickingHandler,
PickingType,
} from './types';
/**
 * Configurable props of the flow map, independent of layer identity and
 * picking callbacks (those live on `Props` below).
 */
export interface BasicProps extends LayerProps {
  // Input data: the locations and the flows between them.
  locations: Locations;
  flows: Flow[];
  // Rendering modes.
  diffMode?: boolean;
  animate?: boolean;
  animationCurrentTime?: number;
  animationTailLength?: number;
  colors?: Colors | DiffColors;
  // Accessors extracting ids/geometry/totals from location objects.
  getLocationId?: LocationAccessor<string>;
  getLocationCentroid?: LocationAccessor<[number, number]>;
  getLocationTotalIn?: LocationAccessor<number>;
  getLocationTotalOut?: LocationAccessor<number>;
  getLocationTotalWithin?: LocationAccessor<number>;
  // Accessors extracting endpoints/magnitude/appearance from flow objects.
  getFlowOriginId?: FlowAccessor<string>;
  getFlowDestId?: FlowAccessor<string>;
  getFlowMagnitude?: FlowAccessor<number>;
  getAnimatedFlowLineStaggering?: FlowAccessor<number>;
  getFlowColor?: FlowAccessor<string | undefined>;
  // Scaling/extent overrides and display toggles.
  maxFlowThickness?: number;
  flowMagnitudeExtent?: [number, number];
  locationTotalsExtent?: [number, number];
  maxLocationCircleSize?: number;
  minPickableFlowThickness?: number;
  showTotals?: boolean;
  showLocationAreas?: boolean;
  showOnlyTopFlows?: number;
  // Selection / highlight state.
  selectedLocationIds?: string[];
  highlightedLocationId?: string;
  highlightedLocationAreaId?: string;
  highlightedFlow?: Flow;
  outlineThickness?: number;
  // deck.gl-style update triggers, forwarded to the matching sublayer accessors.
  updateTriggers?: {
    getFlowLinesSourcePosition?: any;
    getFlowLinesTargetPosition?: any;
    getFlowLinesThickness?: any;
    getFlowLinesEndpointOffsets?: any;
    getFlowLinesColor?: any;
    getCirclesRadius?: any;
    getCirclesColor?: any;
    getLocationAreasFillColor?: any;
    getLocationAreasLineColor?: any;
  };
}
/** Full prop set: basic props plus layer id and picking callbacks. */
export interface Props extends BasicProps {
  id: string;
  onClick?: PickingHandler<FlowLayerPickingInfo>;
  onHover?: PickingHandler<FlowLayerPickingInfo>;
}
// Identifies the sublayers the composite layer renders; the value is used
// as an id suffix (see getLayerId/getLayerKind).
enum LayerKind {
  LOCATIONS = 'LOCATIONS',
  LOCATION_AREAS = 'LOCATION_AREAS',
  LOCATION_AREAS_OUTLINES = 'LOCATION_AREAS_OUTLINES',
  LOCATION_AREAS_SELECTED_AND_HIGHLIGHTED = 'LOCATION_AREAS_SELECTED_AND_HIGHLIGHTED',
  FLOWS = 'FLOWS',
  LOCATIONS_HIGHLIGHTED = 'LOCATIONS_HIGHLIGHTED',
  FLOWS_HIGHLIGHTED = 'FLOWS_HIGHLIGHTED',
}
// Delimiter between the user-supplied base layer id and the kind suffix.
const LAYER_ID_SEPARATOR = ':::';
/** Builds a sublayer id by suffixing the base id with the layer kind. */
function getLayerId(baseLayerId: string, layerKind: LayerKind) {
  return `${baseLayerId}${LAYER_ID_SEPARATOR}${layerKind.valueOf()}`;
}
/** Recovers the LayerKind from a sublayer id produced by getLayerId. */
function getLayerKind(id: string): LayerKind {
  // `slice` replaces the deprecated `substr`; one-argument semantics are
  // identical for non-negative start indices.
  const kind = id.slice(id.lastIndexOf(LAYER_ID_SEPARATOR) + LAYER_ID_SEPARATOR.length);
  return LayerKind[kind as keyof typeof LayerKind];
}
/**
 * Maps a picked sublayer to the picking type its objects should report;
 * returns undefined for non-pickable sublayer kinds (e.g. outlines).
 */
function getPickType({ id }: DeckGLLayer): PickingType | undefined {
  const kind = getLayerKind(id);
  if (kind === LayerKind.FLOWS || kind === LayerKind.FLOWS_HIGHLIGHTED) {
    return PickingType.FLOW;
  }
  if (kind === LayerKind.LOCATIONS || kind === LayerKind.LOCATIONS_HIGHLIGHTED) {
    return PickingType.LOCATION;
  }
  if (kind === LayerKind.LOCATION_AREAS) {
    return PickingType.LOCATION_AREA;
  }
  return undefined;
}
export default class FlowMapLayer extends CompositeLayer {
static layerName: string = 'FlowMapLayer';
static defaultProps = {
getLocationId: { type: 'accessor', value: (l: Location) => l.id || l.properties.id },
getLocationCentroid: { type: 'accessor', value: (l: Location) => l.properties.centroid },
getFlowOriginId: { type: 'accessor', value: (f: Flow) => f.origin },
getFlowDestId: { type: 'accessor', value: (f: Flow) => f.dest },
getFlowMagnitude: { type: 'accessor', value: (f: Flow) => f.count },
showTotals: true,
maxLocationCircleSize: 15,
outlineThickness: 1,
showLocationAreas: true,
animationTailLength: 0.7,
};
props!: Props;
constructor(props: Props) {
super(props);
}
initializeState() {
const {
getLocationTotalIn,
getLocationTotalOut,
getLocationTotalWithin,
getLocationId,
getLocationCentroid,
getFlowOriginId,
getFlowDestId,
getFlowMagnitude,
getFlowColor,
} = this.props;
const selectors = new Selectors({
getLocationId: getLocationId!,
getLocationCentroid: getLocationCentroid!,
getLocationTotalIn,
getLocationTotalOut,
getLocationTotalWithin,
getFlowOriginId: getFlowOriginId!,
getFlowDestId: getFlowDestId!,
getFlowMagnitude: getFlowMagnitude!,
getFlowColor,
});
this.setState({ selectors });
}
updateState(params: any) {
super.updateState(params);
const { props, changeFlags } = params;
if (changeFlags.propsChanged) {
const {
getLocationTotalIn,
getLocationTotalOut,
getLocationTotalWithin,
getLocationId,
getLocationCentroid,
getFlowOriginId,
getFlowDestId,
getFlowMagnitude,
getFlowColor,
} = props;
this.state.selectors.setInputAccessors({
getLocationId,
getLocationCentroid,
getLocationTotalIn,
getLocationTotalOut,
getLocationTotalWithin,
getFlowOriginId,
getFlowDestId,
getFlowMagnitude,
getFlowColor,
});
}
}
getPickingInfo(params: any): FlowLayerPickingInfo {
const type = getPickType(params.sourceLayer);
if (!type) {
return params.info;
}
const info = {
...params.info,
type,
};
const { selectors } = this.state;
if (type === PickingType.FLOW) {
const getLocationById = selectors.getLocationByIdGetter(this.props);
const { getFlowOriginId, getFlowDestId } = selectors.getInputAccessors();
const flow = info.object as Flow;
return {
...info,
...(flow && {
origin: getLocationById(getFlowOriginId(flow)),
dest: getLocationById(getFlowDestId(flow)),
}),
};
}
if (type === PickingType.LOCATION || type === PickingType.LOCATION_AREA) {
const location: Location = type === PickingType.LOCATION ? info.object && info.object.location : info.object;
const getLocationTotalIn = selectors.getLocationTotalInGetter(this.props);
const getLocationTotalOut = selectors.getLocationTotalOutGetter(this.props);
const getLocationTotalWithin = selectors.getLocationTotalWithinGetter(this.props);
const getLocationCircleRadius = selectors.getLocationCircleRadiusGetter(this.props);
return {
...info,
...(location && {
object: location,
totalIn: getLocationTotalIn(location),
totalOut: getLocationTotalOut(location),
totalWithin: getLocationTotalWithin(location),
circleRadius: getLocationCircleRadius({ location, type: LocationCircleType.OUTER }),
}),
};
}
return info;
}
renderLayers() {
const { showLocationAreas, locations, highlightedLocationId } = this.props;
const { selectors } = this.state;
const topFlows = selectors.getTopFlows(this.props);
const highlightedFlows = selectors.getHighlightedFlows(this.props);
const isLocationHighlighted = highlightedLocationId != null;
const locationCircles = selectors.getLocationCircles(this.props);
const layers: DeckGLLayer[] = [];
if (showLocationAreas && isFeatureCollection(locations)) {
layers.push(this.getLocationAreasLayer(getLayerId(this.props.id, LayerKind.LOCATION_AREAS), false));
}
layers.push(
this.getFlowLinesLayer(getLayerId(this.props.id, LayerKind.FLOWS), topFlows, false, isLocationHighlighted),
);
if (showLocationAreas && isFeatureCollection(locations)) {
layers.push(
this.getHighlightedLocationAreasLayer(
getLayerId(this.props.id, LayerKind.LOCATION_AREAS_SELECTED_AND_HIGHLIGHTED),
),
);
}
if (highlightedFlows) {
layers.push(
this.getFlowLinesLayer(getLayerId(this.props.id, LayerKind.FLOWS_HIGHLIGHTED), highlightedFlows, true, false),
);
}
layers.push(this.getLocationCirclesLayer(getLayerId(this.props.id, LayerKind.LOCATIONS), locationCircles, false));
if (isLocationHighlighted) {
const highlightedLocationCircles = selectors.getHighlightedLocationCircles(this.props);
layers.push(
this.getLocationCirclesLayer(
getLayerId(this.props.id, LayerKind.LOCATIONS_HIGHLIGHTED),
highlightedLocationCircles,
true,
),
);
}
if (showLocationAreas && isFeatureCollection(locations)) {
layers.push(this.getLocationAreasLayer(getLayerId(this.props.id, LayerKind.LOCATION_AREAS_OUTLINES), true));
}
return layers;
}
private getLocationAreasLayer(id: string, outline: boolean): DeckGLLayer {
const { locations, selectedLocationIds, highlightedLocationId, highlightedFlow, updateTriggers } = this.props;
const { selectors } = this.state;
const colors = selectors.getColors(this.props);
return new GeoJsonLayer(
this.getSubLayerProps({
id,
getFillColor: selectors.getLocationAreaFillColorGetter(this.props),
getLineColor: colors.locationAreas.outline,
lineJointRounded: true,
data: locations,
stroked: outline,
filled: !outline,
...(outline && { pickable: false }),
lineWidthMinPixels: 1,
pointRadiusMinPixels: 1,
updateTriggers: {
getFillColor: {
colors,
...(outline && { selectedLocationIds, highlightedLocationId, highlightedFlow }),
...updateTriggers?.getLocationAreasFillColor,
},
getLineColor: {
colors,
...updateTriggers?.getLocationAreasLineColor,
},
},
}),
);
}
private getHighlightedLocationAreasLayer(id: string): DeckGLLayer {
const { selectors } = this.state;
const { highlightedLocationId, highlightedLocationAreaId, updateTriggers } = this.props;
const colors = selectors.getColors(this.props);
const getLocationById = selectors.getLocationByIdGetter(this.props);
return new GeoJsonLayer(
this.getSubLayerProps({
id,
getFillColor: () => colors.locationAreas.highlighted,
getLineColor: colors.locationAreas.outline,
lineJointRounded: true,
data: highlightedLocationId
? getLocationById(highlightedLocationId)
: highlightedLocationAreaId
? getLocationById(highlightedLocationAreaId)
: undefined,
stroked: false,
filled: true,
pickable: false,
lineWidthMinPixels: 5,
updateTriggers: {
getFillColor: {
colors,
...updateTriggers?.getLocationAreasFillColor,
},
getLineColor: {
colors,
...updateTriggers?.getLocationAreasLineColor,
},
},
}),
);
}
private getFlowLinesLayer(
id: string,
flows: Flow[],
highlighted: boolean,
dimmed: boolean,
): FlowLinesLayer | AnimatedFlowLinesLayer {
const {
getFlowOriginId,
getFlowDestId,
getFlowMagnitude,
getLocationCentroid,
getAnimatedFlowLineStaggering,
showTotals,
maxLocationCircleSize,
outlineThickness,
minPickableFlowThickness,
maxFlowThickness,
flowMagnitudeExtent,
locationTotalsExtent,
updateTriggers,
} = this.props;
const { selectors } = this.state;
const endpointOffsets: [number, number] = [(maxLocationCircleSize || 0) + 1, (maxLocationCircleSize || 0) + 1];
const getLocationRadius = selectors.getLocationCircleRadiusGetter(this.props);
const getLocationById = selectors.getLocationByIdGetter(this.props);
const flowThicknessScale = selectors.getFlowThicknessScale(this.props);
const getSourcePosition: FlowAccessor<[number, number]> = (flow, info) =>
getLocationCentroid!(getLocationById(getFlowOriginId!(flow, info)));
const getTargetPosition: FlowAccessor<[number, number]> = (flow, info) =>
getLocationCentroid!(getLocationById(getFlowDestId!(flow, info)));
const getThickness: FlowAccessor<number> = (flow, info) => flowThicknessScale(getFlowMagnitude!(flow, info));
const getEndpointOffsets: FlowAccessor<[number, number]> = (flow, info) => {
if (!showTotals) {
return endpointOffsets;
}
return [
getLocationRadius({
location: getLocationById(getFlowOriginId!(flow, info)),
type: LocationCircleType.OUTLINE,
}),
getLocationRadius({
location: getLocationById(getFlowDestId!(flow, info)),
type: LocationCircleType.OUTLINE,
}),
];
};
const flowColorScale = selectors.getFlowColorScale(this.props);
const colors = selectors.getColors(this.props);
const getColor = selectors.getFlowLinesColorGetter(colors, flowColorScale, highlighted, dimmed);
const { animate } = this.props;
const thicknessUnit = maxFlowThickness != null ? maxFlowThickness : FlowLinesLayer.defaultProps.thicknessUnit;
const baseProps = {
id,
getSourcePosition,
getTargetPosition,
getThickness,
getEndpointOffsets,
getColor,
data: flows,
...(highlighted && { pickable: false }),
drawOutline: !dimmed,
updateTriggers: {
getSourcePosition: updateTriggers?.getFlowLinesSourcePosition,
getTargetPosition: updateTriggers?.getFlowLinesTargetPosition,
getThickness: {
flowMagnitudeExtent,
maxFlowThickness,
...updateTriggers?.getFlowLinesThickness,
},
getColor: {
colors,
dimmed,
...updateTriggers?.getFlowLinesColor,
},
getEndpointOffsets: {
showTotals,
locationTotalsExtent,
...updateTriggers?.getFlowLinesEndpointOffsets,
},
},
thicknessUnit,
outlineColor: colors.outlineColor,
...(outlineThickness && { outlineThickness }),
...(minPickableFlowThickness != null && {
getPickable: (f: Flow) => (thicknessUnit * getThickness(f) >= minPickableFlowThickness ? 1.0 : 0.0),
}),
parameters: {
...this.props.parameters,
depthTest: false,
},
};
if (animate) {
return new AnimatedFlowLinesLayer(
this.getSubLayerProps({
...baseProps,
currentTime: this.props.animationCurrentTime,
...(this.props.animationTailLength != null && {
animationTailLength: this.props.animationTailLength,
}),
...(getAnimatedFlowLineStaggering && {
getStaggering: getAnimatedFlowLineStaggering,
}),
}),
);
} else {
return new FlowLinesLayer(this.getSubLayerProps(baseProps));
}
}
private getLocationCirclesLayer(id: string, circles: LocationCircle[], highlighted: boolean): FlowCirclesLayer {
  // Builds the sub-layer that renders the location circles.
  const props = this.props;
  const {
    highlightedLocationId,
    selectedLocationIds,
    getLocationCentroid,
    flows,
    showTotals,
    updateTriggers,
    maxLocationCircleSize,
    locationTotalsExtent,
  } = props;
  const { selectors } = this.state;

  // Circle radius: totals-driven when enabled, otherwise a constant maximum.
  const fixedRadius = () => maxLocationCircleSize;
  const radiusAccessor = showTotals ? selectors.getLocationCircleRadiusGetter(props) : fixedRadius;

  const palette = selectors.getColors(props);
  const colorAccessor = selectors.getLocationCircleColorGetter(props);
  const positionAccessor: LocationCircleAccessor<[number, number]> = circle =>
    getLocationCentroid!(circle.location);

  // Re-run the accessors only when one of their inputs actually changes.
  const circleUpdateTriggers = {
    getRadius: {
      showTotals,
      selectedLocationIds,
      maxLocationCircleSize,
      locationTotalsExtent,
      flows,
      ...updateTriggers?.getCirclesRadius,
    },
    getColor: {
      colors: palette,
      highlightedLocationId,
      selectedLocationIds,
      flows,
      ...updateTriggers?.getCirclesColor,
    },
  };

  const layerProps = this.getSubLayerProps({
    id,
    getColor: colorAccessor,
    getPosition: positionAccessor,
    getRadius: radiusAccessor,
    data: circles,
    updateTriggers: circleUpdateTriggers,
    parameters: {
      ...props.parameters,
      depthTest: false,
    },
  });
  return new FlowCirclesLayer(layerProps);
}
}
import {
add0x,
getLogger,
hexBufToStr,
hexStrToBuf,
logError,
} from '@pigi/core-utils'
/* Internal Imports */
import {
Address,
AggregatorUnsupportedError,
isCreateAndTransferTransition,
isStateTransitionError,
isSwapTransition,
isTransferTransition,
LocalFraudProof,
LocalMachineError,
RollupBlock,
RollupStateValidator,
RollupTransaction,
RollupTransition,
RollupTransitionPosition,
SignedTransaction,
StateSnapshot,
TokenType,
ValidationOutOfOrderError,
} from '../types'
import {
DefaultRollupStateMachine,
PIGI_TOKEN_TYPE,
UNI_TOKEN_TYPE,
UNISWAP_STORAGE_SLOT,
} from '../common'
import { ContractFraudProof, DefaultRollupBlock } from './types'
// Module-scoped logger used by all rollup state validation code below.
const log = getLogger('rollup-state-validator')
export class DefaultRollupStateValidator implements RollupStateValidator {
public rollupMachine: DefaultRollupStateMachine
private currentPosition: RollupTransitionPosition = {
blockNumber: 1,
transitionIndex: 0,
}
private storedBlocks: RollupBlock[] = []
public constructor(theRollupMachine: DefaultRollupStateMachine) {
this.rollupMachine = theRollupMachine
}
public async getCurrentVerifiedPosition(): Promise<RollupTransitionPosition> {
return { ...this.currentPosition }
}
public async getInputStateSnapshots(
transition: RollupTransition
): Promise<StateSnapshot[]> {
const firstSlot: number = transition.senderSlotIndex
const secondSlot: number = isSwapTransition(transition)
? UNISWAP_STORAGE_SLOT
: transition.recipientSlotIndex
log.info(`Returning snapshots for slots ${firstSlot} and ${secondSlot}.`)
return [
await this.rollupMachine.getSnapshotFromSlot(firstSlot),
await this.rollupMachine.getSnapshotFromSlot(secondSlot),
]
}
public async getTransactionFromTransitionAndSnapshots(
transition: RollupTransition,
snapshots: StateSnapshot[]
): Promise<SignedTransaction> {
let convertedTx: RollupTransaction
if (isCreateAndTransferTransition(transition)) {
const sender: Address = snapshots[0].state.pubkey
const recipient: Address = transition.createdAccountPubkey
convertedTx = {
sender,
recipient,
tokenType: transition.tokenType as TokenType,
amount: transition.amount,
}
} else if (isTransferTransition(transition)) {
const sender: Address = snapshots[0].state.pubkey
const recipient: Address = snapshots[1].state.pubkey
convertedTx = {
sender,
recipient,
tokenType: transition.tokenType as TokenType,
amount: transition.amount,
}
return {
signature: transition.signature,
transaction: convertedTx,
}
} else if (isSwapTransition(transition)) {
const swapper: Address = snapshots[0].state.pubkey
convertedTx = {
sender: swapper,
tokenType: transition.tokenType as TokenType,
inputAmount: transition.inputAmount,
minOutputAmount: transition.minOutputAmount,
timeout: transition.timeout,
}
}
return {
signature: transition.signature,
transaction: convertedTx,
}
}
public async checkNextTransition(
nextTransition: RollupTransition
): Promise<LocalFraudProof> {
let preppedFraudInputs: StateSnapshot[]
let generatedPostRoot: Buffer
const transitionPostRoot: Buffer = hexStrToBuf(nextTransition.stateRoot)
if (isCreateAndTransferTransition(nextTransition)) {
const slotIfSequential: number = await this.rollupMachine.getNextNewAccountSlot()
// if the created slot is not sequential, for now it will break
if (slotIfSequential !== nextTransition.recipientSlotIndex) {
log.error(
`Next slot in Rollup State Machine is ${slotIfSequential} but next transition recipient slot is ${nextTransition.recipientSlotIndex}`
)
throw new AggregatorUnsupportedError()
}
}
// In case there was fraud in this transaction, get state snapshots for each input so we can prove the fraud later.
log.info(
`Getting the pre-state inclusion proofs for a ${typeof nextTransition}: ${JSON.stringify(
nextTransition
)}`
)
preppedFraudInputs = await this.getInputStateSnapshots(nextTransition)
// convert to transaction so we can apply to validator rollup machine
log.info(`Converting the transition into a transaction...`)
const inputAsTransaction: SignedTransaction = await this.getTransactionFromTransitionAndSnapshots(
nextTransition,
preppedFraudInputs
)
log.info(
`Got back transaction: ${JSON.stringify(
inputAsTransaction
)}. Attempting to apply it to local state machine.`
)
try {
await this.rollupMachine.applyTransaction(inputAsTransaction)
generatedPostRoot = await this.rollupMachine.getStateRoot()
} catch (error) {
if (isStateTransitionError(error)) {
log.error(
`The transaction did not pass the state machine, must be badly formed! Returning fraud proof. Transition: ${JSON.stringify(
nextTransition
)}`
)
return {
fraudPosition: this.currentPosition,
fraudInputs: preppedFraudInputs,
fraudTransition: nextTransition,
}
} else {
logError(
log,
`Transaction ingestion threw an error--but for a reason unrelated to the transition itself not passing the state machine. Uh oh! Transition: ${JSON.stringify(
nextTransition
)}`,
error
)
throw new LocalMachineError()
}
}
if (generatedPostRoot.equals(transitionPostRoot)) {
log.debug(
`Ingested valid transition and postRoot matched the aggregator claim.`
)
this.currentPosition.transitionIndex++
return undefined
} else {
log.error(
`Ingested valid transition and postRoot disagreed with the aggregator claim--returning fraud. Transition: ${JSON.stringify(
nextTransition
)}. Expected State Root: ${hexBufToStr(generatedPostRoot)}`
)
return {
fraudPosition: this.currentPosition,
fraudInputs: preppedFraudInputs,
fraudTransition: nextTransition,
}
}
}
public async storeBlock(newBlock: RollupBlock): Promise<void> {
this.storedBlocks[newBlock.blockNumber - 1] = newBlock
}
public async validateStoredBlock(
blockNumber: number
): Promise<ContractFraudProof> {
// grab the block itself from our stored blocks
const blockToValidate: RollupBlock = this.storedBlocks[blockNumber - 1]
if (!blockToValidate) {
log.error(
`Tried to check next block, but it has not yet been stored yet. Block Number: ${blockNumber}`
)
throw new ValidationOutOfOrderError()
}
const nextBlockNumberToValidate: number = (await this.getCurrentVerifiedPosition())
.blockNumber
if (blockToValidate.blockNumber !== nextBlockNumberToValidate) {
log.error(
`Next block to validate is ${nextBlockNumberToValidate} but trying to validate block ${blockToValidate.blockNumber}!`
)
throw new ValidationOutOfOrderError()
}
const transitionCount: number =
!blockToValidate || !blockToValidate.transitions
? 0
: blockToValidate.transitions.length
log.info(
`Starting validation for block ${blockToValidate.blockNumber}. Number of transitions: ${transitionCount}`
)
let transitionNumber = 0
// Now loop through and apply the transitions one by one
for (const transition of blockToValidate.transitions) {
const fraudCheck: LocalFraudProof = await this.checkNextTransition(
transition
)
if (!!fraudCheck) {
log.info(
`Found evidence of fraud at transition ${JSON.stringify(
transition
)}. The current index is ${
(await this.getCurrentVerifiedPosition()).transitionIndex
}. Submitting fraud proof.`
)
const generatedProof = await this.generateContractFraudProof(
fraudCheck,
blockToValidate
)
return generatedProof
}
log.info(
`Successfully validated transition ${++transitionNumber} of ${
blockToValidate.transitions.length
} of block ${blockToValidate.blockNumber}`
)
}
log.info(
`Found no fraud in block ${nextBlockNumberToValidate}, incrementing block number and reseting transition index`
)
// otherwise
this.currentPosition.blockNumber++
this.currentPosition.transitionIndex = 0
return undefined
}
public async generateContractFraudProof(
localProof: LocalFraudProof,
block: RollupBlock
): Promise<ContractFraudProof> {
const fraudInputs: StateSnapshot[] = localProof.fraudInputs as StateSnapshot[]
log.info(
`Converting the LocalFraudProof's snapshots into contract-friendly includedStorageSlots... ${JSON.stringify(
fraudInputs
)}`
)
const includedStorageSlots = [
{
storageSlot: {
value: {
pubkey: fraudInputs[0].state.pubkey,
balances: [
fraudInputs[0].state.balances[UNI_TOKEN_TYPE],
fraudInputs[0].state.balances[PIGI_TOKEN_TYPE],
],
},
slotIndex: fraudInputs[0].slotIndex,
},
siblings: fraudInputs[1].inclusionProof.map((x) => add0x(x)),
},
{
storageSlot: {
value: {
pubkey: fraudInputs[1].state.pubkey,
balances: [
fraudInputs[1].state.balances[UNI_TOKEN_TYPE],
fraudInputs[1].state.balances[PIGI_TOKEN_TYPE],
],
},
slotIndex: fraudInputs[1].slotIndex,
},
siblings: fraudInputs[1].inclusionProof.map((x) => add0x(x)),
},
]
log.info(
`Generating a Merklized block to build inclusions for the fraudulent transition...`
)
const merklizedBlock: DefaultRollupBlock = new DefaultRollupBlock(
block.transitions,
block.blockNumber
)
await merklizedBlock.generateTree()
const curPosition = await this.getCurrentVerifiedPosition()
const fraudulentTransitionIndex = curPosition.transitionIndex
log.info(
`Fraudlent transition index is ${fraudulentTransitionIndex}. Getting inclusion proof in rollup block...`
)
const fraudulentIncludedTransition = await merklizedBlock.getIncludedTransition(
fraudulentTransitionIndex
)
let validIncludedTransition
if (fraudulentTransitionIndex > 0) {
log.info(
`Since the fraud transition index is > 0, we serve the valid pre-transition from the same block at the previous position.`
)
validIncludedTransition = await merklizedBlock.getIncludedTransition(
fraudulentTransitionIndex - 1
)
} else {
log.info(
`Since the fraud transition index is 0, we need to grab the valid pre-transition as the last one in the previous block.`
)
const prevRollupBlockNumber: number = curPosition.blockNumber - 1
const prevRollupBlock: DefaultRollupBlock = new DefaultRollupBlock(
this.storedBlocks[prevRollupBlockNumber - 1].transitions,
prevRollupBlockNumber
)
log.info(
`Generating a Merklized block to build transition inclusion from the previous block...`
)
await prevRollupBlock.generateTree()
const lastTransitionInLastBlockIndex: number =
prevRollupBlock.transitions.length - 1
log.info(
`Grabbing the last transition from the previous block, it has index ${lastTransitionInLastBlockIndex}`
)
validIncludedTransition = await prevRollupBlock.getIncludedTransition(
lastTransitionInLastBlockIndex
)
}
return [
validIncludedTransition,
fraudulentIncludedTransition,
includedStorageSlots,
]
}
} | the_stack |
import { Inject, Injectable, Optional } from '@angular/core';
import { HttpClient, HttpHeaders, HttpParams,
HttpResponse, HttpEvent, HttpParameterCodec } from '@angular/common/http';
import { CustomHttpParameterCodec } from '../encoder';
import { Observable } from 'rxjs';
import { CountAnalytics, DateHistoAnalytics, GroupByAnalytics } from '../model/models';
import { ErrorResponse } from '../model/models';
import { Log } from '../model/models';
import { LogsResponse } from '../model/models';
import { BASE_PATH, COLLECTION_FORMATS } from '../variables';
import { Configuration } from '../configuration';
/** Request parameters for {@link AnalyticsService#exportApplicationLogsByApplicationId}. */
export interface ExportApplicationLogsByApplicationIdRequestParams {
    /** Id of an application. */
    applicationId: string;
    /** The page number for pagination. */
    page?: number;
    /** The number of items per page for pagination. If the size is 0, the response contains only metadata and returns the values as for a non-paged resource. If the size is -1, the response contains all data. */
    size?: number;
    /** Lower bound of timestamp for filtering. */
    from?: number;
    /** Upper bound of timestamp for filtering. Must be greater than *from* query param. */
    to?: number;
    /** Query used for filtering. */
    query?: string;
    /** Field used for filtering. **required** when type is **GROUP_BY**. */
    field?: string;
    /** Order used to sort the result list. */
    order?: 'ASC' | 'DESC';
}
/** Request parameters for {@link AnalyticsService#getApplicationAnalytics}. */
export interface GetApplicationAnalyticsRequestParams {
    /** Id of an application. */
    applicationId: string;
    /** The page number for pagination. */
    page?: number;
    /** The number of items per page for pagination. If the size is 0, the response contains only metadata and returns the values as for a non-paged resource. If the size is -1, the response contains all data. */
    size?: number;
    /** Lower bound of timestamp for filtering. */
    from?: number;
    /** Upper bound of timestamp for filtering. Must be greater than *from* query param. */
    to?: number;
    /** Interval for time search. Must be >= 1 000 and <= 1 000 000 000. */
    interval?: number;
    /** Query used for filtering. */
    query?: string;
    /** Field used for filtering. **required** when type is **GROUP_BY**. */
    field?: string;
    /** Type of analytics that is expected : - GROUP_BY : Used to group total hits by a specific field (Application, Status, Path, ...).\\ Query params : - from - to - interval - query - field - order - ranges - DATE_HISTO : Used to retrieve total hits per range of time, on a specific time interval.\\ Query params : - from - to - interval - query - aggs - COUNT : Used to retrieve total hits, on a specific time interval.\\ Query params : - from - to - interval - query - STATS : Used to retrieve stats data, on a specific time interval.\\ Query params : - from - to - query */
    type?: 'GROUP_BY' | 'DATE_HISTO' | 'COUNT' | 'STATS';
    /** Used with GROUP_BY type only. A semicolon separated list of \"from:to\" elements. **_/!\\\\ Different from *from* and *to* query params** */
    ranges?: string;
    /** Used with DATE_HISTO type only. A semicolon separated list of \"type:field\" elements. **_/!\\\\ Different from *type* and *field* query params**\\ Type can be **FIELD**, **AVG**, **MIN**, **MAX** */
    aggs?: string;
    /** Used with GROUP_BY type only. A colon separated list of \"type:field\" elements. **_/!\\\\ Different from *type* and *field* query params**\\ By default, sort is ASC. If *type* starts with \'-\', the order sort is DESC.\\ Currently, only **AVG** is supported. */
    order?: string;
}
/** Request parameters for {@link AnalyticsService#getApplicationLogByApplicationIdAndLogId}. */
export interface GetApplicationLogByApplicationIdAndLogIdRequestParams {
    /** Id of an application. */
    applicationId: string;
    /** Id of a log. */
    logId: string;
    /** Used to select the right index */
    timestamp?: number;
}
/** Request parameters for {@link AnalyticsService#getApplicationLogs}. */
export interface GetApplicationLogsRequestParams {
    /** Id of an application. */
    applicationId: string;
    /** The page number for pagination. */
    page?: number;
    /** The number of items per page for pagination. If the size is 0, the response contains only metadata and returns the values as for a non-paged resource. If the size is -1, the response contains all data. */
    size?: number;
    /** Lower bound of timestamp for filtering. */
    from?: number;
    /** Upper bound of timestamp for filtering. Must be greater than *from* query param. */
    to?: number;
    /** Query used for filtering. */
    query?: string;
    /** Field used for filtering. **required** when type is **GROUP_BY**. */
    field?: string;
    /** Order used to sort the result list. */
    order?: 'ASC' | 'DESC';
}
/**
 * Generated API client for the Portal analytics endpoints (application logs
 * and analytics). Produced by an OpenAPI code generator — prefer regenerating
 * over hand-editing the request-building logic.
 */
@Injectable({
  providedIn: 'root'
})
export class AnalyticsService {

    // Fallback base URL, used only when neither the injected BASE_PATH nor
    // the Configuration provides one.
    protected basePath = 'http://localhost:8083/portal/environments/DEFAULT';
    public defaultHeaders = new HttpHeaders();
    public configuration = new Configuration();
    public encoder: HttpParameterCodec;

    /**
     * @param httpClient Angular HTTP client used for all requests.
     * @param basePath optional injected API base path; overrides the default.
     * @param configuration optional service configuration (auth, encoder, base path).
     */
    constructor(protected httpClient: HttpClient, @Optional()@Inject(BASE_PATH) basePath: string, @Optional() configuration: Configuration) {
        if (configuration) {
            this.configuration = configuration;
        }
        // Resolve the effective base path: configuration wins, then the
        // injected token, then the hard-coded default.
        if (typeof this.configuration.basePath !== 'string') {
            if (typeof basePath !== 'string') {
                basePath = this.basePath;
            }
            this.configuration.basePath = basePath;
        }
        this.encoder = this.configuration.encoder || new CustomHttpParameterCodec();
    }

    /**
     * Adds a value to the query params, flattening plain objects into their
     * entries. Dates and primitives are appended under the given key.
     */
    private addToHttpParams(httpParams: HttpParams, value: any, key?: string): HttpParams {
        if (typeof value === "object" && value instanceof Date === false) {
            httpParams = this.addToHttpParamsRecursive(httpParams, value);
        } else {
            httpParams = this.addToHttpParamsRecursive(httpParams, value, key);
        }
        return httpParams;
    }

    /**
     * Recursive worker for addToHttpParams: arrays repeat the key per element,
     * Dates are serialized as YYYY-MM-DD, nested objects use dotted keys.
     */
    private addToHttpParamsRecursive(httpParams: HttpParams, value?: any, key?: string): HttpParams {
        if (value == null) {
            return httpParams;
        }

        if (typeof value === "object") {
            if (Array.isArray(value)) {
                (value as any[]).forEach( elem => httpParams = this.addToHttpParamsRecursive(httpParams, elem, key));
            } else if (value instanceof Date) {
                if (key != null) {
                    httpParams = httpParams.append(key,
                        (value as Date).toISOString().substr(0, 10));
                } else {
                   throw Error("key may not be null if value is Date");
                }
            } else {
                Object.keys(value).forEach( k => httpParams = this.addToHttpParamsRecursive(
                    httpParams, value[k], key != null ? `${key}.${k}` : k));
            }
        } else if (key != null) {
            httpParams = httpParams.append(key, value);
        } else {
            throw Error("key may not be null if value is not object or array");
        }
        return httpParams;
    }

    /**
     * Export application logs as CSV
     * Export application logs as CSV. User must have the APPLICATION_LOG[READ] permission.
     * @param requestParameters
     * @param observe set whether or not to return the data Observable as the body, response or events. defaults to returning the body.
     * @param reportProgress flag to report request and response progress.
     */
    public exportApplicationLogsByApplicationId(requestParameters: ExportApplicationLogsByApplicationIdRequestParams, observe?: 'body', reportProgress?: boolean, options?: {httpHeaderAccept?: 'text/plain' | 'application/json'}): Observable<string>;
    public exportApplicationLogsByApplicationId(requestParameters: ExportApplicationLogsByApplicationIdRequestParams, observe?: 'response', reportProgress?: boolean, options?: {httpHeaderAccept?: 'text/plain' | 'application/json'}): Observable<HttpResponse<string>>;
    public exportApplicationLogsByApplicationId(requestParameters: ExportApplicationLogsByApplicationIdRequestParams, observe?: 'events', reportProgress?: boolean, options?: {httpHeaderAccept?: 'text/plain' | 'application/json'}): Observable<HttpEvent<string>>;
    public exportApplicationLogsByApplicationId(requestParameters: ExportApplicationLogsByApplicationIdRequestParams, observe: any = 'body', reportProgress: boolean = false, options?: {httpHeaderAccept?: 'text/plain' | 'application/json'}): Observable<any> {
        const applicationId = requestParameters.applicationId;
        if (applicationId === null || applicationId === undefined) {
            throw new Error('Required parameter applicationId was null or undefined when calling exportApplicationLogsByApplicationId.');
        }
        const page = requestParameters.page;
        const size = requestParameters.size;
        const from = requestParameters.from;
        const to = requestParameters.to;
        const query = requestParameters.query;
        const field = requestParameters.field;
        const order = requestParameters.order;

        // Build the query string, skipping parameters the caller did not set.
        let queryParameters = new HttpParams({encoder: this.encoder});
        if (page !== undefined && page !== null) {
          queryParameters = this.addToHttpParams(queryParameters,
            <any>page, 'page');
        }
        if (size !== undefined && size !== null) {
          queryParameters = this.addToHttpParams(queryParameters,
            <any>size, 'size');
        }
        if (from !== undefined && from !== null) {
          queryParameters = this.addToHttpParams(queryParameters,
            <any>from, 'from');
        }
        if (to !== undefined && to !== null) {
          queryParameters = this.addToHttpParams(queryParameters,
            <any>to, 'to');
        }
        if (query !== undefined && query !== null) {
          queryParameters = this.addToHttpParams(queryParameters,
            <any>query, 'query');
        }
        if (field !== undefined && field !== null) {
          queryParameters = this.addToHttpParams(queryParameters,
            <any>field, 'field');
        }
        if (order !== undefined && order !== null) {
          queryParameters = this.addToHttpParams(queryParameters,
            <any>order, 'order');
        }

        let headers = this.defaultHeaders;

        // authentication (BasicAuth) required
        if (this.configuration.username || this.configuration.password) {
            headers = headers.set('Authorization', 'Basic ' + btoa(this.configuration.username + ':' + this.configuration.password));
        }
        // authentication (CookieAuth) required
        // NOTE(review): generated code — the cookie key is resolved but never
        // applied here; presumably the browser sends the auth cookie itself
        // when withCredentials is set. Confirm against the generator template.
        if (this.configuration.apiKeys) {
            const key: string | undefined = this.configuration.apiKeys["CookieAuth"] || this.configuration.apiKeys["Auth-Graviteeio-APIM"];
            if (key) {
            }
        }

        let httpHeaderAcceptSelected: string | undefined = options && options.httpHeaderAccept;
        if (httpHeaderAcceptSelected === undefined) {
            // to determine the Accept header
            const httpHeaderAccepts: string[] = [
                'text/plain',
                'application/json'
            ];
            httpHeaderAcceptSelected = this.configuration.selectHeaderAccept(httpHeaderAccepts);
        }
        if (httpHeaderAcceptSelected !== undefined) {
            headers = headers.set('Accept', httpHeaderAcceptSelected);
        }

        // Text Accept headers must be fetched as raw text, not parsed JSON.
        let responseType: 'text' | 'json' = 'json';
        if(httpHeaderAcceptSelected && httpHeaderAcceptSelected.startsWith('text')) {
            responseType = 'text';
        }

        return this.httpClient.post<string>(`${this.configuration.basePath}/applications/${encodeURIComponent(String(applicationId))}/logs/_export`,
            null,
            {
                params: queryParameters,
                responseType: <any>responseType,
                withCredentials: this.configuration.withCredentials,
                headers: headers,
                observe: observe,
                reportProgress: reportProgress
            }
        );
    }

    /**
     * Get Application analytics
     * Get the application analytics. User must have the APPLICATION_ANALYTICS[READ] permission.
     * @param requestParameters
     * @param observe set whether or not to return the data Observable as the body, response or events. defaults to returning the body.
     * @param reportProgress flag to report request and response progress.
     */
    public getApplicationAnalytics(requestParameters: GetApplicationAnalyticsRequestParams, observe?: 'body', reportProgress?: boolean, options?: {httpHeaderAccept?: 'application/json'}): Observable<DateHistoAnalytics | GroupByAnalytics | CountAnalytics>;
    public getApplicationAnalytics(requestParameters: GetApplicationAnalyticsRequestParams, observe?: 'response', reportProgress?: boolean, options?: {httpHeaderAccept?: 'application/json'}): Observable<HttpResponse<DateHistoAnalytics | GroupByAnalytics | CountAnalytics>>;
    public getApplicationAnalytics(requestParameters: GetApplicationAnalyticsRequestParams, observe?: 'events', reportProgress?: boolean, options?: {httpHeaderAccept?: 'application/json'}): Observable<HttpEvent<DateHistoAnalytics | GroupByAnalytics | CountAnalytics>>;
    public getApplicationAnalytics(requestParameters: GetApplicationAnalyticsRequestParams, observe: any = 'body', reportProgress: boolean = false, options?: {httpHeaderAccept?: 'application/json'}): Observable<any> {
        const applicationId = requestParameters.applicationId;
        if (applicationId === null || applicationId === undefined) {
            throw new Error('Required parameter applicationId was null or undefined when calling getApplicationAnalytics.');
        }
        const page = requestParameters.page;
        const size = requestParameters.size;
        const from = requestParameters.from;
        const to = requestParameters.to;
        const interval = requestParameters.interval;
        const query = requestParameters.query;
        const field = requestParameters.field;
        const type = requestParameters.type;
        const ranges = requestParameters.ranges;
        const aggs = requestParameters.aggs;
        const order = requestParameters.order;

        // Build the query string, skipping parameters the caller did not set.
        let queryParameters = new HttpParams({encoder: this.encoder});
        if (page !== undefined && page !== null) {
          queryParameters = this.addToHttpParams(queryParameters,
            <any>page, 'page');
        }
        if (size !== undefined && size !== null) {
          queryParameters = this.addToHttpParams(queryParameters,
            <any>size, 'size');
        }
        if (from !== undefined && from !== null) {
          queryParameters = this.addToHttpParams(queryParameters,
            <any>from, 'from');
        }
        if (to !== undefined && to !== null) {
          queryParameters = this.addToHttpParams(queryParameters,
            <any>to, 'to');
        }
        if (interval !== undefined && interval !== null) {
          queryParameters = this.addToHttpParams(queryParameters,
            <any>interval, 'interval');
        }
        if (query !== undefined && query !== null) {
          queryParameters = this.addToHttpParams(queryParameters,
            <any>query, 'query');
        }
        if (field !== undefined && field !== null) {
          queryParameters = this.addToHttpParams(queryParameters,
            <any>field, 'field');
        }
        if (type !== undefined && type !== null) {
          queryParameters = this.addToHttpParams(queryParameters,
            <any>type, 'type');
        }
        if (ranges !== undefined && ranges !== null) {
          queryParameters = this.addToHttpParams(queryParameters,
            <any>ranges, 'ranges');
        }
        if (aggs !== undefined && aggs !== null) {
          queryParameters = this.addToHttpParams(queryParameters,
            <any>aggs, 'aggs');
        }
        if (order !== undefined && order !== null) {
          queryParameters = this.addToHttpParams(queryParameters,
            <any>order, 'order');
        }

        let headers = this.defaultHeaders;

        // authentication (BasicAuth) required
        if (this.configuration.username || this.configuration.password) {
            headers = headers.set('Authorization', 'Basic ' + btoa(this.configuration.username + ':' + this.configuration.password));
        }
        // authentication (CookieAuth) required
        // NOTE(review): generated code — cookie key resolved but intentionally
        // not applied here (cookie is sent by the browser via withCredentials).
        if (this.configuration.apiKeys) {
            const key: string | undefined = this.configuration.apiKeys["CookieAuth"] || this.configuration.apiKeys["Auth-Graviteeio-APIM"];
            if (key) {
            }
        }

        let httpHeaderAcceptSelected: string | undefined = options && options.httpHeaderAccept;
        if (httpHeaderAcceptSelected === undefined) {
            // to determine the Accept header
            const httpHeaderAccepts: string[] = [
                'application/json'
            ];
            httpHeaderAcceptSelected = this.configuration.selectHeaderAccept(httpHeaderAccepts);
        }
        if (httpHeaderAcceptSelected !== undefined) {
            headers = headers.set('Accept', httpHeaderAcceptSelected);
        }

        // Text Accept headers must be fetched as raw text, not parsed JSON.
        let responseType: 'text' | 'json' = 'json';
        if(httpHeaderAcceptSelected && httpHeaderAcceptSelected.startsWith('text')) {
            responseType = 'text';
        }

        return this.httpClient.get<DateHistoAnalytics | GroupByAnalytics | CountAnalytics>(`${this.configuration.basePath}/applications/${encodeURIComponent(String(applicationId))}/analytics`,
            {
                params: queryParameters,
                responseType: <any>responseType,
                withCredentials: this.configuration.withCredentials,
                headers: headers,
                observe: observe,
                reportProgress: reportProgress
            }
        );
    }

    /**
     * Get a specific log of an application
     * Get a specific log of an application. User must have the APPLICATION_LOG[READ] permission.
     * @param requestParameters
     * @param observe set whether or not to return the data Observable as the body, response or events. defaults to returning the body.
     * @param reportProgress flag to report request and response progress.
     */
    public getApplicationLogByApplicationIdAndLogId(requestParameters: GetApplicationLogByApplicationIdAndLogIdRequestParams, observe?: 'body', reportProgress?: boolean, options?: {httpHeaderAccept?: 'application/json'}): Observable<Log>;
    public getApplicationLogByApplicationIdAndLogId(requestParameters: GetApplicationLogByApplicationIdAndLogIdRequestParams, observe?: 'response', reportProgress?: boolean, options?: {httpHeaderAccept?: 'application/json'}): Observable<HttpResponse<Log>>;
    public getApplicationLogByApplicationIdAndLogId(requestParameters: GetApplicationLogByApplicationIdAndLogIdRequestParams, observe?: 'events', reportProgress?: boolean, options?: {httpHeaderAccept?: 'application/json'}): Observable<HttpEvent<Log>>;
    public getApplicationLogByApplicationIdAndLogId(requestParameters: GetApplicationLogByApplicationIdAndLogIdRequestParams, observe: any = 'body', reportProgress: boolean = false, options?: {httpHeaderAccept?: 'application/json'}): Observable<any> {
        const applicationId = requestParameters.applicationId;
        if (applicationId === null || applicationId === undefined) {
            throw new Error('Required parameter applicationId was null or undefined when calling getApplicationLogByApplicationIdAndLogId.');
        }
        const logId = requestParameters.logId;
        if (logId === null || logId === undefined) {
            throw new Error('Required parameter logId was null or undefined when calling getApplicationLogByApplicationIdAndLogId.');
        }
        const timestamp = requestParameters.timestamp;

        // Build the query string, skipping parameters the caller did not set.
        let queryParameters = new HttpParams({encoder: this.encoder});
        if (timestamp !== undefined && timestamp !== null) {
          queryParameters = this.addToHttpParams(queryParameters,
            <any>timestamp, 'timestamp');
        }

        let headers = this.defaultHeaders;

        // authentication (BasicAuth) required
        if (this.configuration.username || this.configuration.password) {
            headers = headers.set('Authorization', 'Basic ' + btoa(this.configuration.username + ':' + this.configuration.password));
        }
        // authentication (CookieAuth) required
        // NOTE(review): generated code — cookie key resolved but intentionally
        // not applied here (cookie is sent by the browser via withCredentials).
        if (this.configuration.apiKeys) {
            const key: string | undefined = this.configuration.apiKeys["CookieAuth"] || this.configuration.apiKeys["Auth-Graviteeio-APIM"];
            if (key) {
            }
        }

        let httpHeaderAcceptSelected: string | undefined = options && options.httpHeaderAccept;
        if (httpHeaderAcceptSelected === undefined) {
            // to determine the Accept header
            const httpHeaderAccepts: string[] = [
                'application/json'
            ];
            httpHeaderAcceptSelected = this.configuration.selectHeaderAccept(httpHeaderAccepts);
        }
        if (httpHeaderAcceptSelected !== undefined) {
            headers = headers.set('Accept', httpHeaderAcceptSelected);
        }

        // Text Accept headers must be fetched as raw text, not parsed JSON.
        let responseType: 'text' | 'json' = 'json';
        if(httpHeaderAcceptSelected && httpHeaderAcceptSelected.startsWith('text')) {
            responseType = 'text';
        }

        return this.httpClient.get<Log>(`${this.configuration.basePath}/applications/${encodeURIComponent(String(applicationId))}/logs/${encodeURIComponent(String(logId))}`,
            {
                params: queryParameters,
                responseType: <any>responseType,
                withCredentials: this.configuration.withCredentials,
                headers: headers,
                observe: observe,
                reportProgress: reportProgress
            }
        );
    }

    /**
     * Get Application logs
     * Get the application logs. User must have the APPLICATION_LOG[READ] permission.
     * @param requestParameters
     * @param observe set whether or not to return the data Observable as the body, response or events. defaults to returning the body.
     * @param reportProgress flag to report request and response progress.
     */
    public getApplicationLogs(requestParameters: GetApplicationLogsRequestParams, observe?: 'body', reportProgress?: boolean, options?: {httpHeaderAccept?: 'application/json'}): Observable<LogsResponse>;
    public getApplicationLogs(requestParameters: GetApplicationLogsRequestParams, observe?: 'response', reportProgress?: boolean, options?: {httpHeaderAccept?: 'application/json'}): Observable<HttpResponse<LogsResponse>>;
    public getApplicationLogs(requestParameters: GetApplicationLogsRequestParams, observe?: 'events', reportProgress?: boolean, options?: {httpHeaderAccept?: 'application/json'}): Observable<HttpEvent<LogsResponse>>;
    public getApplicationLogs(requestParameters: GetApplicationLogsRequestParams, observe: any = 'body', reportProgress: boolean = false, options?: {httpHeaderAccept?: 'application/json'}): Observable<any> {
        const applicationId = requestParameters.applicationId;
        if (applicationId === null || applicationId === undefined) {
            throw new Error('Required parameter applicationId was null or undefined when calling getApplicationLogs.');
        }
        const page = requestParameters.page;
        const size = requestParameters.size;
        const from = requestParameters.from;
        const to = requestParameters.to;
        const query = requestParameters.query;
        const field = requestParameters.field;
        const order = requestParameters.order;

        // Build the query string, skipping parameters the caller did not set.
        let queryParameters = new HttpParams({encoder: this.encoder});
        if (page !== undefined && page !== null) {
          queryParameters = this.addToHttpParams(queryParameters,
            <any>page, 'page');
        }
        if (size !== undefined && size !== null) {
          queryParameters = this.addToHttpParams(queryParameters,
            <any>size, 'size');
        }
        if (from !== undefined && from !== null) {
          queryParameters = this.addToHttpParams(queryParameters,
            <any>from, 'from');
        }
        if (to !== undefined && to !== null) {
          queryParameters = this.addToHttpParams(queryParameters,
            <any>to, 'to');
        }
        if (query !== undefined && query !== null) {
          queryParameters = this.addToHttpParams(queryParameters,
            <any>query, 'query');
        }
        if (field !== undefined && field !== null) {
          queryParameters = this.addToHttpParams(queryParameters,
            <any>field, 'field');
        }
        if (order !== undefined && order !== null) {
          queryParameters = this.addToHttpParams(queryParameters,
            <any>order, 'order');
        }

        let headers = this.defaultHeaders;

        // authentication (BasicAuth) required
        if (this.configuration.username || this.configuration.password) {
            headers = headers.set('Authorization', 'Basic ' + btoa(this.configuration.username + ':' + this.configuration.password));
        }
        // authentication (CookieAuth) required
        // NOTE(review): generated code — cookie key resolved but intentionally
        // not applied here (cookie is sent by the browser via withCredentials).
        if (this.configuration.apiKeys) {
            const key: string | undefined = this.configuration.apiKeys["CookieAuth"] || this.configuration.apiKeys["Auth-Graviteeio-APIM"];
            if (key) {
            }
        }

        let httpHeaderAcceptSelected: string | undefined = options && options.httpHeaderAccept;
        if (httpHeaderAcceptSelected === undefined) {
            // to determine the Accept header
            const httpHeaderAccepts: string[] = [
                'application/json'
            ];
            httpHeaderAcceptSelected = this.configuration.selectHeaderAccept(httpHeaderAccepts);
        }
        if (httpHeaderAcceptSelected !== undefined) {
            headers = headers.set('Accept', httpHeaderAcceptSelected);
        }

        // Text Accept headers must be fetched as raw text, not parsed JSON.
        let responseType: 'text' | 'json' = 'json';
        if(httpHeaderAcceptSelected && httpHeaderAcceptSelected.startsWith('text')) {
            responseType = 'text';
        }

        return this.httpClient.get<LogsResponse>(`${this.configuration.basePath}/applications/${encodeURIComponent(String(applicationId))}/logs`,
            {
                params: queryParameters,
                responseType: <any>responseType,
                withCredentials: this.configuration.withCredentials,
                headers: headers,
                observe: observe,
                reportProgress: reportProgress
            }
        );
    }

}
import { createElement, detach } from '@syncfusion/ej2-base';
import { EditorManager } from '../../../src/editor-manager/index';
describe('Formats plugin', () => {
let innerHTML: string = `
<div style="color:red;" id="content-edit" contenteditable="true" class="e-node-deletable e-node-inner">
<p class='first-p-node'>dom node
<a href="https://www.google.com" tabindex="1">Google</a>
<label>First label Node</label>
</p>
<p class='last-p-node'>
<label>Last Label Node</label>
</p>
<h1 class="h1-tag-node" style="display:block;font-weight:500;margin-bottom:4px;font-size:17px;margin-left:4px;margin-left:4px;margin-right:4px;">Header 1</h1>
<h2 class="h2-tag-node" style="display:block;font-weight:500;margin-bottom:4px;font-size:17px;margin-left:4px;margin-left:4px;margin-right:4px;">Header 2</h2>
<ol class='ol-first'><li class="li-first">LI first</li><li class="li-second">LI Second</li></ol>
</div>
`;
describe(' Paragraph Formats testing', () => {
    let editorObj: EditorManager;
    let wrapper: HTMLElement = createElement('div', {
        id: 'dom-node', innerHTML: innerHTML
    });
    beforeAll(() => {
        document.body.appendChild(wrapper);
        editorObj = new EditorManager({ document: document, editableElement: document.getElementById("content-edit") });
    });
    it(' apply p formats ', () => {
        const editArea: HTMLElement = editorObj.editableElement as HTMLElement;
        // Re-applying the node's current format must leave it untouched.
        let rangeStart: HTMLElement = editArea.querySelector('p');
        let rangeEnd: HTMLElement = editArea.querySelector('label');
        editorObj.nodeSelection.setSelectionText(document, rangeStart.childNodes[0], rangeEnd, 0, 0);
        editorObj.execCommand("Formats", 'p', null);
        const unchanged: HTMLElement = editArea.querySelector('.first-p-node');
        expect(unchanged.tagName === 'P').toBe(true);
        // A selection spanning 'h1' and 'h2' converts both to 'p'.
        rangeStart = editArea.querySelector('.h1-tag-node');
        rangeEnd = editArea.querySelector('.h2-tag-node');
        editorObj.nodeSelection.setSelectionText(document, rangeStart, rangeEnd, 0, 0);
        editorObj.execCommand("Formats", 'p', null);
        rangeStart = editArea.querySelector('.h1-tag-node');
        rangeEnd = editArea.querySelector('.h2-tag-node');
        expect(rangeStart.tagName === 'P').toBe(true);
        expect(rangeEnd.tagName === 'P').toBe(true);
        // Converting to 'p' must keep the authored inline styles in place.
        for (const node of [rangeStart, rangeEnd]) {
            expect(node.style.display !== '').toBe(true);
            expect(node.style.marginBottom !== '').toBe(true);
            expect(node.style.marginLeft !== '').toBe(true);
            expect(node.style.marginRight !== '').toBe(true);
            expect(node.style.fontSize !== '').toBe(true);
            expect(node.style.fontWeight !== '').toBe(true);
        }
        editorObj.nodeSelection.Clear(document);
    });
    afterAll(() => {
        detach(wrapper);
    });
});
describe(' BlockQuote Formats testing', () => {
    let editorObj: EditorManager;
    let wrapper: HTMLElement = createElement('div', {
        id: 'dom-node', innerHTML: innerHTML
    });
    beforeAll(() => {
        document.body.appendChild(wrapper);
        editorObj = new EditorManager({ document: document, editableElement: document.getElementById("content-edit") });
    });
    it(' apply blockquote format', () => {
        const editArea: HTMLElement = editorObj.editableElement as HTMLElement;
        // 'p' converts to 'blockquote'.
        let rangeStart: HTMLElement = editArea.querySelector('p');
        let rangeEnd: HTMLElement = editArea.querySelector('label');
        editorObj.nodeSelection.setSelectionText(document, rangeStart.childNodes[0], rangeEnd, 0, 0);
        editorObj.execCommand("Formats", 'blockquote', null);
        let converted: HTMLElement = editArea.querySelector('.first-p-node');
        expect(converted.tagName.toLowerCase() === 'blockquote').toBe(true);
        // 'blockquote' converts back out to 'h1'.
        rangeStart = editArea.querySelector('blockquote');
        rangeEnd = editArea.querySelector('label');
        editorObj.nodeSelection.setSelectionText(document, rangeStart.childNodes[0], rangeEnd, 0, 0);
        editorObj.execCommand("Formats", 'h1', null);
        converted = editArea.querySelector('.first-p-node');
        expect(converted.tagName.toLowerCase() === 'h1').toBe(true);
        // A selection spanning 'h1' and 'h2' converts both to 'blockquote'.
        rangeStart = editArea.querySelector('.h1-tag-node');
        rangeEnd = editArea.querySelector('.h2-tag-node');
        editorObj.nodeSelection.setSelectionText(document, rangeStart, rangeEnd, 0, 0);
        editorObj.execCommand("Formats", 'blockquote', null);
        rangeStart = editArea.querySelector('.h1-tag-node');
        rangeEnd = editArea.querySelector('.h2-tag-node');
        expect(rangeStart.tagName.toLowerCase() === 'blockquote').toBe(true);
        expect(rangeEnd.tagName.toLowerCase() === 'blockquote').toBe(true);
        // For a selection inside list items, the blockquote is applied to the
        // parent 'ol' element rather than the items themselves.
        rangeStart = editArea.querySelector('.li-first');
        rangeEnd = editArea.querySelector('.li-second');
        editorObj.nodeSelection.setSelectionText(document, rangeStart.childNodes[0], rangeEnd.childNodes[0], 0, 0);
        editorObj.execCommand("Formats", 'blockquote', null);
        rangeStart = editArea.querySelector('.ol-first');
        expect((rangeStart as Element).tagName.toLowerCase() === 'blockquote').toBe(true);
        editorObj.nodeSelection.Clear(document);
    });
    afterAll(() => {
        detach(wrapper);
    });
});
describe(' Pre Formats testing', () => {
    let editorObj: EditorManager;
    let wrapper: HTMLElement = createElement('div', {
        id: 'dom-node', innerHTML: innerHTML
    });
    beforeAll(() => {
        document.body.appendChild(wrapper);
        editorObj = new EditorManager({ document: document, editableElement: document.getElementById("content-edit") });
    });
    it(' apply pre format', () => {
        const editArea: HTMLElement = editorObj.editableElement as HTMLElement;
        // 'p' converts to 'pre'.
        let rangeStart: HTMLElement = editArea.querySelector('p');
        let rangeEnd: HTMLElement = editArea.querySelector('label');
        editorObj.nodeSelection.setSelectionText(document, rangeStart.childNodes[0], rangeEnd, 0, 0);
        editorObj.execCommand("Formats", 'pre', null);
        const converted: HTMLElement = editArea.querySelector('.first-p-node');
        expect(converted.tagName.toLowerCase() === 'pre').toBe(true);
        // 'h1' and 'h2' merge into a single 'pre' separated by a <br>.
        rangeStart = editArea.querySelector('.h1-tag-node');
        rangeEnd = editArea.querySelector('.h2-tag-node');
        editorObj.nodeSelection.setSelectionText(document, rangeStart, rangeEnd, 0, 0);
        editorObj.execCommand("Formats", 'pre', null);
        rangeStart = editArea.querySelector('.h1-tag-node');
        expect(rangeStart.tagName.toLowerCase() === 'pre').toBe(true);
        expect(rangeStart.innerHTML === 'Header 1<br>Header 2').toBe(true);
        editorObj.nodeSelection.Clear(document);
    });
    afterAll(() => {
        detach(wrapper);
    });
});
describe(' Header 1 Formats testing', () => {
    let editorObj: EditorManager;
    let wrapper: HTMLElement = createElement('div', {
        id: 'dom-node', innerHTML: innerHTML
    });
    beforeAll(() => {
        document.body.appendChild(wrapper);
        editorObj = new EditorManager({ document: document, editableElement: document.getElementById("content-edit") });
    });
    it(' apply h1 format', () => {
        const editArea: HTMLElement = editorObj.editableElement as HTMLElement;
        // 'p' converts to 'h1'.
        let rangeStart: HTMLElement = editArea.querySelector('p');
        let rangeEnd: HTMLElement = editArea.querySelector('label');
        editorObj.nodeSelection.setSelectionText(document, rangeStart.childNodes[0], rangeEnd, 0, 0);
        editorObj.execCommand("Formats", 'h1', null);
        const converted: HTMLElement = editArea.querySelector('.first-p-node');
        expect(converted.tagName.toLowerCase() === 'h1').toBe(true);
        // 'h2' converts to 'h1'; the node already in 'h1' is left alone.
        rangeStart = editArea.querySelector('.h1-tag-node');
        rangeEnd = editArea.querySelector('.h2-tag-node');
        editorObj.nodeSelection.setSelectionText(document, rangeStart, rangeEnd, 0, 0);
        editorObj.execCommand("Formats", 'h1', null);
        rangeStart = editArea.querySelector('.h1-tag-node');
        rangeEnd = editArea.querySelector('.h2-tag-node');
        expect(rangeStart.tagName.toLowerCase() === 'h1').toBe(true);
        expect(rangeEnd.tagName.toLowerCase() === 'h1').toBe(true);
        // The converted node must be stripped of its inline default styles.
        const clearedProps = ['display', 'marginBottom', 'marginLeft', 'marginRight', 'fontSize', 'fontWeight'];
        for (const prop of clearedProps) {
            expect((rangeEnd.style as any)[prop] === '').toBe(true);
        }
        editorObj.nodeSelection.Clear(document);
    });
    afterAll(() => {
        detach(wrapper);
    });
});
describe(' Header 2 Formats testing', () => {
    let editorObj: EditorManager;
    let wrapper: HTMLElement = createElement('div', {
        id: 'dom-node', innerHTML: innerHTML
    });
    beforeAll(() => {
        document.body.appendChild(wrapper);
        editorObj = new EditorManager({ document: document, editableElement: document.getElementById("content-edit") });
    });
    it(' apply h2 format', () => {
        const editArea: HTMLElement = editorObj.editableElement as HTMLElement;
        // 'p' converts to 'h2'.
        let rangeStart: HTMLElement = editArea.querySelector('p');
        let rangeEnd: HTMLElement = editArea.querySelector('label');
        editorObj.nodeSelection.setSelectionText(document, rangeStart.childNodes[0], rangeEnd, 0, 0);
        editorObj.execCommand("Formats", 'h2', null);
        const converted: HTMLElement = editArea.querySelector('.first-p-node');
        expect(converted.tagName.toLowerCase() === 'h2').toBe(true);
        // 'h1' converts to 'h2'; the node already in 'h2' is left alone.
        rangeStart = editArea.querySelector('.h1-tag-node');
        rangeEnd = editArea.querySelector('.h2-tag-node');
        editorObj.nodeSelection.setSelectionText(document, rangeStart, rangeEnd, 0, 0);
        editorObj.execCommand("Formats", 'h2', null);
        rangeStart = editArea.querySelector('.h1-tag-node');
        rangeEnd = editArea.querySelector('.h2-tag-node');
        expect(rangeStart.tagName.toLowerCase() === 'h2').toBe(true);
        expect(rangeEnd.tagName.toLowerCase() === 'h2').toBe(true);
        // The converted node must be stripped of its inline default styles.
        const clearedProps = ['display', 'marginBottom', 'marginLeft', 'marginRight', 'fontSize', 'fontWeight'];
        for (const prop of clearedProps) {
            expect((rangeStart.style as any)[prop] === '').toBe(true);
        }
        editorObj.nodeSelection.Clear(document);
    });
    afterAll(() => {
        detach(wrapper);
    });
});
describe(' Header 3 Formats testing', () => {
    let editorObj: EditorManager;
    let wrapper: HTMLElement = createElement('div', {
        id: 'dom-node', innerHTML: innerHTML
    });
    beforeAll(() => {
        document.body.appendChild(wrapper);
        editorObj = new EditorManager({ document: document, editableElement: document.getElementById("content-edit") });
    });
    it(' apply h3 format', () => {
        const editArea: HTMLElement = editorObj.editableElement as HTMLElement;
        // 'p' converts to 'h3'.
        let rangeStart: HTMLElement = editArea.querySelector('p');
        let rangeEnd: HTMLElement = editArea.querySelector('label');
        editorObj.nodeSelection.setSelectionText(document, rangeStart.childNodes[0], rangeEnd, 0, 0);
        editorObj.execCommand("Formats", 'h3', null);
        const converted: HTMLElement = editArea.querySelector('.first-p-node');
        expect(converted.tagName.toLowerCase() === 'h3').toBe(true);
        // A selection spanning 'h1' and 'h2' converts both to 'h3'.
        rangeStart = editArea.querySelector('.h1-tag-node');
        rangeEnd = editArea.querySelector('.h2-tag-node');
        editorObj.nodeSelection.setSelectionText(document, rangeStart, rangeEnd, 0, 0);
        editorObj.execCommand("Formats", 'h3', null);
        rangeStart = editArea.querySelector('.h1-tag-node');
        rangeEnd = editArea.querySelector('.h2-tag-node');
        expect(rangeStart.tagName.toLowerCase() === 'h3').toBe(true);
        expect(rangeEnd.tagName.toLowerCase() === 'h3').toBe(true);
        // Both converted nodes must lose their inline default styles.
        const clearedProps = ['display', 'marginBottom', 'marginLeft', 'marginRight', 'fontSize', 'fontWeight'];
        for (const node of [rangeStart, rangeEnd]) {
            for (const prop of clearedProps) {
                expect((node.style as any)[prop] === '').toBe(true);
            }
        }
        editorObj.nodeSelection.Clear(document);
    });
    afterAll(() => {
        detach(wrapper);
    });
});
describe(' Header 4 Formats testing', () => {
    let editorObj: EditorManager;
    let wrapper: HTMLElement = createElement('div', {
        id: 'dom-node', innerHTML: innerHTML
    });
    beforeAll(() => {
        document.body.appendChild(wrapper);
        editorObj = new EditorManager({ document: document, editableElement: document.getElementById("content-edit") });
    });
    it(' apply h4 format', () => {
        const editArea: HTMLElement = editorObj.editableElement as HTMLElement;
        // 'p' converts to 'h4'.
        let rangeStart: HTMLElement = editArea.querySelector('p');
        let rangeEnd: HTMLElement = editArea.querySelector('label');
        editorObj.nodeSelection.setSelectionText(document, rangeStart.childNodes[0], rangeEnd, 0, 0);
        editorObj.execCommand("Formats", 'h4', null);
        const converted: HTMLElement = editArea.querySelector('.first-p-node');
        expect(converted.tagName.toLowerCase() === 'h4').toBe(true);
        // A selection spanning 'h1' and 'h2' converts both to 'h4'.
        rangeStart = editArea.querySelector('.h1-tag-node');
        rangeEnd = editArea.querySelector('.h2-tag-node');
        editorObj.nodeSelection.setSelectionText(document, rangeStart, rangeEnd, 0, 0);
        editorObj.execCommand("Formats", 'h4', null);
        rangeStart = editArea.querySelector('.h1-tag-node');
        rangeEnd = editArea.querySelector('.h2-tag-node');
        expect(rangeStart.tagName.toLowerCase() === 'h4').toBe(true);
        expect(rangeEnd.tagName.toLowerCase() === 'h4').toBe(true);
        // Both converted nodes must lose their inline default styles.
        const clearedProps = ['display', 'marginBottom', 'marginLeft', 'marginRight', 'fontSize', 'fontWeight'];
        for (const node of [rangeStart, rangeEnd]) {
            for (const prop of clearedProps) {
                expect((node.style as any)[prop] === '').toBe(true);
            }
        }
        editorObj.nodeSelection.Clear(document);
    });
    afterAll(() => {
        detach(wrapper);
    });
});
describe(' Header 5 Formats testing', () => {
    let editorObj: EditorManager;
    let wrapper: HTMLElement = createElement('div', {
        id: 'dom-node', innerHTML: innerHTML
    });
    beforeAll(() => {
        document.body.appendChild(wrapper);
        editorObj = new EditorManager({ document: document, editableElement: document.getElementById("content-edit") });
    });
    it(' apply h5 format', () => {
        const editArea: HTMLElement = editorObj.editableElement as HTMLElement;
        // 'p' converts to 'h5'.
        let rangeStart: HTMLElement = editArea.querySelector('p');
        let rangeEnd: HTMLElement = editArea.querySelector('label');
        editorObj.nodeSelection.setSelectionText(document, rangeStart.childNodes[0], rangeEnd, 0, 0);
        editorObj.execCommand("Formats", 'h5', null);
        const converted: HTMLElement = editArea.querySelector('.first-p-node');
        expect(converted.tagName.toLowerCase() === 'h5').toBe(true);
        // A selection spanning 'h1' and 'h2' converts both to 'h5'.
        rangeStart = editArea.querySelector('.h1-tag-node');
        rangeEnd = editArea.querySelector('.h2-tag-node');
        editorObj.nodeSelection.setSelectionText(document, rangeStart, rangeEnd, 0, 0);
        editorObj.execCommand("Formats", 'h5', null);
        rangeStart = editArea.querySelector('.h1-tag-node');
        rangeEnd = editArea.querySelector('.h2-tag-node');
        expect(rangeStart.tagName.toLowerCase() === 'h5').toBe(true);
        expect(rangeEnd.tagName.toLowerCase() === 'h5').toBe(true);
        // Both converted nodes must lose their inline default styles.
        const clearedProps = ['display', 'marginBottom', 'marginLeft', 'marginRight', 'fontSize', 'fontWeight'];
        for (const node of [rangeStart, rangeEnd]) {
            for (const prop of clearedProps) {
                expect((node.style as any)[prop] === '').toBe(true);
            }
        }
        editorObj.nodeSelection.Clear(document);
    });
    afterAll(() => {
        detach(wrapper);
    });
});
describe(' Header 6 Formats testing', () => {
    let editorObj: EditorManager;
    let wrapper: HTMLElement = createElement('div', {
        id: 'dom-node', innerHTML: innerHTML
    });
    beforeAll(() => {
        document.body.appendChild(wrapper);
        editorObj = new EditorManager({ document: document, editableElement: document.getElementById("content-edit") });
    });
    it(' apply h6 format', () => {
        const editArea: HTMLElement = editorObj.editableElement as HTMLElement;
        // 'p' converts to 'h6'.
        let rangeStart: HTMLElement = editArea.querySelector('p');
        let rangeEnd: HTMLElement = editArea.querySelector('label');
        editorObj.nodeSelection.setSelectionText(document, rangeStart.childNodes[0], rangeEnd, 0, 0);
        editorObj.execCommand("Formats", 'h6', null);
        const converted: HTMLElement = editArea.querySelector('.first-p-node');
        expect(converted.tagName.toLowerCase() === 'h6').toBe(true);
        // A selection spanning 'h1' and 'h2' converts both to 'h6'.
        rangeStart = editArea.querySelector('.h1-tag-node');
        rangeEnd = editArea.querySelector('.h2-tag-node');
        editorObj.nodeSelection.setSelectionText(document, rangeStart, rangeEnd, 0, 0);
        editorObj.execCommand("Formats", 'h6', null);
        rangeStart = editArea.querySelector('.h1-tag-node');
        rangeEnd = editArea.querySelector('.h2-tag-node');
        expect(rangeStart.tagName.toLowerCase() === 'h6').toBe(true);
        expect(rangeEnd.tagName.toLowerCase() === 'h6').toBe(true);
        // Both converted nodes must lose their inline default styles.
        const clearedProps = ['display', 'marginBottom', 'marginLeft', 'marginRight', 'fontSize', 'fontWeight'];
        for (const node of [rangeStart, rangeEnd]) {
            for (const prop of clearedProps) {
                expect((node.style as any)[prop] === '').toBe(true);
            }
        }
        editorObj.nodeSelection.Clear(document);
    });
    afterAll(() => {
        detach(wrapper);
    });
});
// This suite previously duplicated the ' Header 6 Formats testing' title of the
// preceding suite; renamed so failures are distinguishable in the runner output.
describe(' Header 6 Formats testing with table as last element', () => {
    let editorObj: EditorManager;
    // Three paragraphs followed by a 2x2 table; the table is intentionally the
    // last child of the editable element.
    let tableContent: string = `<div style="color:red;" id="content-edit" contenteditable="true" class="e-node-deletable e-node-inner"><p id='p1'>Paragraph 1</p><p id='p2'>Paragraph 2</p><p id='p3'>Paragraph 3</p><table><tbody><tr><td>cell 1 1</td><td>cell 1 2</td></tr><tr><td>cell 2 1</td><td id="lastCell">Cell 2 2</td></tr></tbody></table></div>`;
    let elem: HTMLElement = createElement('div', {
        id: 'dom-node', innerHTML: tableContent
    });
    beforeAll(() => {
        document.body.appendChild(elem);
        editorObj = new EditorManager({ document: document, editableElement: document.getElementById("content-edit") });
    });
    it('select all with last element as table', () => {
        let elem: HTMLElement = editorObj.editableElement as HTMLElement;
        let start: HTMLElement = elem.querySelector('#p1');
        // Select from the first paragraph across all four children of the
        // editable element, so the selection end falls on the table.
        // (An unused '#lastCell' lookup was removed here.)
        editorObj.nodeSelection.setSelectionText(document, start.childNodes[0], editorObj.editableElement, 0, 4);
        editorObj.execCommand("Formats", 'h6', null);
        // After formatting, the selection must still terminate inside the last
        // table cell, at the end of its text.
        expect(window.getSelection().focusNode.textContent === 'Cell 2 2').toBe(true);
        expect(window.getSelection().focusOffset === 8).toBe(true);
        editorObj.nodeSelection.Clear(document);
    });
    afterAll(() => {
        detach(elem);
    });
});
describe(' Pre Formats testing', () => {
    let editorObj: EditorManager;
    // An ordered list whose items hold bare text nodes (no wrapping <p> tags).
    let listMarkup: string = `<div style="color:red;" id="content-edit" contenteditable="true" class="e-node-deletable e-node-inner"><ol><li class="l1">list1</li><li>list2</li><li class="l3">list3</li></ol></div>`;
    let wrapper: HTMLElement = createElement('div', {
        id: 'dom-node', innerHTML: listMarkup
    });
    beforeAll(() => {
        document.body.appendChild(wrapper);
        editorObj = new EditorManager({ document: document, editableElement: document.getElementById("content-edit") });
    });
    it('Apply pre to List with out P tag in list', () => {
        const editArea: HTMLElement = editorObj.editableElement as HTMLElement;
        const firstItem: HTMLElement = editArea.querySelector('.l1');
        const lastItem: HTMLElement = editArea.querySelector('.l3');
        editorObj.nodeSelection.setSelectionText(document, firstItem.childNodes[0], lastItem.childNodes[0], 0, 2);
        editorObj.execCommand("Formats", 'pre', null);
        // Each selected list item is converted to a pre, keeping its text and class.
        expect(editArea.querySelector('pre.l1').textContent === 'list1').toBe(true);
        expect(editArea.querySelector('pre.l3').textContent === 'list3').toBe(true);
        editorObj.nodeSelection.Clear(document);
    });
    afterAll(() => {
        detach(wrapper);
    });
});
describe(' Pre format testing', () => {
    let editorObj: EditorManager;
    // Three paragraphs followed by a 2x2 table; the table is the last child of
    // the editable element.
    let tableContent: string = `<div style="color:red;" id="content-edit" contenteditable="true" class="e-node-deletable e-node-inner"><p id='p1'>Paragraph 1</p><p id='p2'>Paragraph 2</p><p id='p3'>Paragraph 3</p><table><tbody><tr><td>cell 1 1</td><td>cell 1 2</td></tr><tr><td>cell 2 1</td><td id="lastCell">Cell 2 2</td></tr></tbody></table></div>`;
    let elem: HTMLElement = createElement('div', {
        id: 'dom-node', innerHTML: tableContent
    });
    beforeAll(() => {
        document.body.appendChild(elem);
        editorObj = new EditorManager({ document: document, editableElement: document.getElementById("content-edit") });
    });
    it('select all with last element as table', () => {
        let elem: HTMLElement = editorObj.editableElement as HTMLElement;
        let start: HTMLElement = elem.querySelector('#p1');
        // Select from the first paragraph across all four children of the
        // editable element. (An unused '#lastCell' lookup was removed here.)
        editorObj.nodeSelection.setSelectionText(document, start.childNodes[0], editorObj.editableElement, 0, 4);
        editorObj.execCommand("Formats", 'pre', null);
        // The three paragraphs are merged into a single pre joined by <br>s...
        expect(elem.querySelector('#p1').innerHTML === 'Paragraph 1<br>Paragraph 2<br>Paragraph 3').toBe(true);
        // ...while each of the four table cells gets its own pre.
        expect(elem.querySelector('table').querySelectorAll('pre').length === 4).toBe(true);
        editorObj.nodeSelection.Clear(document);
    });
    afterAll(() => {
        detach(elem);
    });
});
// NOTE: the specs in this suite intentionally share DOM state — each `it`
// asserts the exact innerHTML left behind by the previous one, so they must
// run in order and none may be skipped or reordered.
describe(' Pre format Enter key testing', () => {
    let editorObj: EditorManager;
    // Synthetic Enter key event (which: 13) without shift.
    let keyBoardEvent: any = { callBack: () => { }, event: { action: null, preventDefault: () => { }, shiftKey: false, which: 13 } };
    // Single pre with six lines separated by <br> tags.
    let tableContent: string = `<div style="color:red;" id="content-edit" contenteditable="true" class="e-node-deletable e-node-inner">
<pre id='p1'>Paragraph 1<br>Paragraph 2<br>Paragraph 3<br>Paragraph 4<br>Paragraph 5<br>Paragraph 6</pre>
</div>`;
    let elem: HTMLElement = createElement('div', {
        id: 'dom-node', innerHTML: tableContent
    });
    beforeAll(() => {
        document.body.appendChild(elem);
        editorObj = new EditorManager({ document: document, editableElement: document.getElementById("content-edit") });
    });
    it('Enter key at the start of the pre text', () => {
        let elem: HTMLElement = editorObj.editableElement as HTMLElement;
        let start: HTMLElement = elem.querySelector('#p1');
        // Cursor before the very first character: a leading <br> is inserted.
        editorObj.nodeSelection.setCursorPoint(document, start.childNodes[0] as Element, 0);
        expect(start.innerHTML === 'Paragraph 1<br>Paragraph 2<br>Paragraph 3<br>Paragraph 4<br>Paragraph 5<br>Paragraph 6').toBe(true);
        (editorObj as any).editorKeyDown(keyBoardEvent);
        expect(start.innerHTML === '<br>Paragraph 1<br>Paragraph 2<br>Paragraph 3<br>Paragraph 4<br>Paragraph 5<br>Paragraph 6').toBe(true);
    });
    it('Enter key at the middle of the pre text', () => {
        let elem: HTMLElement = editorObj.editableElement as HTMLElement;
        let start: HTMLElement = elem.querySelector('#p1');
        // Cursor inside 'Paragraph 1' after 'Para': the line is split by a <br>.
        editorObj.nodeSelection.setCursorPoint(document, start.childNodes[1] as Element, 4);
        expect(start.innerHTML === '<br>Paragraph 1<br>Paragraph 2<br>Paragraph 3<br>Paragraph 4<br>Paragraph 5<br>Paragraph 6').toBe(true);
        (editorObj as any).editorKeyDown(keyBoardEvent);
        let expectedElem: HTMLElement = elem.querySelector('#p1');
        expect(expectedElem.innerHTML === '<br>Para<br>graph 1<br>Paragraph 2<br>Paragraph 3<br>Paragraph 4<br>Paragraph 5<br>Paragraph 6').toBe(true);
    });
    it('Enter key at the middle of the pre text but at the end of the current line', () => {
        let elem: HTMLElement = editorObj.editableElement as HTMLElement;
        let start: HTMLElement = elem.querySelector('#p1');
        // Cursor at the end of the 'Para' line: an extra <br> is added after it.
        editorObj.nodeSelection.setCursorPoint(document, start.childNodes[1] as Element, 4);
        expect(start.innerHTML === '<br>Para<br>graph 1<br>Paragraph 2<br>Paragraph 3<br>Paragraph 4<br>Paragraph 5<br>Paragraph 6').toBe(true);
        (editorObj as any).editorKeyDown(keyBoardEvent);
        let expectedElem: HTMLElement = elem.querySelector('#p1');
        expect(expectedElem.innerHTML === '<br>Para<br><br>graph 1<br>Paragraph 2<br>Paragraph 3<br>Paragraph 4<br>Paragraph 5<br>Paragraph 6').toBe(true);
    });
    it('Enter key at the start of the pre textbefore br tag', () => {
        let elem: HTMLElement = editorObj.editableElement as HTMLElement;
        let start: HTMLElement = elem.querySelector('#p1');
        // Cursor directly on a node that precedes a <br>: a <br> is prepended.
        editorObj.nodeSelection.setCursorPoint(document, start.childNodes[2] as Element, 0);
        expect(start.innerHTML === '<br>Para<br><br>graph 1<br>Paragraph 2<br>Paragraph 3<br>Paragraph 4<br>Paragraph 5<br>Paragraph 6').toBe(true);
        (editorObj as any).editorKeyDown(keyBoardEvent);
        let expectedElem: HTMLElement = elem.querySelector('#p1');
        expect(expectedElem.innerHTML === '<br><br>Para<br><br>graph 1<br>Paragraph 2<br>Paragraph 3<br>Paragraph 4<br>Paragraph 5<br>Paragraph 6').toBe(true);
    });
    it('Enter key at the middle of the pre text inbetween br tag', () => {
        let elem: HTMLElement = editorObj.editableElement as HTMLElement;
        let start: HTMLElement = elem.querySelector('#p1');
        // Cursor addressed by child index on the pre itself (between <br>s).
        editorObj.nodeSelection.setCursorPoint(document, start as Element, 4);
        expect(start.innerHTML === '<br><br>Para<br><br>graph 1<br>Paragraph 2<br>Paragraph 3<br>Paragraph 4<br>Paragraph 5<br>Paragraph 6').toBe(true);
        (editorObj as any).editorKeyDown(keyBoardEvent);
        let expectedElem: HTMLElement = elem.querySelector('#p1');
        expect(expectedElem.innerHTML === '<br><br>Para<br><br><br>graph 1<br>Paragraph 2<br>Paragraph 3<br>Paragraph 4<br>Paragraph 5<br>Paragraph 6').toBe(true);
    });
    it('Enter key with selection at the middle of the pre tag', () => {
        let elem: HTMLElement = editorObj.editableElement as HTMLElement;
        let start: HTMLElement = elem.querySelector('#p1');
        // Non-collapsed selection: the selected content is removed and a <br>
        // replaces it.
        editorObj.nodeSelection.setSelectionText(document, start.childNodes[2], start.childNodes[6], 1, 3);
        expect(start.innerHTML === '<br><br>Para<br><br><br>graph 1<br>Paragraph 2<br>Paragraph 3<br>Paragraph 4<br>Paragraph 5<br>Paragraph 6').toBe(true);
        (editorObj as any).editorKeyDown(keyBoardEvent);
        let expectedElem: HTMLElement = elem.querySelector('#p1');
        expect(expectedElem.innerHTML === '<br><br>P<br>ph 1<br>Paragraph 2<br>Paragraph 3<br>Paragraph 4<br>Paragraph 5<br>Paragraph 6').toBe(true);
    });
    it('Enter key with selection as start to middle of the pre tag', () => {
        let elem: HTMLElement = editorObj.editableElement as HTMLElement;
        let start: HTMLElement = elem.querySelector('#p1');
        // Selection from the first child through child index 10: everything
        // selected collapses to a single leading <br>.
        editorObj.nodeSelection.setSelectionText(document, start, start, 0, 10);
        expect(start.innerHTML === '<br><br>P<br>ph 1<br>Paragraph 2<br>Paragraph 3<br>Paragraph 4<br>Paragraph 5<br>Paragraph 6').toBe(true);
        (editorObj as any).editorKeyDown(keyBoardEvent);
        let expectedElem: HTMLElement = elem.querySelector('#p1');
        expect(expectedElem.innerHTML === '<br>Paragraph 4<br>Paragraph 5<br>Paragraph 6').toBe(true);
    });
    it('Enter key with end of the pre tag', () => {
        let elem: HTMLElement = editorObj.editableElement as HTMLElement;
        let start: HTMLElement = elem.querySelector('#p1');
        // Cursor after the last character of the final line: two trailing <br>s
        // are produced (one visible empty line plus the line-break itself).
        editorObj.nodeSelection.setCursorPoint(document, start.childNodes[5] as Element, 11);
        expect(start.innerHTML === '<br>Paragraph 4<br>Paragraph 5<br>Paragraph 6').toBe(true);
        (editorObj as any).editorKeyDown(keyBoardEvent);
        let expectedElem: HTMLElement = elem.querySelector('#p1');
        expect(expectedElem.innerHTML === '<br>Paragraph 4<br>Paragraph 5<br>Paragraph 6<br><br>').toBe(true);
    });
    it('Double enter key press at the end of the pre tag', () => {
        let elem: HTMLElement = editorObj.editableElement as HTMLElement;
        let start: HTMLElement = elem.querySelector('#p1');
        // A second Enter on the trailing empty line exits the pre: the extra
        // <br> is dropped and a new paragraph is created after the pre.
        editorObj.nodeSelection.setCursorPoint(document, start.childNodes[7] as Element, 0);
        (editorObj as any).editorKeyDown(keyBoardEvent);
        let expectedElem: HTMLElement = elem.querySelector('#p1');
        expect(expectedElem.innerHTML === `<br>Paragraph 4<br>Paragraph 5<br>Paragraph 6<br>`).toBe(true);
        expect(expectedElem.nextElementSibling.tagName === 'P').toBe(true);
    });
    afterAll(() => {
        detach(elem);
    });
});
// NOTE: the two specs below share DOM state — the second depends on the
// mutations made by the first.
describe(' Pre format Enter key testing - 2', () => {
    let editorObj: EditorManager;
    // Synthetic Enter key event (which: 13) without shift.
    let keyBoardEvent: any = { callBack: () => { }, event: { action: null, preventDefault: () => { }, shiftKey: false, which: 13 } };
    // A pre element surrounded by two paragraphs, so selections can cross the
    // pre boundary in both directions.
    let tableContent: string = `<div style="color:red;" id="content-edit" contenteditable="true" class="e-node-deletable e-node-inner">
<p id="prePara">previous paragraph</p><pre id='p1'>Paragraph 1<br>Paragraph 2</pre><p id="nextPara">Paragraph</p>
</div>`;
    let elem: HTMLElement = createElement('div', {
        id: 'dom-node', innerHTML: tableContent
    });
    beforeAll(() => {
        document.body.appendChild(elem);
        editorObj = new EditorManager({ document: document, editableElement: document.getElementById("content-edit") });
    });
    it('Enter key at the start of the pre text', () => {
        let elem: HTMLElement = editorObj.editableElement as HTMLElement;
        let start: HTMLElement = elem.querySelector('#prePara');
        let end: HTMLElement = elem.querySelector('#p1');
        // Selection spans from the previous paragraph into the pre ('Para|graph 1').
        editorObj.nodeSelection.setSelectionText(document, start.childNodes[0], end.childNodes[0], 0, 4);
        (editorObj as any).editorKeyDown(keyBoardEvent);
        // The paragraph is emptied and the selected prefix is removed from the pre.
        expect(elem.querySelector('#prePara').textContent === '').toBe(true);
        expect(elem.querySelector('#p1').innerHTML === 'graph 1<br>Paragraph 2').toBe(true);
    });
    // Was a copy-pasted duplicate of the previous spec's title; renamed to
    // describe the scenario it actually covers.
    it('Enter key with selection from pre into the next paragraph', () => {
        let elem: HTMLElement = editorObj.editableElement as HTMLElement;
        let start: HTMLElement = elem.querySelector('#p1');
        let end: HTMLElement = elem.querySelector('#nextPara');
        // Selection spans from inside the pre's second line into the whole of
        // the following paragraph.
        editorObj.nodeSelection.setSelectionText(document, start.childNodes[2], end.childNodes[0], 4, 9);
        (editorObj as any).editorKeyDown(keyBoardEvent);
        expect(elem.querySelector('#nextPara').textContent === '').toBe(true);
        expect(elem.querySelector('#p1').innerHTML === 'graph 1<br>Para').toBe(true);
    });
    // A stray no-op string-literal expression statement (leftover test markup)
    // was removed here.
    afterAll(() => {
        detach(elem);
    });
});
describe(' Pre format Enter key testing - 3', () => {
    let editorObj: EditorManager;
    // Synthetic Enter key event (which: 13) without shift.
    const enterEvent: any = { callBack: () => { }, event: { action: null, preventDefault: () => { }, shiftKey: false, which: 13 } };
    // Pre containing span-wrapped lines separated by <br> tags. The markup is
    // taken verbatim from the original fixture (including the '<span;">' typo
    // in the first span).
    const spanMarkup: string = `<div style="color:red;" id="content-edit" contenteditable="true" class="e-node-deletable e-node-inner">
<pre><span;">span 1</span><br><span>span 2</span><br><span>span 3</span><br><br><span id="selectSpan">revanth</span><br><span>span 5</span></pre>
</div>`;
    const wrapper: HTMLElement = createElement('div', {
        id: 'dom-node', innerHTML: spanMarkup
    });
    beforeAll(() => {
        document.body.appendChild(wrapper);
        editorObj = new EditorManager({ document: document, editableElement: document.getElementById("content-edit") });
    });
    it('Enter key at the middle of the pre text with style and span inside', () => {
        const editArea: HTMLElement = editorObj.editableElement as HTMLElement;
        const target: HTMLElement = editArea.querySelector('#selectSpan');
        // Cursor after the last character of the selected span's text.
        editorObj.nodeSelection.setCursorPoint(document, target.childNodes[0] as Element, 7);
        expect(editArea.querySelectorAll('br').length === 5).toBe(true);
        (editorObj as any).editorKeyDown(enterEvent);
        // Exactly one additional <br> must be inserted.
        expect(editArea.querySelectorAll('br').length === 6).toBe(true);
    });
    afterAll(() => {
        detach(wrapper);
    });
});
describe(' Pre format Enter key with list as parent testing - 3', () => {
  let editorObj: EditorManager;
  // Plain Enter (which === 13, no shift) key-down payload.
  const enterKeyEvent: any = { callBack: () => { }, event: { action: null, preventDefault: () => { }, shiftKey: false, which: 13 } };
  // Fixture: a pre element nested inside an ordered-list item.
  const fixtureMarkup: string = `<div style="color:red;" id="content-edit" contenteditable="true" class="e-node-deletable e-node-inner">
  <ol><li><pre id='listPre'>para</pre></li></ol>
  </div>`;
  const wrapper: HTMLElement = createElement('div', {
    id: 'dom-node', innerHTML: fixtureMarkup
  });
  beforeAll(() => {
    document.body.appendChild(wrapper);
    editorObj = new EditorManager({ document: document, editableElement: document.getElementById("content-edit") });
  });
  it('Enter key at the end of the list', () => {
    const editable: HTMLElement = editorObj.editableElement as HTMLElement;
    const preInList: HTMLElement = editable.querySelector('#listPre');
    // Cursor after "para" (offset 4); inside a list item, Enter must NOT
    // insert a <br> inside the pre.
    editorObj.nodeSelection.setCursorPoint(document, preInList.childNodes[0] as Element, 4);
    (editorObj as any).editorKeyDown(enterKeyEvent);
    expect(editable.querySelectorAll('br').length === 0).toBe(true);
  });
  afterAll(() => {
    detach(wrapper);
  });
});
describe('Empty pre format with testing', () => {
  let editorObj: EditorManager;
  // Plain Enter (which === 13, no shift) key-down payload.
  const enterKeyEvent: any = { callBack: () => { }, event: { action: null, preventDefault: () => { }, shiftKey: false, which: 13 } };
  // Fixture: a pre element whose only content is a single <br>.
  const fixtureMarkup: string = `<div style="color:red;" id="content-edit" contenteditable="true" class="e-node-deletable e-node-inner">
  <pre id="selectPre"><br></pre>
  </div>`;
  const wrapper: HTMLElement = createElement('div', {
    id: 'dom-node', innerHTML: fixtureMarkup
  });
  beforeAll(() => {
    document.body.appendChild(wrapper);
    editorObj = new EditorManager({ document: document, editableElement: document.getElementById("content-edit") });
  });
  it('Enter key press when pre is empty', () => {
    const editable: HTMLElement = editorObj.editableElement as HTMLElement;
    const emptyPre: HTMLElement = editable.querySelector('#selectPre');
    editorObj.nodeSelection.setCursorPoint(document, emptyPre.childNodes[0] as Element, 2);
    (editorObj as any).editorKeyDown(enterKeyEvent);
    // Enter in an empty pre adds a second <br>.
    expect(editable.querySelectorAll('br').length === 2).toBe(true);
  });
  afterAll(() => {
    detach(wrapper);
  });
});
describe(' Apply heading format with content having commensts tag testing', () => {
  let editorObj: EditorManager;
  // Fixture markup interleaved with HTML comment nodes (as produced by React
  // text boundaries); both paragraphs should still convert to headings.
  const fixtureMarkup: string = `<div style="color:red;" id="content-edit" contenteditable="true" class="e-node-deletable e-node-inner"><p id="p1">The rich text editor is WYSIWYG ("what you see is what you get") editor useful to create and edit content, and return the valid <!-- /react-text --><a href="https://ej2.syncfusion.com/home/" target="_blank">HTML markup</a><!-- react-text: 30 --> or <!-- /react-text --><a href="https://ej2.syncfusion.com/home/" target="_blank">markdown</a><!-- react-text: 32 --> of the content<!-- /react-text --></p><!-- react-text: 33 --> <!-- /react-text --><p id="lastPara">Toolbar</p></div>`;
  const wrapper: HTMLElement = createElement('div', {
    id: 'dom-node', innerHTML: fixtureMarkup
  });
  beforeAll(() => {
    document.body.appendChild(wrapper);
    editorObj = new EditorManager({ document: document, editableElement: document.getElementById("content-edit") });
  });
  it('select all with last element as table', () => {
    const editable: HTMLElement = editorObj.editableElement as HTMLElement;
    const selStart: HTMLElement = editable.querySelector('#p1');
    const selEnd: HTMLElement = editable.querySelector('#lastPara');
    editorObj.nodeSelection.setSelectionText(document, selStart.childNodes[0], selEnd.childNodes[0], 0, 7);
    editorObj.execCommand("Formats", 'h1', null);
    // Both paragraphs in the selection become H1, comment nodes notwithstanding.
    expect(editable.querySelector('#p1').tagName === 'H1').toBe(true);
    expect(editable.querySelector('#lastPara').tagName === 'H1').toBe(true);
    editorObj.nodeSelection.Clear(document);
  });
  afterAll(() => {
    detach(wrapper);
  });
});
}); | the_stack |
import type {
PassthroughErrorInstance,
ExternalMethods,
ReadModelCursor,
ReadModelProcedureLedger,
ProcedureResult,
EventThreadData,
ReadModelEvent,
} from './types'
import getLog from './get-log'
import { LeveledDebugger } from '@resolve-js/debug-levels'
// Converts an Error (optionally carrying a `code`) into a plain JSON-friendly
// record of string fields, so it can be stored in the ledger's "Errors"
// jsonb column. A nullish error passes through as null.
const serializeError = (error: Error & { code: number }) => {
  if (error == null) {
    return null
  }
  return {
    name: error.name == null ? null : String(error.name),
    code: error.code == null ? null : String(error.code),
    message: String(error.message),
    stack: String(error.stack),
  }
}
// Sentinel used to abort a build that exceeds its time budget; it is compared
// by identity (`error === immediatelyStopError`) in `build`'s catch clause.
const immediatelyStopError = new Error('ImmediatelyStopError')
// Safety margin (45 s) subtracted from the caller-supplied vacant time so the
// build winds down before the hosting environment's hard deadline.
const immediatelyStopTimeout = 45 * 1000
// Runs the read model's Init projection handler inside a single REPEATABLE
// READ transaction. The ledger row is locked with FOR NO KEY UPDATE NOWAIT so
// a concurrent builder fails immediately instead of blocking; the
// `1/Count(...)` projection deliberately divides by zero when the row is not
// acquirable, aborting the query (presumably surfaced to JS as a
// PassthroughError by inlineLedgerRunQuery — the catch below rethrows those).
// On success the ledger's SuccessEvent/Cursor are committed; on handler
// failure the serialized error is appended to "Errors" and FailedEvent is
// recorded — both paths COMMIT so the lock is released.
const buildInit: (
  currentPool: {
    ledgerTableNameAsId: string
    databaseNameAsId: string
    eventTypes: Array<string> | null
    inputCursor: ReadModelCursor
    readModelLedger: ReadModelProcedureLedger
    xaKey: string
    log: LeveledDebugger
  },
  ...args: Parameters<ExternalMethods['build']>
) => ReturnType<ExternalMethods['build']> = async (
  currentPool,
  basePool,
  readModelName,
  store,
  modelInterop,
  next,
  eventstoreAdapter
) => {
  // currentPool values shadow basePool ones on key collisions.
  const pool = { ...basePool, ...currentPool }
  const {
    PassthroughError,
    inlineLedgerRunQuery,
    generateGuid,
    escapeStr,
    databaseNameAsId,
    ledgerTableNameAsId,
    xaKey,
    log,
  } = pool
  const rootSavePointId = generateGuid(xaKey, 'ROOT')
  log.debug(`Begin init transaction`)
  // Open the transaction and acquire the ledger row lock in one round-trip.
  await inlineLedgerRunQuery(
    `BEGIN TRANSACTION;
    SET TRANSACTION ISOLATION LEVEL REPEATABLE READ;
    SAVEPOINT ${rootSavePointId};
    WITH "CTE" AS (
      SELECT "XaKey" FROM ${databaseNameAsId}.${ledgerTableNameAsId}
        WHERE "EventSubscriber" = ${escapeStr(readModelName)}
        AND "XaKey" = ${escapeStr(xaKey)}
        AND "IsPaused" = FALSE
        AND "Errors" IS NULL
        FOR NO KEY UPDATE NOWAIT
    )
    SELECT 1/Count("CTE"."XaKey") AS "NonZero" FROM "CTE";
    `,
    true
  )
  log.debug(`Building of cursor`)
  // Initial cursor derived from an empty event list.
  const nextCursor = await eventstoreAdapter.getNextCursor(null, [])
  try {
    const handler = await modelInterop.acquireInitHandler(store)
    if (handler != null) {
      log.debug(`Running of init handler`)
      await handler()
    }
    // TODO Init via plv8
    log.debug(`Commit transaction with SuccessEvent`)
    await inlineLedgerRunQuery(
      `UPDATE ${databaseNameAsId}.${ledgerTableNameAsId}
      SET "SuccessEvent" = ${escapeStr(JSON.stringify({ type: 'Init' }))},
      "Cursor" = ${escapeStr(JSON.stringify(nextCursor))}
      WHERE "EventSubscriber" = ${escapeStr(readModelName)};
      COMMIT;
      `
    )
    // Proceed to the event-building phase.
    await next()
  } catch (error) {
    // Lock-contention / control-flow errors propagate unchanged.
    if (error instanceof PassthroughError) {
      throw error
    }
    log.debug(`Init handler execution failed. Commit transaction with error`)
    // Append the serialized error to the jsonb "Errors" array at its end
    // (index = current array length) and record the failed Init event.
    await inlineLedgerRunQuery(
      `UPDATE ${databaseNameAsId}.${ledgerTableNameAsId}
      SET "Errors" = jsonb_insert(
        COALESCE("Errors", jsonb('[]')),
        CAST(('{' || jsonb_array_length(COALESCE("Errors", jsonb('[]'))) || '}') AS TEXT[]),
        jsonb(${escapeStr(JSON.stringify(serializeError(error)))})
      ),
      "FailedEvent" = ${escapeStr(JSON.stringify({ type: 'Init' }))},
      "Cursor" = ${escapeStr(JSON.stringify(nextCursor))}
      WHERE "EventSubscriber" = ${escapeStr(readModelName)};
      COMMIT;
      `
    )
  }
}
// Applies pending events to the read model in ledger-locked transactions.
// Each loop iteration either:
//   * runs the projection procedurally — a PLV8 stored procedure
//     (`PROC-<readModelName>`) applies a whole batch database-side, optionally
//     reading events straight from a co-located events table discovered via
//     eventstoreAdapter.describe(); or
//   * runs the regular workflow — events loaded through the adapter are
//     applied one by one in Node, each guarded by its own SAVEPOINT so a
//     failing event rolls back alone.
// The next batch is prefetched concurrently with applying the current one.
// The loop terminates by throwing PassthroughError(false) once no further
// progress is possible or the time budget runs out.
const buildEvents: (
  currentPool: {
    ledgerTableNameAsId: string
    databaseNameAsId: string
    eventTypes: Array<string> | null
    inputCursor: ReadModelCursor
    readModelLedger: ReadModelProcedureLedger
    xaKey: string
    metricData: any
    log: LeveledDebugger
  },
  ...args: Parameters<ExternalMethods['build']>
) => ReturnType<ExternalMethods['build']> = async (
  currentPool,
  basePool,
  readModelName,
  store,
  modelInterop,
  next,
  eventstoreAdapter,
  getVacantTimeInMillis,
  buildInfo
) => {
  const pool = { ...basePool, ...currentPool }
  const {
    readModelLedger: { IsProcedural: inputIsProcedural },
    PassthroughError,
    checkEventsContinuity,
    inlineLedgerRunQuery,
    generateGuid,
    escapeStr,
    databaseNameAsId,
    ledgerTableNameAsId,
    buildMode,
    metricData,
    monitoring,
    inputCursor,
    eventTypes,
    escapeId,
    useSqs,
    xaKey,
    log,
  } = pool
  const { eventsWithCursors } = buildInfo
  // 'nodejs' build mode forces the regular workflow even when a stored
  // procedure exists for this read model.
  let isProcedural = inputIsProcedural
  if (buildMode === 'nodejs') {
    isProcedural = false
  }
  const isContinuousMode =
    typeof eventstoreAdapter.getCursorUntilEventTypes === 'function' && useSqs
  // Advances the cursor past `events`; in continuous (SQS) mode it is also
  // fast-forwarded over events whose types this read model ignores.
  const getContinuousLatestCursor = async (
    cursor: ReadModelCursor,
    events: Array<EventThreadData>,
    eventTypes: Array<string> | null
  ) => {
    let nextCursor = await eventstoreAdapter.getNextCursor(cursor, events)
    if (isContinuousMode && eventTypes != null) {
      // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
      nextCursor = await eventstoreAdapter.getCursorUntilEventTypes!(
        nextCursor,
        eventTypes
      )
    }
    return nextCursor
  }
  let lastSuccessEvent = null
  let lastFailedEvent = null
  let lastError = null
  let localContinue = true
  let cursor = inputCursor
  const groupMonitoring =
    monitoring != null
      ? monitoring
          .group({ Part: 'ReadModelProjection' })
          .group({ ReadModel: readModelName })
      : null
  const firstEventsLoadStartTimestamp = Date.now()
  let eventsApplyStartTimestamp = Date.now()
  let eventCount = 0
  let projectionApplyTime = 0
  // Events pushed in-band with the notification can be applied directly —
  // but only when they form a gapless continuation of the current cursor.
  const hotEvents: Array<ReadModelEvent> | null =
    isContinuousMode &&
    Array.isArray(eventsWithCursors) &&
    checkEventsContinuity(inputCursor, eventsWithCursors)
      ? eventsWithCursors.map(({ event }) => event)
      : null
  type LoadEventsResult = ['ok', ReadModelEvent[]] | ['error', Error]
  // Loads a batch of events and records load-duration metrics. Errors are
  // returned as a value (not thrown) so the result can be awaited lazily.
  const loadEventsWithMonitoring = async (
    cursorPromise: Promise<ReadModelCursor>,
    limit: number,
    initialTimestamp: number
  ): Promise<LoadEventsResult> => {
    try {
      const cursor = await cursorPromise
      const { events } = await eventstoreAdapter.loadEvents({
        eventTypes,
        eventsSizeLimit: 65536 * limit,
        limit,
        cursor,
      })
      const loadDuration = Date.now() - initialTimestamp
      if (groupMonitoring != null) {
        if (events.length > 0) {
          groupMonitoring.duration(
            'EventLoad',
            loadDuration / events.length,
            events.length
          )
        }
        groupMonitoring.duration('BatchLoad', loadDuration)
      }
      return ['ok', events]
    } catch (error) {
      return ['error', error]
    }
  }
  // Kick off the first (small, limit=100) batch load immediately — unless hot
  // events already cover it.
  let eventsPromise: Promise<LoadEventsResult | null> =
    hotEvents == null
      ? loadEventsWithMonitoring(
          Promise.resolve(cursor),
          100,
          firstEventsLoadStartTimestamp
        )
      : Promise.resolve(['ok', hotEvents])
  // Concurrently probe whether the eventstore's own tables are reachable from
  // this database connection, which allows the PLV8 procedure to read events
  // locally instead of receiving them as an argument.
  const eventstoreLocalResourcePromise = (async () => {
    let resourceNames = null
    try {
      void ({ resourceNames } =
        hotEvents == null && ['plv8', 'plv8-internal'].includes(buildMode)
          ? await eventstoreAdapter.describe()
          : { resourceNames: null })
    } catch (err) {}
    if (
      resourceNames == null ||
      resourceNames?.eventsTableName == null ||
      resourceNames?.databaseName == null ||
      !isProcedural
    ) {
      return null
    }
    const databaseNameAsId = escapeId(resourceNames.databaseName)
    const eventsTableNameAsId = escapeId(resourceNames.eventsTableName)
    try {
      // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
      const currentPromiseResult = (await eventsPromise)!
      if (currentPromiseResult[0] === 'error') {
        throw currentPromiseResult[1]
      }
      // Verify the adapter-loaded last event is present verbatim in the local
      // events table (1/Count fails when it is not), proving both views are
      // the same store.
      const immediateLastEvent = currentPromiseResult[1].slice(-1)[0]
      await inlineLedgerRunQuery(`
        WITH "CTE" AS (
          SELECT * FROM ${databaseNameAsId}.${eventsTableNameAsId}
          WHERE "threadId" = ${+immediateLastEvent.threadId}
          AND "threadCounter" = ${+immediateLastEvent.threadCounter}
          AND "timestamp" = ${+immediateLastEvent.timestamp}
          AND "aggregateId" = ${escapeStr(immediateLastEvent.aggregateId)}
          AND "aggregateVersion" = ${+immediateLastEvent.aggregateVersion}
          AND "type" = ${escapeStr(immediateLastEvent.type)}
          AND "payload" = ${escapeStr(
            JSON.stringify(immediateLastEvent.payload)
          )}
        )
        SELECT 1/Count("CTE".*) AS "NonZero" FROM "CTE"
        `)
      return resourceNames
    } catch (err) {
      return null
    }
  })()
  let rootSavePointId = generateGuid(xaKey, 'ROOT')
  log.debug(`Begin events apply transaction`)
  // Same lock-acquisition pattern as buildInit: NOWAIT row lock on the ledger
  // plus a division-by-zero guard when the row is not acquirable.
  await inlineLedgerRunQuery(
    `BEGIN TRANSACTION;
    SET TRANSACTION ISOLATION LEVEL REPEATABLE READ;
    SAVEPOINT ${rootSavePointId};
    WITH "CTE" AS (
      SELECT "XaKey" FROM ${databaseNameAsId}.${ledgerTableNameAsId}
        WHERE "EventSubscriber" = ${escapeStr(readModelName)}
        AND "XaKey" = ${escapeStr(xaKey)}
        AND "IsPaused" = FALSE
        AND "Errors" IS NULL
        FOR NO KEY UPDATE NOWAIT
    )
    SELECT 1/Count("CTE"."XaKey") AS "NonZero" FROM "CTE";
    `,
    true
  )
  const eventstoreLocalResourcesNames = await eventstoreLocalResourcePromise
  const currentEventsResult = await eventsPromise
  if (currentEventsResult != null && currentEventsResult[0] === 'error') {
    throw currentEventsResult[1]
  }
  if (eventstoreLocalResourcesNames == null && buildMode === 'plv8-internal') {
    throw new Error(
      `Event subscriber ${readModelName} forced to be built only in PLV8-internal mode, but cannot do it`
    )
  }
  // When local events tables are usable, JS-side events are not needed
  // (the procedure reads them itself), so keep `events` null.
  let events =
    eventstoreLocalResourcesNames == null && currentEventsResult != null
      ? currentEventsResult[1]
      : null
  let isLastEmptyLoop = false
  for (metricData.eventLoopCount = 0; true; metricData.eventLoopCount++) {
    // events == null means "procedure reads locally"; then the loop is
    // effective until proven otherwise.
    const isEffectiveEventLoop = events == null || events.length > 0
    log.debug(`Start optimistic events loading`)
    let nextCursorPromise: Promise<ReadModelCursor> | null =
      events != null
        ? getContinuousLatestCursor(cursor, events, eventTypes)
        : null
    let appliedEventsCount = 0
    let regularWorkflow = true
    if (isProcedural && isEffectiveEventLoop) {
      log.debug(`Running procedural events applying`)
      try {
        let procedureResult: Array<{ Result: ProcedureResult }> | null = null
        try {
          metricData.insideProjection = true
          // Invoke the per-read-model stored procedure; events are passed
          // inline unless the procedure can read the local events tables.
          procedureResult = (await inlineLedgerRunQuery(
            `SELECT ${databaseNameAsId}.${escapeId(
              `PROC-${readModelName}`
            )}(${escapeStr(
              JSON.stringify({
                maxExecutionTime: getVacantTimeInMillis(),
                ...(eventstoreLocalResourcesNames != null
                  ? {
                      localEventsDatabaseName:
                        eventstoreLocalResourcesNames.databaseName,
                      localEventsTableName:
                        eventstoreLocalResourcesNames.eventsTableName,
                    }
                  : { events }),
              })
            )}) AS "Result"`
          )) as Array<{ Result: ProcedureResult }>
        } finally {
          metricData.insideProjection = false
        }
        if (procedureResult?.[0]?.Result == null) {
          throw new Error(`Procedure was not able to be launched`)
        }
        const {
          appliedEventsThreadData,
          successEvent,
          failureEvent,
          failureError,
          appliedCount,
          status,
        } = procedureResult[0].Result
        // DEPENDENCY_ERROR: the projection needs something PLV8 cannot
        // provide — rethrow as a named error so the catch below degrades to
        // the regular (non-procedural) workflow.
        if (status === 'DEPENDENCY_ERROR') {
          const currentDependencyError = new Error(
            `${failureError}`
          ) as Error & { code?: number | string }
          currentDependencyError.name = 'DependencyError'
          if (failureError?.message != null) {
            currentDependencyError.message = failureError.message
          }
          if (failureError?.stack != null) {
            currentDependencyError.stack = failureError.stack
          }
          if (
            (failureError as Error & { code?: number | string })?.code != null
          ) {
            currentDependencyError.code = (failureError as Error & {
              code?: number | string
            }).code
          }
          throw currentDependencyError
        }
        appliedEventsCount = appliedCount
        eventCount += appliedCount
        // Partial application (or failure mid-batch) moves the cursor only
        // past the events the procedure actually applied.
        if (status === 'OK_PARTIAL' || status === 'CUSTOM_ERROR') {
          nextCursorPromise = getContinuousLatestCursor(
            cursor,
            appliedEventsThreadData,
            eventTypes
          )
        }
        if (status === 'OK_ALL' || status === 'OK_PARTIAL') {
          if (nextCursorPromise == null) {
            nextCursorPromise = getContinuousLatestCursor(
              cursor,
              appliedEventsThreadData,
              eventTypes
            )
          }
          lastSuccessEvent = successEvent
        } else if (status === 'CUSTOM_ERROR') {
          lastFailedEvent = failureEvent
          lastError = failureError
          if (
            failureError != null &&
            failureEvent != null &&
            groupMonitoring != null
          ) {
            groupMonitoring
              .group({ EventType: failureEvent.type })
              .error(failureError)
          }
        }
        if (getVacantTimeInMillis() < 0) {
          localContinue = false
        }
        // The procedure handled this batch — skip the regular workflow.
        regularWorkflow = false
      } catch (err) {
        // Any procedural failure permanently degrades this build run to the
        // regular workflow.
        isProcedural = false
        if (err instanceof PassthroughError) {
          throw err
        }
        // https://github.com/plv8/plv8/issues/160
        const inlineProcedureError =
          err?.code === 'XX000' &&
          err?.message?.match(/ReferenceError: require is not defined/i)
            ? 'Native and runtime require dependencies are prohibited in PLV8'
            : serializeError(err)
        // eslint-disable-next-line no-console
        console.warn(
          `PLV8 procedure execution for the "${readModelName}" read model's projection failed. Trying to degrade to non-plv8 mode. Reason: ${JSON.stringify(
            inlineProcedureError
          )}`
        )
        // Undo any partial procedural work, then make sure `events` is
        // populated so the regular workflow can run on this batch.
        await inlineLedgerRunQuery(`ROLLBACK TO SAVEPOINT ${rootSavePointId};`)
        if (events == null) {
          const currentEventResult = await loadEventsWithMonitoring(
            Promise.resolve(cursor),
            100,
            Date.now()
          )
          if (currentEventResult[0] === 'ok') {
            events = currentEventResult[1]
          } else {
            throw currentEventResult[1]
          }
        }
        nextCursorPromise = getContinuousLatestCursor(
          cursor,
          events,
          eventTypes
        )
      }
      log.debug(`Finish running procedural events applying`)
    }
    const eventsLoadStartTimestamp = Date.now()
    // Prefetch the next (larger, limit=1000) batch while this one is being
    // applied; in procedural+local mode no JS-side batch is needed.
    const getEventsPromise = (regularWorkflow ||
    eventstoreLocalResourcesNames == null
      ? loadEventsWithMonitoring.bind(
          null,
          // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
          nextCursorPromise!,
          1000,
          eventsLoadStartTimestamp
        )
      : Promise.resolve.bind(null, null)) as () => Promise<
      ['ok', ReadModelEvent[]] | ['error', Error]
    >
    eventsPromise = getEventsPromise()
    // Forced-PLV8 modes must not silently fall back to the regular workflow.
    if (
      regularWorkflow &&
      ['plv8-internal', 'plv8-external', 'plv8'].includes(buildMode)
    ) {
      throw new Error(
        `Event subscriber ${readModelName} forced to be built only in PLV8 mode, but cannot do it`
      )
    }
    if (regularWorkflow && isEffectiveEventLoop) {
      // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
      events = events!
      log.debug(`Running regular workflow events applying`)
      try {
        for (const event of events) {
          // Per-event savepoint: one failing handler rolls back only itself.
          const savePointId = generateGuid(xaKey, `${appliedEventsCount}`)
          try {
            const handler = await modelInterop.acquireEventHandler(store, event)
            if (handler != null) {
              await inlineLedgerRunQuery(`SAVEPOINT ${savePointId}`)
              const projectionApplyStartTimestamp = Date.now()
              try {
                metricData.insideProjection = true
                await handler()
                eventCount++
              } finally {
                metricData.insideProjection = false
              }
              projectionApplyTime += Date.now() - projectionApplyStartTimestamp
              await inlineLedgerRunQuery(`RELEASE SAVEPOINT ${savePointId}`)
              lastSuccessEvent = event
            }
            appliedEventsCount++
            // Time budget exhausted mid-batch: cursor covers only the events
            // applied so far.
            if (getVacantTimeInMillis() < 0) {
              nextCursorPromise = getContinuousLatestCursor(
                cursor,
                events.slice(0, appliedEventsCount),
                eventTypes
              )
              localContinue = false
              break
            }
          } catch (error) {
            if (error instanceof PassthroughError) {
              throw error
            }
            // Handler failed: roll back just this event and stop the batch.
            nextCursorPromise = getContinuousLatestCursor(
              cursor,
              events.slice(0, appliedEventsCount),
              eventTypes
            )
            await inlineLedgerRunQuery(
              `ROLLBACK TO SAVEPOINT ${savePointId};
              RELEASE SAVEPOINT ${savePointId}
              `
            )
            lastFailedEvent = event
            lastError = error
            break
          }
        }
      } catch (originalError) {
        if (originalError instanceof PassthroughError) {
          log.debug(`Running failed with PassthroughError`)
          throw originalError
        }
        // Infrastructure-level failure: discard the whole batch's work and
        // keep the cursor unchanged.
        nextCursorPromise = Promise.resolve(cursor)
        appliedEventsCount = 0
        const composedError = new Error(
          `Fatal inline ledger building error: ${originalError.message}`
        )
        composedError.stack = `${composedError.stack}${originalError.stack}`
        lastError = composedError
        lastSuccessEvent = null
        lastFailedEvent = null
        await inlineLedgerRunQuery(
          `ROLLBACK TO SAVEPOINT ${rootSavePointId};
          RELEASE SAVEPOINT ${rootSavePointId}
          `
        )
      }
      log.debug(`Finish running regular workflow events applying`)
    }
    const nextCursor = await nextCursorPromise
    // Persist outcome and COMMIT (releasing the ledger lock) — success path…
    if (lastError == null && isEffectiveEventLoop) {
      log.debug(`Saving success event into inline ledger`)
      await inlineLedgerRunQuery(
        `UPDATE ${databaseNameAsId}.${ledgerTableNameAsId} SET 
        ${
          lastSuccessEvent != null
            ? `"SuccessEvent" = ${escapeStr(
                JSON.stringify(lastSuccessEvent)
              )},`
            : ''
        } 
        "Cursor" = ${escapeStr(JSON.stringify(nextCursor))}
        WHERE "EventSubscriber" = ${escapeStr(readModelName)};
        COMMIT;
      `
      )
    } else if (isEffectiveEventLoop) {
      // …or failure path: append the error, record failed/success events.
      log.debug(`Saving error and failed event into inline ledger`)
      await inlineLedgerRunQuery(
        `UPDATE ${databaseNameAsId}.${ledgerTableNameAsId}
        SET "Errors" = jsonb_insert(
          COALESCE("Errors", jsonb('[]')),
          CAST(('{' || jsonb_array_length(COALESCE("Errors", jsonb('[]'))) || '}') AS TEXT[]),
          jsonb(${escapeStr(JSON.stringify(serializeError(lastError)))})
        ),
        ${
          lastFailedEvent != null
            ? `"FailedEvent" = ${escapeStr(JSON.stringify(lastFailedEvent))},`
            : ''
        }
        ${
          lastSuccessEvent != null
            ? `"SuccessEvent" = ${escapeStr(
                JSON.stringify(lastSuccessEvent)
              )},`
            : ''
        }
        "Cursor" = ${escapeStr(JSON.stringify(nextCursor))}
        WHERE "EventSubscriber" = ${escapeStr(readModelName)};
        COMMIT;
      `
      )
    }
    log.debug(`Inline ledger updated after events applied`)
    // Emit per-iteration throughput metrics, then reset the accumulators.
    if (groupMonitoring != null && eventCount > 0) {
      const applyDuration = Date.now() - eventsApplyStartTimestamp
      groupMonitoring.rate(
        'ReadModelFeedingRate',
        eventCount,
        applyDuration / 1000
      )
      groupMonitoring.duration(
        'EventApply',
        applyDuration / eventCount,
        eventCount
      )
      groupMonitoring.duration(
        'EventProjectionApply',
        projectionApplyTime / eventCount,
        eventCount
      )
    }
    eventCount = 0
    eventsApplyStartTimestamp = Date.now()
    projectionApplyTime = 0
    // One extra empty iteration is allowed before declaring no progress.
    const isBuildSuccess: boolean =
      lastError == null && (appliedEventsCount > 0 || !isLastEmptyLoop)
    isLastEmptyLoop = isBuildSuccess && appliedEventsCount === 0
    cursor = nextCursor
    if (getVacantTimeInMillis() < 0) {
      localContinue = false
    }
    if (isBuildSuccess && localContinue) {
      log.debug(`Start transaction for the next step events applying`)
      rootSavePointId = generateGuid(xaKey, 'ROOT')
      const runNextDatabaseLoop = inlineLedgerRunQuery.bind(
        null,
        `BEGIN TRANSACTION;
        SET TRANSACTION ISOLATION LEVEL REPEATABLE READ;
        SAVEPOINT ${rootSavePointId};
        WITH "CTE" AS (
          SELECT "XaKey" FROM ${databaseNameAsId}.${ledgerTableNameAsId}
            WHERE "EventSubscriber" = ${escapeStr(readModelName)}
            AND "XaKey" = ${escapeStr(xaKey)}
            AND "IsPaused" = FALSE
            AND "Errors" IS NULL
            FOR NO KEY UPDATE NOWAIT
        )
        SELECT 1/Count("CTE"."XaKey") AS "NonZero" FROM "CTE";
        `,
        true
      )
      if (!isLastEmptyLoop) {
        // Normal case: reacquire the lock, then consume the prefetched batch.
        await runNextDatabaseLoop()
        const currentEventsResult = await eventsPromise
        if (currentEventsResult != null && currentEventsResult[0] === 'error') {
          throw currentEventsResult[1]
        }
        events = currentEventsResult != null ? currentEventsResult[1] : null
      } else {
        // Empty-loop case: re-load first (the prefetch used a stale cursor),
        // then reacquire the lock.
        eventsPromise = getEventsPromise()
        const currentEventsResult = await eventsPromise
        if (currentEventsResult != null && currentEventsResult[0] === 'error') {
          throw currentEventsResult[1]
        }
        events = currentEventsResult != null ? currentEventsResult[1] : null
        await runNextDatabaseLoop()
      }
    } else {
      if (isBuildSuccess) {
        log.debug(`Going to the next step of building`)
        await next()
      }
      log.debug(`Exit from events building`)
      // Non-retryable passthrough: signals "done for now" to the caller.
      throw new PassthroughError(false)
    }
  }
}
// Entry point for building a read model. Acquires the ledger row, registers a
// transaction-journal (TRX) entry keyed by a fresh xaKey, then dispatches to
// buildInit (no cursor yet) or buildEvents (cursor exists), racing the chosen
// method against a hard timeout that fires `immediatelyStopError`. All
// expected control-flow errors (PassthroughError and transient adapter
// errors) roll back the open transaction and, when retryable, schedule the
// next build step.
const build: ExternalMethods['build'] = async (
  basePool,
  readModelName,
  store,
  modelInterop,
  next,
  eventstoreAdapter,
  inputGetVacantTimeInMillis,
  buildInfo
) => {
  const log = getLog('build')
  log.debug(`Start building`)
  // Shrink the caller's budget by the 45 s safety margin (never negative).
  const getVacantTimeInMillis = () =>
    Math.max(inputGetVacantTimeInMillis() - immediatelyStopTimeout, 0)
  eventstoreAdapter.establishTimeLimit(getVacantTimeInMillis)
  const { eventsWithCursors, ...inputMetricData } = buildInfo
  // Mutable metrics shared with the build methods; `insideProjection` lets
  // the query wrapper below exclude projection-time queries from ledger time.
  const metricData = {
    ...inputMetricData,
    pureLedgerTime: 0,
    insideProjection: false,
  }
  void eventsWithCursors
  const {
    PassthroughError,
    inlineLedgerRunQuery: ledgerQuery,
    schemaName,
    tablePrefix,
    escapeId,
    escapeStr,
    generateGuid,
    monitoring,
  } = basePool
  const now = Date.now()
  const hasSendTime =
    metricData.sendTime != null && metricData.sendTime.constructor === Number
  const groupMonitoring =
    monitoring != null
      ? monitoring
          .group({ Part: 'ReadModelProjection' })
          .group({ ReadModel: readModelName })
      : null
  // Notification-to-build latency, reported both globally and per read model.
  if (hasSendTime) {
    for (const innerMonitoring of [monitoring, groupMonitoring]) {
      if (innerMonitoring != null) {
        innerMonitoring.duration('EventDelivery', now - metricData.sendTime)
      }
    }
  }
  // Wrap the ledger query so time spent in it (outside projection handlers)
  // accumulates into metricData.pureLedgerTime; Object.assign preserves any
  // extra properties attached to the original function.
  const inlineLedgerRunQuery: typeof ledgerQuery = Object.assign(
    async (...args: any[]): Promise<any> => {
      const inlineLedgerStartTimestamp = Date.now()
      try {
        return await (ledgerQuery as any)(...args)
      } finally {
        if (!metricData.insideProjection) {
          metricData.pureLedgerTime += Date.now() - inlineLedgerStartTimestamp
        }
      }
    },
    ledgerQuery
  )
  try {
    basePool.activePassthrough = true
    const databaseNameAsId = escapeId(schemaName)
    const databaseNameAsStr = escapeStr(schemaName)
    const ledgerTableNameAsId = escapeId(
      `${tablePrefix}__${schemaName}__LEDGER__`
    )
    const trxTableNameAsId = escapeId(`${tablePrefix}__${schemaName}__TRX__`)
    const firstRandom = Math.random()
    let lastRandom: number | null = null
    // More entropy via branch misprediction and more context changes
    for (let index = 0; index < Math.floor(firstRandom * 50) + 1; index++) {
      lastRandom = Math.random()
    }
    // Unique key identifying this build attempt in the ledger/TRX tables.
    const xaKey = generateGuid(
      `${Date.now()}${firstRandom}${lastRandom}${process.pid}`
    )
    log.debug(`Running inline ledger query`)
    await basePool.ensureAffectedOperation('build', readModelName)
    // Single statement that (a) tries to lock the ledger row, (b) journals
    // this attempt's xaKey with the backend PID into the TRX table, and
    // (c) stamps the xaKey onto the ledger row — returning the updated row
    // together with whether a `PROC-<name>` stored procedure exists.
    const rows = (await inlineLedgerRunQuery(
      `WITH "MaybeAcquireLock" AS (
         SELECT * FROM ${databaseNameAsId}.${ledgerTableNameAsId}
         WHERE "EventSubscriber" = ${escapeStr(readModelName)}
         AND "IsPaused" = FALSE
         AND "Errors" IS NULL
         FOR NO KEY UPDATE NOWAIT
       ), "InsertTrx" AS (
        INSERT INTO ${databaseNameAsId}.${trxTableNameAsId}(
          "Timestamp", "XaKey", "XaValue"
        ) VALUES (
          CAST(extract(epoch from clock_timestamp()) * 1000 AS BIGINT), 
          ${escapeStr(xaKey)}, 
          CAST(pg_backend_pid() AS VARCHAR(190))
        )
        RETURNING *
      ), "UpdateTrx" AS (
        UPDATE ${databaseNameAsId}.${ledgerTableNameAsId}
        SET "XaKey" = ${escapeStr(xaKey)}
        WHERE "EventSubscriber" = ${escapeStr(readModelName)}
        AND CAST(COALESCE((SELECT LEAST(Count("InsertTrx".*), 0) FROM "InsertTrx"), 0) AS BIGINT) = 0
        AND (SELECT Count("MaybeAcquireLock".*) FROM "MaybeAcquireLock") = 1
        AND "IsPaused" = FALSE
        AND "Errors" IS NULL
        RETURNING *
      )
      SELECT "UpdateTrx".*, (
        SELECT Count(*) FROM information_schema.routines
        WHERE routines.specific_schema=${databaseNameAsStr} AND
        routines.routine_name = ${escapeStr(`PROC-${readModelName}`)}
      ) > 0 AS "IsProcedural"
      FROM "UpdateTrx"
      `
    )) as Array<ReadModelProcedureLedger>
    let readModelLedger = rows.length === 1 ? rows[0] : null
    if (readModelLedger == null || readModelLedger.Errors != null) {
      log.debug(`Ledger is not locked. Throwing PassthroughError`)
      throw new PassthroughError(false)
    }
    const eventTypes =
      readModelLedger.EventTypes != null ? readModelLedger.EventTypes : null
    if (!Array.isArray(eventTypes) && eventTypes != null) {
      throw new TypeError('eventTypes')
    }
    const cursor =
      readModelLedger.Cursor != null ? readModelLedger.Cursor : null
    if (cursor != null && cursor.constructor !== String) {
      throw new TypeError('cursor')
    }
    const currentPool = {
      ledgerTableNameAsId,
      databaseNameAsId,
      eventTypes,
      inputCursor: cursor,
      readModelLedger,
      metricData,
      xaKey,
      log,
    }
    // No cursor yet means the Init projection has not run.
    let buildMethod: typeof buildEvents
    if (cursor == null) {
      log.debug(`Ledger is locked. Running build init`)
      buildMethod = buildInit
    } else {
      log.debug(`Ledger is locked. Running build events`)
      buildMethod = buildEvents
    }
    // Race the build against the full (unshrunk) vacant time; the timer
    // rejects with the identity-compared immediatelyStopError sentinel.
    let barrierTimeout: ReturnType<typeof setTimeout> | null = null
    try {
      await Promise.race([
        new Promise((_, reject) => {
          barrierTimeout = setTimeout(() => {
            reject(immediatelyStopError)
            barrierTimeout = null
          }, inputGetVacantTimeInMillis())
        }),
        buildMethod(
          currentPool,
          basePool,
          readModelName,
          store,
          modelInterop,
          next,
          eventstoreAdapter,
          getVacantTimeInMillis,
          buildInfo
        ),
      ])
    } finally {
      if (barrierTimeout != null) {
        clearTimeout(barrierTimeout)
        barrierTimeout = null
      }
    }
    // Opportunistic cleanup: drop TRX journal entries older than 24 h.
    try {
      await inlineLedgerRunQuery(`
        DELETE FROM ${databaseNameAsId}.${trxTableNameAsId}
        WHERE "Timestamp" < CAST(extract(epoch from clock_timestamp()) * 1000 AS BIGINT) - 86400000
      `)
    } catch (err) {
      if (!(err instanceof PassthroughError)) {
        log.debug(`Unknown error is thrown while cleaning TRX journal`)
        throw err
      }
    }
  } catch (error) {
    // Hard-timeout sentinel: drop the connection (best effort) and schedule
    // the next build step immediately.
    if (error === immediatelyStopError) {
      try {
        await basePool.connection.end()
      } catch (e) {}
      await next()
      return
    }
    // Anything other than passthrough/transient adapter errors is fatal.
    if (
      error == null ||
      !(
        error instanceof PassthroughError ||
        error.name === 'AlreadyDisposedError' ||
        error.name === 'RequestTimeoutError' ||
        error.name === 'ServiceBusyError'
      )
    ) {
      log.debug(`Unknown error is thrown while building`)
      throw error
    }
    log.debug(`PassthroughError is thrown while building`)
    const passthroughError = error as PassthroughErrorInstance
    try {
      log.debug(`Running rollback after error`)
      await inlineLedgerRunQuery(`ROLLBACK`)
      log.debug(`Transaction is rolled back`)
    } catch (err) {
      if (!(err instanceof PassthroughError)) {
        log.debug(`Unknown error is thrown while rollback`)
        throw err
      }
      log.debug(`PassthroughError is thrown while rollback`)
    }
    // Retryable conditions re-enter the build loop via next().
    if (
      passthroughError.isRetryable ||
      error.name === 'AlreadyDisposedError' ||
      error.name === 'RequestTimeoutError' ||
      error.name === 'ServiceBusyError'
    ) {
      log.debug(`PassthroughError is retryable. Going to the next step`)
      await next()
    }
  } finally {
    log.debug(`Building is finished`)
    basePool.activePassthrough = false
    for (const innerMonitoring of [monitoring, groupMonitoring]) {
      if (innerMonitoring != null) {
        innerMonitoring.duration('Ledger', metricData.pureLedgerTime)
      }
    }
  }
}
export default build | the_stack |
import { source as b1 } from './data/blog-1'
import b1s from './data/blog-2.svg'
import { source as b2 } from './data/blog-2'
import b2s from './data/blog-1.svg'
import { source as b3 } from './data/blog-3'
import b3s from './data/blog-3.svg'
import { source as b4 } from './data/blog-4'
import b4s from './data/blog-4.svg'
import { source as b5 } from './data/blog-5'
import b5s from './data/blog-5.svg'
import { source as c1 } from './data/contact-1'
import c1s from './data/contact-1.svg'
import { source as c2 } from './data/contact-2'
import c2s from './data/contact-2.svg'
import { source as c3 } from './data/contact-3'
import c3s from './data/contact-3.svg'
import { source as d1 } from './data/content-1'
import d1s from './data/content-1.svg'
import { source as d2 } from './data/content-2'
import d2s from './data/content-2.svg'
import { source as d3 } from './data/content-3'
import d3s from './data/content-3.svg'
import { source as d4 } from './data/content-4'
import d4s from './data/content-4.svg'
import { source as d5 } from './data/content-5'
import d5s from './data/content-5.svg'
import { source as d6 } from './data/content-6'
import d6s from './data/content-6.svg'
import { source as d7 } from './data/content-7'
import d7s from './data/content-7.svg'
import { source as d8 } from './data/content-8'
import d8s from './data/content-8.svg'
import { source as a1 } from './data/cta-1'
import a1s from './data/cta-1.svg'
import { source as a2 } from './data/cta-2'
import a2s from './data/cta-2.svg'
import { source as a3 } from './data/cta-3'
import a3s from './data/cta-3.svg'
import { source as a4 } from './data/cta-4'
import a4s from './data/cta-4.svg'
import { source as e1 } from './data/ecommerce-1'
import e1s from './data/ecommerce-1.svg'
import { source as e2 } from './data/ecommerce-2'
import e2s from './data/ecommerce-2.svg'
import { source as e3 } from './data/ecommerce-3'
import e3s from './data/ecommerce-3.svg'
import { source as f1 } from './data/feature-1'
import f1s from './data/feature-1.svg'
import { source as f2 } from './data/feature-2'
import f2s from './data/feature-2.svg'
import { source as f3 } from './data/feature-3'
import f3s from './data/feature-3.svg'
import { source as f4 } from './data/feature-4'
import f4s from './data/feature-4.svg'
import { source as f5 } from './data/feature-5'
import f5s from './data/feature-5.svg'
import { source as f6 } from './data/feature-6'
import f6s from './data/feature-6.svg'
import { source as f7 } from './data/feature-7'
import f7s from './data/feature-7.svg'
import { source as f8 } from './data/feature-8'
import f8s from './data/feature-8.svg'
import { source as z1 } from './data/footer-1'
import z1s from './data/footer-1.svg'
import { source as z2 } from './data/footer-2'
import z2s from './data/footer-2.svg'
import { source as z3 } from './data/footer-3'
import z3s from './data/footer-3.svg'
import { source as z4 } from './data/footer-4'
import z4s from './data/footer-4.svg'
import { source as z5 } from './data/footer-5'
import z5s from './data/footer-5.svg'
import { source as g1 } from './data/gallery-1'
import g1s from './data/gallery-1.svg'
import { source as g2 } from './data/gallery-2'
import g2s from './data/gallery-2.svg'
import { source as g3 } from './data/gallery-3'
import g3s from './data/gallery-3.svg'
import { source as h1 } from './data/header-1'
import h1s from './data/header-1.svg'
import { source as h2 } from './data/header-2'
import h2s from './data/header-2.svg'
import { source as h3 } from './data/header-3'
import h3s from './data/header-3.svg'
import { source as h4 } from './data/header-4'
import h4s from './data/header-4.svg'
import { source as r1 } from './data/hero-1'
import r1s from './data/hero-1.svg'
import { source as r2 } from './data/hero-2'
import r2s from './data/hero-2.svg'
import { source as r3 } from './data/hero-3'
import r3s from './data/hero-3.svg'
import { source as r4 } from './data/hero-4'
import r4s from './data/hero-4.svg'
import { source as r5 } from './data/hero-5'
import r5s from './data/hero-5.svg'
import { source as r6 } from './data/hero-6'
import r6s from './data/hero-6.svg'
import { source as p1 } from './data/pricing-1'
import p1s from './data/pricing-1.svg'
import { source as p2 } from './data/pricing-2'
import p2s from './data/pricing-2.svg'
import { source as s1 } from './data/statistic-1'
import s1s from './data/statistic-1.svg'
import { source as s2 } from './data/statistic-2'
import s2s from './data/statistic-2.svg'
import { source as s3 } from './data/statistic-3'
import s3s from './data/statistic-3.svg'
import { source as q1 } from './data/step-1'
import q1s from './data/step-1.svg'
// import { source as q2 } from './data/step-2'
// import q2s from './data/step-2.svg'
import { source as q3 } from './data/step-3'
import q3s from './data/step-3.svg'
import { source as t1 } from './data/team-1'
import t1s from './data/team-1.svg'
import { source as t2 } from './data/team-2'
import t2s from './data/team-2.svg'
import { source as t3 } from './data/team-3'
import t3s from './data/team-3.svg'
import { source as m1 } from './data/testimonial-1'
import m1s from './data/testimonial-1.svg'
import { source as m2 } from './data/testimonial-2'
import m2s from './data/testimonial-2.svg'
import { source as m3 } from './data/testimonial-3'
import m3s from './data/testimonial-3.svg'
import { getSvgHtml } from '../../utils'
const sources = [
{
id: 'blog-block-1',
// class: 'fa fa-map-o',
class: '',
label: getSvgHtml(b1s),
// label: 'b2s().outerHTML',
content: b1,
category: 'Blog',
// order: 1
},
{
id: 'blog-block-2',
class: '',
label: getSvgHtml(b2s),
content: b2,
category: 'Blog',
// order: 1
},
{
id: 'blog-block-3',
class: '',
label: getSvgHtml(b3s),
content: b3,
category: 'Blog',
// order: 1
},
{
id: 'blog-block-4',
class: '',
label: getSvgHtml(b4s),
content: b4,
category: 'Blog',
// order: 1
},
{
id: 'blog-block-5',
class: '',
label: getSvgHtml(b5s),
content: b5,
category: 'Blog',
// order: 1
},
{
id: 'contact-block-1',
class: '',
label: getSvgHtml(c1s),
content: c1,
category: 'Contact',
// order: 1
},
{
id: 'contact-block-2',
class: '',
label: getSvgHtml(c2s),
content: c2,
category: 'Contact',
// order: 1
},
{
id: 'contact-block-3',
class: '',
label: getSvgHtml(c3s),
content: c3,
category: 'Contact',
// order: 1
},
{
id: 'content-block-1',
class: '',
label: getSvgHtml(d1s),
content: d1,
category: 'Content',
// order: 1
},
{
id: 'content-block-2',
class: '',
label: getSvgHtml(d2s),
content: d2,
category: 'Content',
// order: 1
},
{
id: 'content-block-3',
class: '',
label: getSvgHtml(d3s),
content: d3,
category: 'Content',
// order: 1
},
{
id: 'content-block-4',
class: '',
label: getSvgHtml(d4s),
content: d4,
category: 'Content',
// order: 1
},
{
id: 'content-block-5',
class: '',
label: getSvgHtml(d5s),
content: d5,
category: 'Content',
// order: 1
},
{
id: 'content-block-6',
class: '',
label: getSvgHtml(d6s),
content: d6,
category: 'Content',
// order: 1
},
{
id: 'content-block-7',
class: '',
label: getSvgHtml(d7s),
content: d7,
category: 'Content',
// order: 1
},
{
id: 'content-block-8',
class: '',
label: getSvgHtml(d8s),
content: d8,
category: 'Content',
// order: 1
},
{
id: 'cta-block-1',
class: '',
label: getSvgHtml(a1s),
content: a1,
category: 'CTA',
// order: 1
},
{
id: 'cta-block-2',
class: '',
label: getSvgHtml(a2s),
content: a2,
category: 'CTA',
// order: 1
},
{
id: 'cta-block-3',
class: '',
label: getSvgHtml(a3s),
content: a3,
category: 'CTA',
// order: 1
},
{
id: 'cta-block-4',
class: '',
label: getSvgHtml(a4s),
content: a4,
category: 'CTA',
// order: 1
},
{
id: 'commerce-block-1',
class: '',
label: getSvgHtml(e1s),
content: e1,
category: 'Commerce',
// order: 1
},
{
id: 'commerce-block-2',
class: '',
label: getSvgHtml(e2s),
content: e2,
category: 'Commerce',
// order: 1
},
{
id: 'commerce-block-3',
class: '',
label: getSvgHtml(e3s),
content: e3,
category: 'Commerce',
// order: 1
},
{
id: 'feature-block-1',
class: '',
label: getSvgHtml(f1s),
content: f1,
category: 'Features',
// order: 1
},
{
id: 'feature-block-2',
class: '',
label: getSvgHtml(f2s),
content: f2,
category: 'Features',
// order: 1
},
{
id: 'feature-block-3',
class: '',
label: getSvgHtml(f3s),
content: f3,
category: 'Features',
// order: 1
},
{
id: 'feature-block-4',
class: '',
label: getSvgHtml(f4s),
content: f4,
category: 'Features',
// order: 1
},
{
id: 'feature-block-5',
class: '',
label: getSvgHtml(f5s),
content: f5,
category: 'Features',
// order: 1
},
{
id: 'feature-block-6',
class: '',
label: getSvgHtml(f6s),
content: f6,
category: 'Features',
// order: 1
},
{
id: 'feature-block-7',
class: '',
label: getSvgHtml(f7s),
content: f7,
category: 'Features',
// order: 1
},
{
id: 'feature-block-8',
class: '',
label: getSvgHtml(f8s),
content: f8,
category: 'Features',
// order: 1
},
{
id: 'footer-block-1',
class: '',
label: getSvgHtml(z1s),
content: z1,
category: 'Footer',
// order: 1
},
{
id: 'footer-block-2',
class: '',
label: getSvgHtml(z2s),
content: z2,
category: 'Footer',
// order: 1
},
{
id: 'footer-block-3',
class: '',
label: getSvgHtml(z3s),
content: z3,
category: 'Footer',
// order: 1
},
{
id: 'footer-block-4',
class: '',
label: getSvgHtml(z4s),
content: z4,
category: 'Footer',
// order: 1
},
{
id: 'footer-block-5',
class: '',
label: getSvgHtml(z5s),
content: z5,
category: 'Footer',
// order: 1
},
{
id: 'gallery-block-1',
class: '',
label: getSvgHtml(g1s),
content: g1,
category: 'Gallery',
// order: 1
},
{
id: 'gallery-block-2',
class: '',
label: getSvgHtml(g2s),
content: g2,
category: 'Gallery',
// order: 1
},
{
id: 'gallery-block-3',
class: '',
label: getSvgHtml(g3s),
content: g3,
category: 'Gallery',
// order: 1
},
{
id: 'header-block-1',
class: '',
label: getSvgHtml(h1s),
content: h1,
category: 'Header',
// order: 1
},
{
id: 'header-block-2',
class: '',
label: getSvgHtml(h2s),
content: h2,
category: 'Header',
// order: 1
},
{
id: 'header-block-3',
class: '',
label: getSvgHtml(h3s),
content: h3,
category: 'Header',
// order: 1
},
{
id: 'header-block-4',
class: '',
label: getSvgHtml(h4s),
content: h4,
category: 'Header',
// order: 1
},
{
id: 'hero-block-1',
class: '',
label: getSvgHtml(r1s),
content: r1,
category: 'Hero',
// order: 1
},
{
id: 'hero-block-2',
class: '',
label: getSvgHtml(r2s),
content: r2,
category: 'Hero',
// order: 1
},
{
id: 'hero-block-3',
class: '',
label: getSvgHtml(r3s),
content: r3,
category: 'Hero',
// order: 1
},
{
id: 'hero-block-4',
class: '',
label: getSvgHtml(r4s),
content: r4,
category: 'Hero',
// order: 1
},
{
id: 'hero-block-5',
class: '',
label: getSvgHtml(r5s),
content: r5,
category: 'Hero',
// order: 1
},
{
id: 'hero-block-6',
class: '',
label: getSvgHtml(r6s),
content: r6,
category: 'Hero',
// order: 1
},
{
id: 'pricing-block-1',
class: '',
label: getSvgHtml(p1s),
content: p1,
category: 'Pricing',
// order: 1
},
{
id: 'pricing-block-2',
class: '',
label: getSvgHtml(p2s),
content: p2,
category: 'Pricing',
// order: 1
},
{
id: 'statistic-block-1',
class: '',
label: getSvgHtml(s1s),
content: s1,
category: 'Statistics',
// order: 1
},
{
id: 'statistic-block-2',
class: '',
label: getSvgHtml(s2s),
content: s2,
category: 'Statistics',
// order: 1
},
{
id: 'statistic-block-3',
class: '',
label: getSvgHtml(s3s),
content: s3,
category: 'Statistics',
// order: 1
},
{
id: 'step-block-1',
class: '',
label: getSvgHtml(q1s),
content: q1,
category: 'Steps',
// order: 1
},
// {
// id: 'step-block-2',
// class: '',
// label: getSvgHtml(q2s),
// content: q2,
// category: 'Steps',
// // order: 1
// },
{
id: 'step-block-3',
class: '',
label: getSvgHtml(q3s),
content: q3,
category: 'Steps',
// order: 1
},
{
id: 'team-block-1',
class: '',
label: getSvgHtml(t1s),
content: t1,
category: 'Team',
// order: 1
},
{
id: 'team-block-2',
class: '',
label: getSvgHtml(t2s),
content: t2,
category: 'Team',
// order: 1
},
{
id: 'team-block-3',
class: '',
label: getSvgHtml(t3s),
content: t3,
category: 'Team',
// order: 1
},
{
id: 'testimonial-block-1',
class: '',
label: getSvgHtml(m1s),
content: m1,
category: 'Testimonials',
// order: 1
},
{
id: 'testimonial-block-2',
class: '',
label: getSvgHtml(m2s),
content: m2,
category: 'Testimonials',
// order: 1
},
{
id: 'testimonial-block-3',
class: '',
label: getSvgHtml(m3s),
content: m3,
category: 'Testimonials',
// order: 1
},
]
const loadTailwindBlocks = (newEditor: { BlockManager: any }): void => {
const blockManager = newEditor.BlockManager
sources.forEach((s) => {
blockManager.add(s.id, {
label: s.label,
attributes: { class: s.class },
content: s.content,
category: { label: s.category, open: s.category === 'Blog' },
})
})
}
export { loadTailwindBlocks } | the_stack |
import { GeoPackage } from '../geoPackage';
import { GeoPackageConnection } from '../db/geoPackageConnection';
import { ColumnValues } from './columnValues';
/**
* Dao module.
*/
import { SqliteQueryBuilder } from '../db/sqliteQueryBuilder';
import { DBValue } from '../db/dbAdapter';
import { CoreSQLUtils } from '../db/coreSQLUtils';
/**
* Base DAO
*/
export abstract class Dao<T> {
/**
* Database connection to the sqlite file
*/
readonly connection: GeoPackageConnection;
/**
* ID Columns for this DAO
*/
idColumns: string[];
/**
* Name of the table within the GeoPackage
*/
gpkgTableName: string;
/**
*
* @param geoPackage GeoPackage object this dao belongs to
*/
constructor(readonly geoPackage: GeoPackage) {
this.connection = geoPackage.database;
}
/**
* Creates a object from the result
*/
abstract createObject(result: Record<string, DBValue>): T;
/**
* Checks if the table exists
*/
isTableExists(): boolean {
return this.connection.isTableExists(this.gpkgTableName);
}
/**
* Refreshes the object by id
* @param object object to refresh
*/
refresh(object: T): T | undefined {
return this.queryForSameId(object);
}
/**
* Query for object by id
* @param id ID of the object to query for
* @return object created from the raw database object
*/
queryForId(id: DBValue): T | undefined {
const whereString = this.buildPkWhere(id);
const whereArgs = this.buildPkWhereArgs(id);
const query = SqliteQueryBuilder.buildQuery(false, "'" + this.gpkgTableName + "'", undefined, whereString);
const result = this.connection.get(query, whereArgs);
if (!result) return;
return this.createObject(result);
}
queryForSameId(object: T): T {
const idArray = this.getMultiId(object);
return this.queryForMultiId(idArray);
}
getMultiId(object: T | any): any[] {
const idValues: any[] = [];
for (let i = 0; i < this.idColumns.length; i++) {
const idValue = object.values ? object.values[this.idColumns[i]] : object[this.idColumns[i]];
if (idValue !== undefined) {
idValues.push(idValue);
}
}
return idValues;
}
/**
* Query for object by multi id
* @param idValues ColumnValues with the multi id
* @return object created from the raw database object
*/
queryForMultiId(idValues: DBValue[]): T {
const whereString = this.buildPkWhere(idValues);
const whereArgs = this.buildPkWhereArgs(idValues);
const query = SqliteQueryBuilder.buildQuery(false, "'" + this.gpkgTableName + "'", undefined, whereString);
const result = this.connection.get(query, whereArgs);
if (!result) return;
return this.createObject(result);
}
/**
* Queries for all matches and returns them in the callback. Be aware this pulls all results into memory
* @param {string} [where] Optional where clause
* @param {object[]} [whereArgs] Optional where args array
* @return {Object[]} raw object array from the database
*/
queryForAll(where?: string, whereArgs?: DBValue[]): Record<string, DBValue>[] {
const query = SqliteQueryBuilder.buildQuery(false, "'" + this.gpkgTableName + "'", undefined, where);
return this.connection.all(query, whereArgs);
}
/**
* Queries for all matches and returns them in the callback. Be aware this pulls all results into memory
* @param {string} fieldName name of the field to query for
* @param {string} value value of the like clause
* @return {Object[]} raw object array from the database
*/
queryForLike(fieldName: string, value: string): Record<string, DBValue>[] {
const values = new ColumnValues();
values.addColumn(fieldName, value);
const where = this.buildWhereLike(values);
const whereArgs = this.buildWhereArgs(value);
const query = SqliteQueryBuilder.buildQuery(false, "'" + this.gpkgTableName + "'", undefined, where);
return this.connection.all(query, whereArgs);
}
/**
* Queries for all matches and returns them. Only queries for the specified column name Be aware this pulls all results into memory
* @param {string} columnName name of the column to query for
* @param {module:dao/columnValues~ColumnValues} [fieldValues] optional values to filter on
* @return {Object[]} raw object array from the database
*/
queryForColumns(columnName: string, fieldValues?: ColumnValues): Record<string, DBValue>[] {
let where: string | undefined = undefined;
let whereArgs: DBValue[] | null = null;
if (fieldValues) {
where = this.buildWhere(fieldValues);
whereArgs = this.buildWhereArgs(fieldValues);
}
const query = SqliteQueryBuilder.buildQuery(false, "'" + this.gpkgTableName + "'", [columnName], where);
return this.connection.all(query, whereArgs);
}
/**
* Queries for all items in the table with a page size and page number
* @param {number} pageSize size of the chunk to query for
* @param {number} page chunk number to query for
* @return {Object[]} raw object array from the database
*/
queryForChunk(pageSize: number, page: number): Record<string, DBValue>[] {
const query = SqliteQueryBuilder.buildQuery(
false,
"'" + this.gpkgTableName + "'",
undefined,
undefined,
undefined,
undefined,
undefined,
this.idColumns[0],
pageSize,
page * pageSize,
);
return this.connection.all(query);
}
/**
* Iterate all items in the table one at a time. If no parameters are passed, iterates the entire table. Returns an Iterable object
* @param {string} [field] field to filter on
* @param {Object} [value] value to filter on
* @param {string} [groupBy] group by clause
* @param {string} [having] having clause
* @param {string} [orderBy] order by clause
* @return {IterableIterator<any>} iterable of database objects
*/
queryForEach(
field?: string,
value?: DBValue,
groupBy?: string,
having?: string,
orderBy?: string,
): IterableIterator<Record<string, DBValue>> {
if (!field) {
const query: string = SqliteQueryBuilder.buildQuery(
false,
"'" + this.gpkgTableName + "'",
undefined,
undefined,
undefined,
groupBy,
having,
orderBy,
);
return this.connection.each(query);
} else {
const whereString: string = this.buildWhereWithFieldAndValue(field, value);
const whereArgs: DBValue[] | null = this.buildWhereArgs(value);
const query = SqliteQueryBuilder.buildQuery(
false,
"'" + this.gpkgTableName + "'",
undefined,
whereString,
undefined,
groupBy,
having,
orderBy,
);
return this.connection.each(query, whereArgs);
}
}
/**
* Iterate all objects in thet able that match the ColumnValues passed in
* @param {module:dao/columnValues~ColumnValues} fieldValues ColumnValues to query for
* @return {IterableIterator<any>}
*/
queryForFieldValues(fieldValues: ColumnValues): IterableIterator<Record<string, DBValue>> {
const whereString: string = this.buildWhere(fieldValues);
const whereArgs: DBValue[] = this.buildWhereArgs(fieldValues);
const query = SqliteQueryBuilder.buildQuery(false, "'" + this.gpkgTableName + "'", undefined, whereString);
return this.connection.each(query, whereArgs);
}
/**
* Iterate all matching objects
* @param {string} join join clause
* @param {string} where where clause
* @param {Object[]} whereArgs array of where query values
* @param {string[]} columns columns to query for
* @return {IterableIterator<any>}
*/
queryJoinWhereWithArgs(
join: string,
where?: string,
whereArgs?: DBValue[],
columns?: string[],
): IterableIterator<Record<string, DBValue>> {
const query = SqliteQueryBuilder.buildQuery(false, "'" + this.gpkgTableName + "'", columns, where, join);
return this.connection.each(query, whereArgs);
}
/**
* Count all matching objects
* @param {string} join join clause
* @param {string} where where clause
* @param {Object[]} whereArgs array of where query values
* @return {number}
*/
countJoinWhereWithArgs(join: string, where?: string, whereArgs?: DBValue[]): number {
const query = "select COUNT(*) as count from '" + this.gpkgTableName + "' " + join + ' where ' + where;
const result = this.connection.get(query, whereArgs);
return result?.count;
}
/**
* Iterate all distinct matching rows in the table
* @param {string} where where clause
* @param {Object[]} whereArgs array of where query values
* @return {IterableIterator<any>}
*/
queryWhereWithArgsDistinct(where: string, whereArgs?: DBValue[]): IterableIterator<Record<string, DBValue>> {
const query = SqliteQueryBuilder.buildQuery(true, "'" + this.gpkgTableName + "'", undefined, where);
return this.connection.each(query, whereArgs);
}
/**
* Iterate all matching rows
* @param {string} [where] where clause
* @param {Object[]} [whereArgs] array of where query values
* @param {string} [groupBy] group by clause
* @param {string} [having] having clause
* @param {string} [orderBy] order by clause
* @param {number} [limit] limit clause
* @return {IterableIterator<any>}
*/
queryWhere(
where?: string,
whereArgs?: DBValue[],
groupBy?: string,
having?: string,
orderBy?: string,
limit?: number,
): IterableIterator<Record<string, DBValue>> {
const query: string = SqliteQueryBuilder.buildQuery(
false,
"'" + this.gpkgTableName + "'",
undefined,
where,
undefined,
groupBy,
having,
orderBy,
limit,
);
return this.connection.each(query, whereArgs);
}
/**
* Get the primary key where clause
* @param {Object|Object[]} idValue id
* @return {string} primary key where clause
*/
buildPkWhere(idValue: any[] | any): string {
if (Array.isArray(idValue)) {
const idValuesArray = idValue;
const idColumnValues = new ColumnValues();
for (let i = 0; i < idValuesArray.length; i++) {
idColumnValues.addColumn(this.idColumns[i], idValuesArray[i]);
}
return this.buildWhere(idColumnValues);
}
return this.buildWhereWithFieldAndValue(this.idColumns[0], idValue);
}
/**
* Get the primary key where args
* @param {Object} idValue id
* @return {Object[]} where args
*/
buildPkWhereArgs(idValue: DBValue[] | DBValue): DBValue[] {
if (Array.isArray(idValue)) {
const idValuesArray = idValue;
let values: DBValue[] = [];
for (let i = 0; i < idValuesArray.length; i++) {
values = values.concat(this.buildWhereArgs(idValuesArray[i]));
}
return values;
}
return this.buildWhereArgs(idValue);
}
/**
* Build where (or selection) LIKE statement for fields
* @param {module:dao/columnValues~ColumnValues} fields columns and values
* @param {string} [operation] AND or OR
* @return {string} where clause
*/
buildWhereLike(fields: ColumnValues, operation?: string): string {
let whereString = '';
for (let i = 0; i < fields.columns.length; i++) {
const column = fields.columns[i];
if (i) {
whereString += ' ' + operation + ' ';
}
whereString += this.buildWhereWithFieldAndValue(column, fields.getValue(column), 'like');
}
return whereString;
}
/**
* Build where or selection statement for fields
* @param fields columns and values
* @param [operation=AND] AND or OR
* @return where clause
*/
buildWhere(fields: ColumnValues, operation = 'and'): string {
let whereString = '';
for (let i = 0; i < fields.columns.length; i++) {
const column = fields.columns[i];
if (i) {
whereString += ' ' + operation + ' ';
}
whereString += this.buildWhereWithFieldAndValue(column, fields.getValue(column));
}
return whereString;
}
/**
* Builds a where args array
* @param {any[]|ColumnValues|any} values argument values to push
* @returns {any[]}
*/
buildWhereArgs(values: DBValue[] | ColumnValues | DBValue): DBValue[] | null {
let args: DBValue[] = [];
if (Array.isArray(values)) {
args = this._buildWhereArgsWithArray(values);
} else if (values instanceof ColumnValues) {
args = this._buildWhereArgsWithColumnValues(values);
} else {
if (values !== undefined || values !== null) {
args.push(values);
}
}
return args.length ? args : null;
}
/**
* Builds a where args array
* @param {any[]} values argument values to push
* @returns {any[]}
*/
_buildWhereArgsWithArray(values: DBValue[]): DBValue[] {
const args = [];
for (let i = 0; i < values.length; i++) {
const value = values[i];
if (value !== undefined && value !== null) {
args.push(value);
}
}
return args;
}
/**
* Builds a where args array
* @param {ColumnValues} values argument values to push
* @returns {any[]}
*/
_buildWhereArgsWithColumnValues(values: ColumnValues): DBValue[] {
const args = [];
for (let i = 0; i < values.columns.length; i++) {
const column = values.columns[i];
const value = values.getValue(column);
if (value !== undefined && value !== null) {
args.push(value);
}
}
return args;
}
/**
* Builds a where clause from the field and value with an optional operation. If the value is empty, 'is null' is added to the query for the field
* @param {string} field field name
* @param {Object} [value] optional value to filter on
* @param {string} [operation='='] optional operation
* @return {string} where clause
*/
buildWhereWithFieldAndValue(field: string, value: DBValue, operation = '='): string {
let whereString = '' + field + ' ';
if (value === undefined || value === null) {
whereString += 'is null';
} else {
whereString += operation + ' ?';
}
return whereString;
}
/**
* Query for all rows in the table that match
* @param {string} field field to match
* @param {*} value value to match
* @param {string} [groupBy] group by clause
* @param {string} [having] having clause
* @param {string} [orderBy] order by clause
* @return {Object[]} array of raw database objects
*/
queryForAllEq(
field: string,
value: DBValue,
groupBy?: string,
having?: string,
orderBy?: string,
): Record<string, DBValue>[] {
const whereString = this.buildWhereWithFieldAndValue(field, value);
const whereArgs = this.buildWhereArgs(value);
const query = SqliteQueryBuilder.buildQuery(
false,
"'" + this.gpkgTableName + "'",
undefined,
whereString,
undefined,
groupBy,
having,
orderBy,
);
return this.connection.all(query, whereArgs);
}
/**
* Count rows in the table optionally filtered by the parameters specified
* @param {module:dao/columnValues~ColumnValues|string} [fields] Either a ColumnValues object or a string specifying a field name
* @param {Object} [value] value to filter on if fields is a string
* @return {number} count of objects
*/
count(fields?: ColumnValues | string, value?: DBValue): number {
if (!fields) {
return this.connection.count(this.gpkgTableName);
}
let where;
let whereArgs;
let query;
if (fields instanceof ColumnValues) {
where = this.buildWhere(fields, 'and');
whereArgs = this.buildWhereArgs(fields);
query = SqliteQueryBuilder.buildCount("'" + this.gpkgTableName + "'", where);
} else {
const whereString = this.buildWhereWithFieldAndValue(fields, value);
whereArgs = this.buildWhereArgs(value);
query = SqliteQueryBuilder.buildCount("'" + this.gpkgTableName + "'", whereString);
}
const result = this.connection.get(query, whereArgs);
return result?.count;
}
/**
* Count rows in the table optionally filtered by the parameters specified
* @param {string} where where string
* @param {any[]} whereArgs arguments to filter on
* @return {number} count of objects
*/
countWhere(where: string, whereArgs: DBValue[]): number {
const query = SqliteQueryBuilder.buildCount("'" + this.gpkgTableName + "'", where);
const result = this.connection.get(query, whereArgs);
return result?.count;
}
/**
* Get the min of the column
* @param {string} column column name
* @param {string} [where] where clause
* @param {Object[]} [whereArgs] where args
* @return {number}
*/
minOfColumn(column: string, where?: string, whereArgs?: DBValue[]): number {
return this.connection.minOfColumn("'" + this.gpkgTableName + "'", column, where, whereArgs);
}
/**
* Get the max of the column
* @param {string} column column name
* @param {string} [where] where clause
* @param {Object[]} [whereArgs] where args
* @return {number}
*/
maxOfColumn(column: string, where?: string, whereArgs?: DBValue[]): number {
return this.connection.maxOfColumn("'" + this.gpkgTableName + "'", column, where, whereArgs);
}
/**
* Delete the object passed in. Object is deleted by id
* @param {Object} object object to delete
* @return {number} number of objects deleted
*/
delete(object: T | Record<string, DBValue>): number {
if (typeof (object as any).getId === 'function') {
return this.deleteById((object as any).getId());
}
return this.deleteByMultiId(this.getMultiId(object));
}
/**
* Delete the object specified by the id
* @param {Object} idValue id value
* @return {number} number of objects deleted
*/
deleteById(idValue: DBValue): number {
const where = this.buildPkWhere(idValue);
const whereArgs = this.buildPkWhereArgs(idValue);
return this.connection.delete("'" + this.gpkgTableName + "'", where, whereArgs);
}
/**
* Delete the object specified by the ids
* @param {module:dao/columnValues~ColumnValues} idValues id values
* @return {number} number of objects deleted
*/
deleteByMultiId(idValues: any[]): number {
const where = this.buildPkWhere(idValues);
const whereArgs = this.buildPkWhereArgs(idValues);
return this.connection.delete("'" + this.gpkgTableName + "'", where, whereArgs);
}
/**
* Delete objects that match the query
* @param {string} where where clause
* @param {Object[]} whereArgs where arguments
* @return {number} number of objects deleted
*/
deleteWhere(where: string, whereArgs: DBValue[]): number {
return this.connection.delete("'" + this.gpkgTableName + "'", where, whereArgs);
}
/**
* Delete all objects in the table
* @return {number} number of objects deleted
*/
deleteAll(): number {
return this.connection.delete("'" + this.gpkgTableName + "'", '', []);
}
/**
* Insert the object into the table
* @param {Object} object object to be inserted
* @return {number} id of the inserted object
*/
create(object: T): number {
const sql = SqliteQueryBuilder.buildInsert("'" + this.gpkgTableName + "'", object);
const insertObject = SqliteQueryBuilder.buildUpdateOrInsertObject(object);
return this.connection.insert(sql, insertObject);
}
/**
* Update all rows that match the query
* @param {module:dao/columnValues~ColumnValues} values values to insert
* @param {string} where where clause
* @param {Object[]} whereArgs where arguments
* @return {number} number of objects updated
*/
updateWithValues(
values: Record<string, DBValue>,
where: string,
whereArgs: DBValue[],
): {
changes: number;
lastInsertRowid: number;
} {
const update = SqliteQueryBuilder.buildUpdate("'" + this.gpkgTableName + "'", values, where, whereArgs);
return this.connection.run(update.sql, update.args);
}
/**
* Update the object specified
* @param {Object} object object with updated values
* @return {number} number of objects updated
*/
update(
object: T,
): {
changes: number;
lastInsertRowid: number;
} {
const updateValues = SqliteQueryBuilder.buildUpdateOrInsertObject(object);
let update = SqliteQueryBuilder.buildObjectUpdate("'" + this.gpkgTableName + "'", object);
const multiId = this.getMultiId(object);
if (multiId.length) {
let where = ' where ';
for (let i = 0; i < multiId.length; i++) {
where += '"' + this.idColumns[i] + '" = $' + SqliteQueryBuilder.fixColumnName(this.idColumns[i]);
updateValues[SqliteQueryBuilder.fixColumnName(this.idColumns[i])] = multiId[i];
}
update += where;
}
return this.connection.run(update, updateValues);
}
/**
* Queries for the object by id, and if it exists, updates it, otherwise creates a new object
* @param {Object} object object to update or create
* @return {number} number of objects modified
*/
createOrUpdate(object: T): number {
const existing = this.queryForSameId(object);
if (!existing) {
return this.create(object);
} else {
return this.update(object).changes;
}
}
/**
* Drops this table
* @return {boolean} results of the drop
*/
dropTable(): boolean {
return this.connection.dropTable(this.gpkgTableName);
}
/**
* Drops this table
*/
dropTableWithTableName(tableName: string) {
CoreSQLUtils.dropTable(this.geoPackage.connection, tableName);
}
/**
* Rename the table
* @param {string} newName
*/
rename(newName: string): void {
this.connection.run('ALTER TABLE ' + "'" + this.gpkgTableName + "' RENAME TO '" + newName + "'");
this.gpkgTableName = newName;
}
} | the_stack |
import type {
AtomicName,
AtomicProps,
BodyProps,
BodyShapeType,
BoxProps,
CannonWorkerAPI,
CompoundBodyProps,
ConeTwistConstraintOpts,
ConstraintOptns,
ConstraintTypes,
ContactMaterialOptions,
ConvexPolyhedronArgs,
ConvexPolyhedronProps,
CylinderProps,
DistanceConstraintOpts,
HeightfieldProps,
HingeConstraintOpts,
LockConstraintOpts,
MaterialOptions,
ParticleProps,
PlaneProps,
PointToPointConstraintOpts,
PropValue,
Quad,
RayhitEvent,
RayMode,
RayOptions,
SetOpName,
SphereArgs,
SphereProps,
SpringOptns,
SubscriptionName,
Subscriptions,
SubscriptionTarget,
TrimeshProps,
Triplet,
VectorName,
WheelInfoOptions,
} from '@pmndrs/cannon-worker-api'
import type { DependencyList, MutableRefObject, Ref, RefObject } from 'react'
import { useEffect, useLayoutEffect, useMemo, useRef, useState } from 'react'
import { DynamicDrawUsage, Euler, InstancedMesh, MathUtils, Object3D, Quaternion, Vector3 } from 'three'
import { useDebugContext } from './debug-context'
import type { CannonEvents } from './physics-context'
import { usePhysicsContext } from './physics-context'
/** Setter/subscriber pair for one atomic (scalar) physics property, e.g. mass. */
export type AtomicApi<K extends AtomicName> = {
  set: (value: AtomicProps[K]) => void
  subscribe: (callback: (value: AtomicProps[K]) => void) => () => void
}
/** Imperative API for a body's quaternion; subscribe returns an unsubscribe function. */
export type QuaternionApi = {
  copy: ({ w, x, y, z }: Quaternion) => void
  set: (x: number, y: number, z: number, w: number) => void
  subscribe: (callback: (value: Quad) => void) => () => void
}
/** Imperative API for a 3-component vector property (position, velocity, ...). */
export type VectorApi = {
  copy: ({ x, y, z }: Vector3 | Euler) => void
  set: (x: number, y: number, z: number) => void
  subscribe: (callback: (value: Triplet) => void) => () => void
}
/** Full per-body worker API: one AtomicApi per atomic prop, one VectorApi per vector prop, plus the methods below. */
export type WorkerApi = {
  [K in AtomicName]: AtomicApi<K>
} & {
  [K in VectorName]: VectorApi
} & {
  applyForce: (force: Triplet, worldPoint: Triplet) => void
  applyImpulse: (impulse: Triplet, worldPoint: Triplet) => void
  applyLocalForce: (force: Triplet, localPoint: Triplet) => void
  applyLocalImpulse: (impulse: Triplet, localPoint: Triplet) => void
  applyTorque: (torque: Triplet) => void
  quaternion: QuaternionApi
  rotation: VectorApi
  scaleOverride: (scale: Triplet) => void
  sleep: () => void
  wakeUp: () => void
}
/** WorkerApi extended with `at(index)` to address one instance of an InstancedMesh body. */
export interface PublicApi extends WorkerApi {
  at: (index: number) => WorkerApi
}
/** Return shape of the body hooks: [ref to attach to the mesh, imperative API]. */
export type Api<O extends Object3D> = [RefObject<O>, PublicApi]
// Scratch Object3D. NOTE(review): not referenced in this chunk — presumably
// reused as a temp transform holder further down the file; confirm before removing.
const temp = new Object3D()
/**
 * Normalizes a forwarded ref into a mutable ref object. Object refs pass
 * through untouched; callback refs and null get a locally-owned ref instead.
 */
function useForwardedRef<T>(ref: Ref<T>): MutableRefObject<T | null> {
  // Hook must run unconditionally, even when the fallback is not used.
  const fallbackRef = useRef<T>(null)
  if (ref && typeof ref !== 'function') {
    return ref
  }
  return fallbackRef
}
/** Uppercases the first character of a string, preserving the literal type via Capitalize<T>. */
function capitalize<T extends string>(str: T): Capitalize<T> {
  const head = str.slice(0, 1).toUpperCase()
  const tail = str.slice(1)
  return (head + tail) as Capitalize<T>
}
/**
 * Derives the worker-side uuid for a body from its ref, appending `/index`
 * for instanced bodies. Returns null for callback refs or unset refs.
 */
function getUUID(ref: Ref<Object3D>, index?: number): string | null {
  if (typeof ref === 'function') return null
  if (!ref || !ref.current) return null
  const suffix = index === undefined ? '' : `/${index}`
  return `${ref.current.uuid}${suffix}`
}
// Shared scratch objects; safe because the returned converter runs synchronously.
const e = new Euler()
const q = new Quaternion()
/** Adapts a Triplet (euler rotation) callback so it can consume quaternion (Quad) updates. */
const quaternionToRotation = (callback: (v: Triplet) => void) => {
  return (v: Quad) => {
    q.fromArray(v)
    e.setFromQuaternion(q)
    callback(e.toArray() as Triplet)
  }
}
// Monotonic id source shared by every subscription created through `subscribe`.
let incrementingId = 0
/**
 * Builds a `subscribe(callback)` function for one prop of one body/target.
 * The returned function registers the callback with the worker and yields
 * a matching unsubscribe function.
 */
function subscribe<T extends SubscriptionName>(
  ref: RefObject<Object3D>,
  worker: CannonWorkerAPI,
  subscriptions: Subscriptions,
  type: T,
  index?: number,
  target: SubscriptionTarget = 'bodies',
) {
  return (callback: (value: PropValue<T>) => void) => {
    const id = incrementingId++
    subscriptions[id] = { [type]: callback }
    const uuid = getUUID(ref, index)
    // Callback refs / unset refs yield no uuid; keep the local registration anyway.
    if (uuid) {
      worker.subscribe({ props: { id, target, type }, uuid })
    }
    return () => {
      delete subscriptions[id]
      worker.unsubscribe({ props: id })
    }
  }
}
function prepare(object: Object3D, { position = [0, 0, 0], rotation = [0, 0, 0], userData = {} }: BodyProps) {
object.userData = userData
object.position.set(...position)
object.rotation.set(...rotation)
object.updateMatrix()
}
function setupCollision(
events: CannonEvents,
{ onCollide, onCollideBegin, onCollideEnd }: Partial<BodyProps>,
uuid: string,
) {
events[uuid] = {
collide: onCollide,
collideBegin: onCollideBegin,
collideEnd: onCollideEnd,
}
}
type GetByIndex<T extends BodyProps> = (index: number) => T
type ArgFn<T> = (args: T) => unknown[]
function useBody<B extends BodyProps<unknown[]>, O extends Object3D>(
type: BodyShapeType,
fn: GetByIndex<B>,
argsFn: ArgFn<B['args']>,
fwdRef: Ref<O> = null,
deps: DependencyList = [],
): Api<O> {
const ref = useForwardedRef(fwdRef)
const { events, refs, scaleOverrides, subscriptions, worker } = usePhysicsContext()
const debugApi = useDebugContext()
useLayoutEffect(() => {
if (!ref.current) {
// When the reference isn't used we create a stub
// The body doesn't have a visual representation but can still be constrained
// Yes, this type may be technically incorrect
ref.current = new Object3D() as O
}
const object = ref.current
const currentWorker = worker
const objectCount =
object instanceof InstancedMesh ? (object.instanceMatrix.setUsage(DynamicDrawUsage), object.count) : 1
const uuid =
object instanceof InstancedMesh
? new Array(objectCount).fill(0).map((_, i) => `${object.uuid}/${i}`)
: [object.uuid]
const props: (B & { args: unknown })[] =
object instanceof InstancedMesh
? uuid.map((id, i) => {
const props = fn(i)
prepare(temp, props)
object.setMatrixAt(i, temp.matrix)
object.instanceMatrix.needsUpdate = true
refs[id] = object
debugApi?.add(id, props, type)
setupCollision(events, props, id)
return { ...props, args: argsFn(props.args) }
})
: uuid.map((id, i) => {
const props = fn(i)
prepare(object, props)
refs[id] = object
debugApi?.add(id, props, type)
setupCollision(events, props, id)
return { ...props, args: argsFn(props.args) }
})
// Register on mount, unregister on unmount
currentWorker.addBodies({
props: props.map(({ onCollide, onCollideBegin, onCollideEnd, ...serializableProps }) => {
return { onCollide: Boolean(onCollide), ...serializableProps }
}),
type,
uuid,
})
return () => {
uuid.forEach((id) => {
delete refs[id]
debugApi?.remove(id)
delete events[id]
})
currentWorker.removeBodies({ uuid })
}
}, deps)
const api = useMemo(() => {
const makeAtomic = <T extends AtomicName>(type: T, index?: number) => {
const op: SetOpName<T> = `set${capitalize(type)}`
return {
set: (value: PropValue<T>) => {
const uuid = getUUID(ref, index)
uuid &&
worker[op]({
props: value,
uuid,
} as never)
},
subscribe: subscribe(ref, worker, subscriptions, type, index),
}
}
const makeQuaternion = (index?: number) => {
const type = 'quaternion'
return {
copy: ({ w, x, y, z }: Quaternion) => {
const uuid = getUUID(ref, index)
uuid && worker.setQuaternion({ props: [x, y, z, w], uuid })
},
set: (x: number, y: number, z: number, w: number) => {
const uuid = getUUID(ref, index)
uuid && worker.setQuaternion({ props: [x, y, z, w], uuid })
},
subscribe: subscribe(ref, worker, subscriptions, type, index),
}
}
const makeRotation = (index?: number) => {
return {
copy: ({ x, y, z }: Vector3 | Euler) => {
const uuid = getUUID(ref, index)
uuid && worker.setRotation({ props: [x, y, z], uuid })
},
set: (x: number, y: number, z: number) => {
const uuid = getUUID(ref, index)
uuid && worker.setRotation({ props: [x, y, z], uuid })
},
subscribe: (callback: (value: Triplet) => void) => {
const id = incrementingId++
const target = 'bodies'
const type = 'quaternion'
const uuid = getUUID(ref, index)
subscriptions[id] = { [type]: quaternionToRotation(callback) }
uuid && worker.subscribe({ props: { id, target, type }, uuid })
return () => {
delete subscriptions[id]
worker.unsubscribe({ props: id })
}
},
}
}
const makeVec = (type: VectorName, index?: number) => {
const op: SetOpName<VectorName> = `set${capitalize(type)}`
return {
copy: ({ x, y, z }: Vector3 | Euler) => {
const uuid = getUUID(ref, index)
uuid && worker[op]({ props: [x, y, z], uuid })
},
set: (x: number, y: number, z: number) => {
const uuid = getUUID(ref, index)
uuid && worker[op]({ props: [x, y, z], uuid })
},
subscribe: subscribe(ref, worker, subscriptions, type, index),
}
}
function makeApi(index?: number): WorkerApi {
return {
allowSleep: makeAtomic('allowSleep', index),
angularDamping: makeAtomic('angularDamping', index),
angularFactor: makeVec('angularFactor', index),
angularVelocity: makeVec('angularVelocity', index),
applyForce(force: Triplet, worldPoint: Triplet) {
const uuid = getUUID(ref, index)
uuid && worker.applyForce({ props: [force, worldPoint], uuid })
},
applyImpulse(impulse: Triplet, worldPoint: Triplet) {
const uuid = getUUID(ref, index)
uuid && worker.applyImpulse({ props: [impulse, worldPoint], uuid })
},
applyLocalForce(force: Triplet, localPoint: Triplet) {
const uuid = getUUID(ref, index)
uuid && worker.applyLocalForce({ props: [force, localPoint], uuid })
},
applyLocalImpulse(impulse: Triplet, localPoint: Triplet) {
const uuid = getUUID(ref, index)
uuid && worker.applyLocalImpulse({ props: [impulse, localPoint], uuid })
},
applyTorque(torque: Triplet) {
const uuid = getUUID(ref, index)
uuid && worker.applyTorque({ props: [torque], uuid })
},
collisionFilterGroup: makeAtomic('collisionFilterGroup', index),
collisionFilterMask: makeAtomic('collisionFilterMask', index),
collisionResponse: makeAtomic('collisionResponse', index),
fixedRotation: makeAtomic('fixedRotation', index),
isTrigger: makeAtomic('isTrigger', index),
linearDamping: makeAtomic('linearDamping', index),
linearFactor: makeVec('linearFactor', index),
mass: makeAtomic('mass', index),
material: makeAtomic('material', index),
position: makeVec('position', index),
quaternion: makeQuaternion(index),
rotation: makeRotation(index),
scaleOverride(scale) {
const uuid = getUUID(ref, index)
if (uuid) scaleOverrides[uuid] = new Vector3(...scale)
},
sleep() {
const uuid = getUUID(ref, index)
uuid && worker.sleep({ uuid })
},
sleepSpeedLimit: makeAtomic('sleepSpeedLimit', index),
sleepTimeLimit: makeAtomic('sleepTimeLimit', index),
userData: makeAtomic('userData', index),
velocity: makeVec('velocity', index),
wakeUp() {
const uuid = getUUID(ref, index)
uuid && worker.wakeUp({ uuid })
},
}
}
const cache: { [index: number]: WorkerApi } = {}
return {
...makeApi(undefined),
at: (index: number) => cache[index] || (cache[index] = makeApi(index)),
}
}, [])
return [ref, api]
}
function makeTriplet(v: Vector3 | Triplet): Triplet {
return v instanceof Vector3 ? [v.x, v.y, v.z] : v
}
export function usePlane<O extends Object3D>(
fn: GetByIndex<PlaneProps>,
fwdRef?: Ref<O>,
deps?: DependencyList,
) {
return useBody('Plane', fn, () => [], fwdRef, deps)
}
export function useBox<O extends Object3D>(fn: GetByIndex<BoxProps>, fwdRef?: Ref<O>, deps?: DependencyList) {
const defaultBoxArgs: Triplet = [1, 1, 1]
return useBody('Box', fn, (args = defaultBoxArgs): Triplet => args, fwdRef, deps)
}
export function useCylinder<O extends Object3D>(
fn: GetByIndex<CylinderProps>,
fwdRef?: Ref<O>,
deps?: DependencyList,
) {
return useBody('Cylinder', fn, (args = [] as []) => args, fwdRef, deps)
}
export function useHeightfield<O extends Object3D>(
fn: GetByIndex<HeightfieldProps>,
fwdRef?: Ref<O>,
deps?: DependencyList,
) {
return useBody('Heightfield', fn, (args) => args, fwdRef, deps)
}
export function useParticle<O extends Object3D>(
fn: GetByIndex<ParticleProps>,
fwdRef?: Ref<O>,
deps?: DependencyList,
) {
return useBody('Particle', fn, () => [], fwdRef, deps)
}
export function useSphere<O extends Object3D>(
fn: GetByIndex<SphereProps>,
fwdRef?: Ref<O>,
deps?: DependencyList,
) {
return useBody(
'Sphere',
fn,
(args: SphereArgs = [1]): SphereArgs => {
if (!Array.isArray(args)) throw new Error('useSphere args must be an array')
return [args[0]]
},
fwdRef,
deps,
)
}
export function useTrimesh<O extends Object3D>(
fn: GetByIndex<TrimeshProps>,
fwdRef?: Ref<O>,
deps?: DependencyList,
) {
return useBody<TrimeshProps, O>('Trimesh', fn, (args) => args, fwdRef, deps)
}
export function useConvexPolyhedron<O extends Object3D>(
fn: GetByIndex<ConvexPolyhedronProps>,
fwdRef?: Ref<O>,
deps?: DependencyList,
) {
return useBody<ConvexPolyhedronProps, O>(
'ConvexPolyhedron',
fn,
([vertices, faces, normals, axes, boundingSphereRadius] = []): ConvexPolyhedronArgs<Triplet> => [
vertices && vertices.map(makeTriplet),
faces,
normals && normals.map(makeTriplet),
axes && axes.map(makeTriplet),
boundingSphereRadius,
],
fwdRef,
deps,
)
}
export function useCompoundBody<O extends Object3D>(
fn: GetByIndex<CompoundBodyProps>,
fwdRef?: Ref<O>,
deps?: DependencyList,
) {
return useBody('Compound', fn, (args) => args as unknown[], fwdRef, deps)
}
type ConstraintApi<A extends Object3D, B extends Object3D> = [
RefObject<A>,
RefObject<B>,
{
disable: () => void
enable: () => void
},
]
type HingeConstraintApi<A extends Object3D, B extends Object3D> = [
RefObject<A>,
RefObject<B>,
{
disable: () => void
disableMotor: () => void
enable: () => void
enableMotor: () => void
setMotorMaxForce: (value: number) => void
setMotorSpeed: (value: number) => void
},
]
type SpringApi<A extends Object3D, B extends Object3D> = [
RefObject<A>,
RefObject<B>,
{
setDamping: (value: number) => void
setRestLength: (value: number) => void
setStiffness: (value: number) => void
},
]
type ConstraintORHingeApi<
T extends 'Hinge' | ConstraintTypes,
A extends Object3D,
B extends Object3D,
> = T extends ConstraintTypes ? ConstraintApi<A, B> : HingeConstraintApi<A, B>
function useConstraint<T extends 'Hinge' | ConstraintTypes, A extends Object3D, B extends Object3D>(
type: T,
bodyA: Ref<A>,
bodyB: Ref<B>,
optns: ConstraintOptns | HingeConstraintOpts = {},
deps: DependencyList = [],
): ConstraintORHingeApi<T, A, B> {
const { worker } = usePhysicsContext()
const uuid = MathUtils.generateUUID()
const refA = useForwardedRef(bodyA)
const refB = useForwardedRef(bodyB)
useEffect(() => {
if (refA.current && refB.current) {
worker.addConstraint({
props: [refA.current.uuid, refB.current.uuid, optns],
type,
uuid,
})
return () => worker.removeConstraint({ uuid })
}
}, deps)
const api = useMemo(() => {
const enableDisable = {
disable: () => worker.disableConstraint({ uuid }),
enable: () => worker.enableConstraint({ uuid }),
}
if (type === 'Hinge') {
return {
...enableDisable,
disableMotor: () => worker.disableConstraintMotor({ uuid }),
enableMotor: () => worker.enableConstraintMotor({ uuid }),
setMotorMaxForce: (value: number) => worker.setConstraintMotorMaxForce({ props: value, uuid }),
setMotorSpeed: (value: number) => worker.setConstraintMotorSpeed({ props: value, uuid }),
}
}
return enableDisable
}, deps)
return [refA, refB, api] as ConstraintORHingeApi<T, A, B>
}
export function usePointToPointConstraint<A extends Object3D, B extends Object3D>(
bodyA: Ref<A> = null,
bodyB: Ref<B> = null,
optns: PointToPointConstraintOpts,
deps: DependencyList = [],
) {
return useConstraint('PointToPoint', bodyA, bodyB, optns, deps)
}
export function useConeTwistConstraint<A extends Object3D, B extends Object3D>(
bodyA: Ref<A> = null,
bodyB: Ref<B> = null,
optns: ConeTwistConstraintOpts,
deps: DependencyList = [],
) {
return useConstraint('ConeTwist', bodyA, bodyB, optns, deps)
}
export function useDistanceConstraint<A extends Object3D, B extends Object3D>(
bodyA: Ref<A> = null,
bodyB: Ref<B> = null,
optns: DistanceConstraintOpts,
deps: DependencyList = [],
) {
return useConstraint('Distance', bodyA, bodyB, optns, deps)
}
export function useHingeConstraint<A extends Object3D, B extends Object3D>(
bodyA: Ref<A> = null,
bodyB: Ref<B> = null,
optns: HingeConstraintOpts,
deps: DependencyList = [],
) {
return useConstraint('Hinge', bodyA, bodyB, optns, deps)
}
export function useLockConstraint<A extends Object3D, B extends Object3D>(
bodyA: Ref<A> = null,
bodyB: Ref<B> = null,
optns: LockConstraintOpts,
deps: DependencyList = [],
) {
return useConstraint('Lock', bodyA, bodyB, optns, deps)
}
export function useSpring<A extends Object3D, B extends Object3D>(
bodyA: Ref<A> = null,
bodyB: Ref<B> = null,
optns: SpringOptns,
deps: DependencyList = [],
): SpringApi<A, B> {
const { worker } = usePhysicsContext()
const [uuid] = useState(() => MathUtils.generateUUID())
const refA = useForwardedRef(bodyA)
const refB = useForwardedRef(bodyB)
useEffect(() => {
if (refA.current && refB.current) {
worker.addSpring({
props: [refA.current.uuid, refB.current.uuid, optns],
uuid,
})
return () => {
worker.removeSpring({ uuid })
}
}
}, deps)
const api = useMemo(
() => ({
setDamping: (value: number) => worker.setSpringDamping({ props: value, uuid }),
setRestLength: (value: number) => worker.setSpringRestLength({ props: value, uuid }),
setStiffness: (value: number) => worker.setSpringStiffness({ props: value, uuid }),
}),
deps,
)
return [refA, refB, api]
}
function useRay(
mode: RayMode,
options: RayOptions,
callback: (e: RayhitEvent) => void,
deps: DependencyList = [],
) {
const { worker, events } = usePhysicsContext()
const [uuid] = useState(() => MathUtils.generateUUID())
useEffect(() => {
events[uuid] = { rayhit: callback }
worker.addRay({ props: { ...options, mode }, uuid })
return () => {
worker.removeRay({ uuid })
delete events[uuid]
}
}, deps)
}
export function useRaycastClosest(
options: RayOptions,
callback: (e: RayhitEvent) => void,
deps: DependencyList = [],
) {
useRay('Closest', options, callback, deps)
}
export function useRaycastAny(
options: RayOptions,
callback: (e: RayhitEvent) => void,
deps: DependencyList = [],
) {
useRay('Any', options, callback, deps)
}
export function useRaycastAll(
options: RayOptions,
callback: (e: RayhitEvent) => void,
deps: DependencyList = [],
) {
useRay('All', options, callback, deps)
}
export interface RaycastVehiclePublicApi {
applyEngineForce: (value: number, wheelIndex: number) => void
setBrake: (brake: number, wheelIndex: number) => void
setSteeringValue: (value: number, wheelIndex: number) => void
sliding: {
subscribe: (callback: (sliding: boolean) => void) => void
}
}
function isString(v: unknown): v is string {
return typeof v === 'string'
}
export interface RaycastVehicleProps {
chassisBody: Ref<Object3D>
indexForwardAxis?: number
indexRightAxis?: number
indexUpAxis?: number
wheelInfos: WheelInfoOptions[]
wheels: Ref<Object3D>[]
}
export function useRaycastVehicle<O extends Object3D>(
fn: () => RaycastVehicleProps,
fwdRef: Ref<O> = null,
deps: DependencyList = [],
): [RefObject<O>, RaycastVehiclePublicApi] {
const ref = useForwardedRef(fwdRef)
const { worker, subscriptions } = usePhysicsContext()
useLayoutEffect(() => {
if (!ref.current) {
// When the reference isn't used we create a stub
// The body doesn't have a visual representation but can still be constrained
// Yes, this type may be technically incorrect
ref.current = new Object3D() as O
}
const currentWorker = worker
const uuid: string = ref.current.uuid
const {
chassisBody,
indexForwardAxis = 2,
indexRightAxis = 0,
indexUpAxis = 1,
wheelInfos,
wheels,
} = fn()
const chassisBodyUUID = getUUID(chassisBody)
const wheelUUIDs = wheels.map((ref) => getUUID(ref))
if (!chassisBodyUUID || !wheelUUIDs.every(isString)) return
currentWorker.addRaycastVehicle({
props: [chassisBodyUUID, wheelUUIDs, wheelInfos, indexForwardAxis, indexRightAxis, indexUpAxis],
uuid,
})
return () => {
currentWorker.removeRaycastVehicle({ uuid })
}
}, deps)
const api = useMemo<RaycastVehiclePublicApi>(() => {
return {
applyEngineForce(value: number, wheelIndex: number) {
const uuid = getUUID(ref)
uuid &&
worker.applyRaycastVehicleEngineForce({
props: [value, wheelIndex],
uuid,
})
},
setBrake(brake: number, wheelIndex: number) {
const uuid = getUUID(ref)
uuid && worker.setRaycastVehicleBrake({ props: [brake, wheelIndex], uuid })
},
setSteeringValue(value: number, wheelIndex: number) {
const uuid = getUUID(ref)
uuid &&
worker.setRaycastVehicleSteeringValue({
props: [value, wheelIndex],
uuid,
})
},
sliding: {
subscribe: subscribe(ref, worker, subscriptions, 'sliding', undefined, 'vehicles'),
},
}
}, deps)
return [ref, api]
}
export function useContactMaterial(
materialA: MaterialOptions,
materialB: MaterialOptions,
options: ContactMaterialOptions,
deps: DependencyList = [],
): void {
const { worker } = usePhysicsContext()
const [uuid] = useState(() => MathUtils.generateUUID())
useEffect(() => {
worker.addContactMaterial({
props: [materialA, materialB, options],
uuid,
})
return () => {
worker.removeContactMaterial({ uuid })
}
}, deps)
} | the_stack |
declare var __dirname: any;
declare var require: any;
declare var process: any;
declare var gc: any;
declare var Buffer: any;
import * as nbind from '..';
import * as testLib from './testlib';
import {Int64} from '../dist/int64';
const test = require('tap').test;
const binding = nbind.init<typeof testLib>();
const testModule = binding.lib;
let prepareGC: (obj: any) => void;
var lost: any = null;
const global = (0 || eval)('this');
if(global.gc) {
prepareGC = function(obj) { gc(); }
} else {
console.warn('Garbage collector is not accessible. Faking it...');
console.warn('Run Node.js with --expose-gc to disable this warning.');
console.warn('');
prepareGC = function(obj) { lost = obj; }
global.gc = function() {
if(lost) lost.free();
lost = null;
}
}
binding.toggleLightGC(true);
class Coord {
constructor(x: number, y: number) {
this.x = x;
this.y = y;
}
fromJS(output: (x: number, y: number) => void) {
output(this.x, this.y);
}
x: number;
y: number;
}
type CoordJS = Coord;
declare module './testlib' {
interface Coord extends CoordJS {}
}
test('Methods and primitive types', function(t: any) {
var Type = testModule.PrimitiveMethods;
(function() {
var obj = new Type(0);
t.strictEqual(Type.negateStatic(false), true);
t.strictEqual(obj.negate(false), true);
t.strictEqual(Type.incrementIntStatic(1), 2);
t.strictEqual(obj.incrementInt(1), 2);
t.type(Type.incrementStateStatic(), 'undefined');
t.strictEqual(Type.getStateStatic(), 1);
t.type(obj.incrementState(), 'undefined');
t.strictEqual(obj.getState(), 2);
t.strictEqual(Type.strLengthStatic('foo'), 3);
t.strictEqual(obj.strLength('foobar'), 6);
t.strictEqual(Type.catenateStatic('foo', 'bar'), 'foobar');
t.strictEqual(obj.catenate('Java', 'Script'), 'JavaScript');
t.strictEqual(obj.catenate2('Java', 'Script'), 'JavaScript');
t.strictEqual(Type.strLengthStatic(123 as any as string), 3);
obj = new Type(0, 'quux');
t.strictEqual(Type.getStringStatic(), 'quux');
t.strictEqual(obj.getString(), 'quux');
})();
t.end();
});
test('Constructors and destructors', function(t: any) {
const Type = testModule.PrimitiveMethods;
(function() {
let obj = new Type();
t.strictEqual(Type.getStateStatic(), 42);
obj = new Type(54);
t.strictEqual(Type.getStateStatic(), 54);
// Constructing with or without "new" operator should work identically.
obj = (Type as any as (p0?: number) => testLib.PrimitiveMethods)();
t.strictEqual(Type.getStateStatic(), 42);
prepareGC(obj);
obj = (Type as any as (p0?: number) => testLib.PrimitiveMethods)(54);
t.strictEqual(Type.getStateStatic(), 54);
gc();
// Destructor should have incremented state.
t.strictEqual(Type.getStateStatic(), 55);
prepareGC(obj);
t.strictEqual(obj.negate(false), true);
})();
gc();
// Destructor should have incremented state again.
t.strictEqual(Type.getStateStatic(), 56);
t.end();
});
test('Functions', function(t: any) {
t.strictEqual(testModule.incrementInt(1), 2);
t.strictEqual(testModule.decrementInt(2), 1);
t.end();
});
test('Getters and setters', function(t: any) {
const Type = testModule.GetterSetter;
const obj = new Type();
t.strictEqual(obj.x, 1);
t.strictEqual(obj.y, 2);
t.strictEqual(obj.z, 3);
t.strictEqual(obj.t, 'foobar');
t.strictEqual(obj.XYZ, 6);
obj.y = 4;
obj.z = 5;
obj.t = 'foo';
t.strictEqual(obj.y, 4);
t.strictEqual(obj.z, 5);
t.strictEqual(obj.t, 'foo');
// TODO: Add a property taking an object and check that a wrong type throws.
// t.throws(function() {
// obj.t = 0;
// }, {message: 'Type mismatch'});
t.end();
});
test('Callbacks', function(t: any) {
const Type = testModule.Callback;
t.type(Type.callVoidFunc(function() {}), 'undefined');
t.strictEqual(Type.callNegate(function(x: boolean) {return(!x);}, false), true);
t.strictEqual(Type.callNegate2(function(x: boolean) {return(!x);}, false), true);
t.strictEqual(Type.callAddInt(function(x: number, y: number) {return(x + y);}, 40, 2), 42);
t.strictEqual(Type.callAddInt2(function(x: number, y: number) {return(x + y);}, 40, 2), 42);
t.strictEqual(Type.callIncrementDouble(function(x: number) {return(x + 0.25);}, 0.5), 0.75);
t.strictEqual(Type.callCatenate(function(x: string, y: string) {return(x + y);}, 'foo', 'bar'), 'foobar');
t.strictEqual(Type.callCatenate2(function(x: string, y: string) {return(x + y);}, 'foo', 'bar'), 'foobar');
t.throws(function() {
Type.callNegate({} as any as (x: boolean) => boolean, true);
}, {message: 'Type mismatch'});
Type.callCStrings(function(foo: string, bar: string, baz: string) {
t.strictDeepEqual([foo, bar, baz], ['foo', 'bar', 'baz']);
});
if(process.versions.modules > 14) {
// Node 0.12 and earlier seem unable to catch exceptions from callbacks.
t.throws(function() {
Type.callNegate(function(x: boolean) { throw(new Error('Test error')); }, true);
}, {message: 'Test error'});
}
t.end();
});
test('Value objects', function(t: any) {
const Type = testModule.Value;
// t.throws(function() {
// Type.getCoord()
// }, {message: 'Value type JavaScript class is missing or not registered'});
t.type(Type.getCoord(), 'object');
binding.bind('Coord', Coord);
var xy = Type.getCoord();
t.strictEqual(xy.x, 60);
t.strictEqual(xy.y, 25);
xy.fromJS(function() {});
xy = Type.callWithCoord(function(a: Coord, b: Coord) {
t.strictEqual(a.x, xy.x);
t.strictEqual(a.y, xy.y);
t.strictEqual(b.x, 123);
t.strictEqual(b.y, 456);
// TODO: if we don't return a Coord here as expected by the C++ side, it crashes!
return(a);
}, xy, new Coord(123, 456));
t.strictEqual(xy.x, 60);
t.strictEqual(xy.y, 25);
t.end();
});
test('Pointers and references', function(t: any) {
const Type = testModule.Reference;
const own = new Type();
const value = Type.getValue();
const ptr = Type.getPtr();
const ref = Type.getRef();
const constPtr = Type.getConstPtr();
const constRef = Type.getConstRef();
const types = [ own, value, ptr, ref, constPtr, constRef ];
for(var i = 0; i < types.length; ++i) {
t.type(Type.readPtr(types[i]!), 'undefined');
t.type(Type.readRef(types[i]!), 'undefined');
if(types[i] == constPtr || types[i] == constRef) {
t.throws(function() {
Type.writePtr(types[i]!);
}, {message: 'Passing a const value as a non-const argument'});
t.throws(function() {
Type.writeRef(types[i]!);
}, {message: 'Passing a const value as a non-const argument'});
} else {
t.type(Type.writePtr(types[i]!), 'undefined');
t.type(Type.writeRef(types[i]!), 'undefined');
}
}
t.type(ptr!.read(), 'undefined');
t.type(ref.read(), 'undefined');
t.type(ptr!.write(), 'undefined');
t.type(ref.write(), 'undefined');
t.type(constPtr.read(), 'undefined');
t.type(constRef.read(), 'undefined');
t.throws(function() {
constPtr.write();
}, {message: 'Calling a non-const method on a const object'});
t.throws(function() {
constRef.write();
}, {message: 'Calling a non-const method on a const object'});
t.end();
});
test('Arrays', function(t: any) {
const ArrayType = testModule.Array;
const VectorType = testModule.Vector;
const arr = [13, 21, 34];
t.strictDeepEqual(ArrayType.getInts(), arr);
t.strictDeepEqual(VectorType.getInts(), arr);
t.strictDeepEqual(ArrayType.callWithInts(function(a: number[]) {
t.strictDeepEqual(a, arr);
return(arr);
}, arr), arr);
t.strictDeepEqual(VectorType.callWithInts(function(a: number[]) {
t.strictDeepEqual(a, arr);
return(arr);
}, arr), arr);
t.throws(function() {
ArrayType.callWithInts(function(a: number[]) {}, [1, 2]);
}, {message: 'Type mismatch'});
const arr2 = ['foo', 'bar', 'baz'];
t.strictDeepEqual(VectorType.callWithStrings(function(a: string[]) {
t.strictDeepEqual(a, arr2);
return(arr2);
}, arr2), arr2);
t.end();
});
test('Nullable', function(t: any) {
const Type = testModule.Nullable;
Type.foo(Type.getCoord()!);
t.strictEqual(Type.getNull(), null);
t.throws(function() {
Type.foo(null as any as testLib.Coord);
}, {message: 'Type mismatch'});
Type.bar(null);
t.end();
});
test('Strict conversion policy', function(t: any) {
const typeList = [ testModule, testModule.StrictStatic, new testModule.Strict() ];
for(let i = 0; i < typeList.length; ++i) {
var Type = typeList[i];
t.strictEqual(Type.testInt(1), 1);
t.strictEqual(Type.testBool(true), true);
t.strictEqual(Type.testString('foo'), 'foo');
t.strictEqual(Type.testCString('foo'), 'foo');
t.strictEqual(Type.testInt('123' as any as number), 123);
t.strictEqual(Type.testBool(0 as any as boolean), false);
t.strictEqual(Type.testString(123 as any as string), '123');
t.strictEqual(Type.testCString(123 as any as string), '123');
t.strictEqual(Type.strictInt(1), 1);
t.strictEqual(Type.strictBool(true), true);
t.strictEqual(Type.strictString('foo'), 'foo');
t.strictEqual(Type.strictCString('foo'), 'foo');
t.throws(function() {
Type.strictInt('123' as any as number);
}, {message: 'Type mismatch'});
t.throws(function() {
Type.strictBool(0 as any as boolean);
}, {message: 'Type mismatch'});
t.throws(function() {
Type.strictString(123 as any as string);
}, {message: 'Type mismatch'});
t.throws(function() {
Type.strictCString(123 as any as string);
}, {message: 'Type mismatch'});
}
t.end();
});
test('Inheritance', function(t: any) {
const A = testModule.InheritanceA;
const B = testModule.InheritanceB;
const C = testModule.InheritanceC;
const D = testModule.InheritanceD;
const d = new D();
t.ok(d instanceof A);
t.ok(d instanceof B || d instanceof C);
t.ok(d instanceof D);
t.ok(d.a instanceof A);
t.ok(d.b instanceof A);
t.ok(d.c instanceof A);
t.ok(d.b instanceof B);
t.ok(d.c instanceof C);
t.ok(d.b.a instanceof A);
t.ok(d.c.a instanceof A);
t.throws(function() {
d.useA.call(new Date());
}, {message: 'Type mismatch'});
t.strictEqual(d.useA(), 1);
t.strictEqual(d.useB(), 2);
t.strictEqual(d.useC(), 3);
t.strictEqual(d.useD(), 4);
t.strictEqual(d.a.useA(), 1);
t.strictEqual(d.b.useB(), 2);
t.strictEqual(d.c.useC(), 3);
t.strictEqual(d.b.a.useA(), 1);
t.strictEqual(d.c.a.useA(), 1);
t.strictEqual(A.staticA(d), 1);
t.strictEqual(A.staticA(d.b), 1);
t.strictEqual(A.staticA(d.c), 1);
t.strictEqual(B.staticB(d), 2);
t.strictEqual(B.staticB(d.b), 2);
t.throws(function() {
B.staticB(d.c as any);
}, {message: 'Type mismatch'});
t.strictEqual(C.staticC(d), 3);
t.strictEqual(C.staticC(d.c), 3);
t.throws(function() {
C.staticC(d.b as any);
}, {message: 'Type mismatch'});
t.end();
});
test('64-bit integers', function(t: any) {
const Type = testModule.PrimitiveMethods;
let lastDigit: string;
let x = Type.ftoul(42);
let y = Type.ftol(42);
let z = Type.ftol(-42);
t.strictEqual(Type.ultof(x), 42);
t.strictEqual(Type.ltof(y), 42);
t.strictEqual(Type.ltof(z), -42);
for(var j = 0; j < 2; ++j) {
for(var n = 2, i = 1; i < 63; ++i) {
x = Type.ftoull(n);
y = Type.ftoll(n);
z = Type.ftoll(-n);
t.strictEqual(Type.ulltof(x), n);
t.strictEqual(Type.lltof(y), n);
t.strictEqual(Type.lltof(z), -n);
if(j) {
lastDigit = '5137'.charAt(i & 3);
t.strictEqual(('' + x).substr(-1), lastDigit);
t.strictEqual(('' + y).substr(-1), lastDigit);
t.strictEqual(('' + z).substr(-1), lastDigit);
}
n *= 2;
}
binding.bind('Int64', Int64);
}
t.end();
});
test('Overloaded functions', function(t: any) {
const Type = testModule.Overload;
const obj = new Type();
t.strictEqual(obj.test(0), 1);
t.strictEqual(obj.test2(0, 0), 2);
t.strictEqual(obj.testConst(0), 1);
t.strictEqual(obj.testConst2(0, 0), 2);
t.strictEqual(Type.testStatic(0), 1);
t.strictEqual(Type.testStatic2(0, 0), 2);
t.strictEqual(testModule.multiTest(0), 1);
t.strictEqual(testModule.multiTest2(0, 0), 2);
t.end();
});
test('Smart pointers', function(t: any) {
const Type = testModule.Smart;
const obj = Type.make(31337);
obj!.test();
Type.testStatic(obj!);
Type.testShared(obj!);
obj!.free!();
// obj.free();
t.end();
});
test('Buffers', function(t: any) {
const Type = testModule.Buffer;
let buf: any;
if(ArrayBuffer && (typeof(process) != 'object' || typeof(process.versions) != 'object' || process.versions.modules >= 14)) {
buf = new ArrayBuffer(16);
var view = new Uint8Array(buf);
for(var i = 0; i < 16; ++i) view[i] = i;
t.strictEqual(Type.sum(buf), 120);
t.strictEqual(Type.sum(view), 120);
t.strictEqual(Type.sum(view.subarray(2, 12)), 65);
t.strictEqual(Type.sum(new Uint8Array(buf, 2, 12)), 90);
Type.mul2(buf);
t.strictEqual(Type.sum(buf), 240);
Type.mul2(view);
t.strictEqual(Type.sum(view), 480);
}
if(Buffer) {
buf = Buffer.alloc ? Buffer.alloc(16) : new Buffer(16);
for(var i = 0; i < 16; ++i) buf[i] = i;
t.strictEqual(Type.sum(buf), 120);
Type.mul2(buf);
t.strictEqual(Type.sum(buf), 240);
}
t.end();
});
test('Reflection', function(t: any) {
const fs = require('fs');
const path = require('path').resolve(__dirname, 'reflect.txt');
const reflect = new (require('../dist/reflect.js').Reflect)(binding);
t.strictEqual(
reflect.dumpPseudo().replace(/int64/g, 'int32'),
fs.readFileSync(path, 'utf-8').replace(/int64/g, 'int32')
);
t.end();
}); | the_stack |
* @module OrbitGT
*/
//package orbitgt.spatial.ecrs;
/* Numeric width aliases ported from the original (Java) code base.
 * TypeScript has only the 'number' type, so these aliases are purely
 * documentary: they record the intended storage width/precision of a
 * declaration but add no runtime or compile-time checking. */
type int8 = number;
type int16 = number;
type int32 = number;
type float32 = number;
type float64 = number;
import { ASystem } from "../../system/runtime/ASystem";
import { Numbers } from "../../system/runtime/Numbers";
import { Registry } from "./Registry";
/**
* Class Unit defines a unit of measure.
*
* @version 1.0 July 2005
*/
/** @internal */
export class Unit {
    /** The "meter" unit code (length) */
    public static readonly METER: int32 = 9001;
    /** The "foot" unit code (length) */
    public static readonly FOOT: int32 = 9002;
    /** The "footUS" (US survey foot) unit code (length) */
    public static readonly FOOT_US: int32 = 9003;
    /** The "radian" unit code (angle) */
    public static readonly RADIAN: int32 = 9101;
    /** The "degree" unit code (angle) */
    public static readonly DEGREE: int32 = 9102;
    /** The "unity" unit code (scale) */
    public static readonly UNITY: int32 = 9201;
    /** Define the custom unit 'sexagesimal DMS' (9110) */
    private static readonly _UNIT_DMS: int32 = 9110;
    /** Define the custom unit 'sexagesimal DM' (9111) */
    private static readonly _UNIT_DM: int32 = 9111;
    /** The type string for angular units */
    private static readonly _TYPE_ANGLE: string = "angle";
    /** The type string for length units */
    private static readonly _TYPE_LENGTH: string = "length";
    /** The type string for time units */
    private static readonly _TYPE_TIME: string = "time";
    /** The type string for scale units */
    private static readonly _TYPE_SCALE: string = "scale";
    /** The numeric code identifying this unit */
    private _code: int32;
    /** The human-readable name of this unit */
    private _name: string;
    /** The short abbreviation of this unit */
    private _abbreviation: string;
    /** The type string (one of the _TYPE_* values) */
    private _type: string;
    /** The code of the unit this one converts towards */
    private _targetUnitCode: int32;
    /** The B conversion factor (presumably part of a factor-based conversion to the target unit; confirm against toStandard) */
    private _b: float64;
    /** The C conversion factor (presumably part of a factor-based conversion to the target unit; confirm against toStandard) */
    private _c: float64;
    /** The resolved target unit, looked up lazily from the Registry (null until first needed) */
    private _target: Unit;
/**
 * Create a new unit of measure.
 *
 * @param code the numeric code identifying the unit.
 * @param name the human-readable unit name.
 * @param abbreviation the short label of the unit.
 * @param type the unit category (LENGTH, ANGLE or SCALE).
 * @param targetUnitCode the code of the unit this one converts towards.
 * @param b the B conversion factor.
 * @param c the C conversion factor.
 */
public constructor(code: int32, name: string, abbreviation: string, type: string, targetUnitCode: int32, b: float64, c: float64) {
    /* Keep all construction parameters as-is */
    this._code = code;
    this._name = name;
    this._abbreviation = abbreviation;
    this._type = type;
    this._targetUnitCode = targetUnitCode;
    this._b = b;
    this._c = c;
    /* The target unit is resolved lazily, so start without one */
    this._target = null;
}
/**
* Get the code.
* @return the code.
*/
public getCode(): int32 {
return this._code;
}
/**
* Get the name.
* @return the name.
*/
public getName(): string {
return this._name;
}
/**
* Get the abbreviation.
* @return the abbreviation.
*/
public getAbbreviation(): string {
return this._abbreviation;
}
/**
* Get the type (LENGTH, ANGLE or SCALE).
* @return the type.
*/
public getType(): string {
return this._type;
}
/**
* Check is this is an angle type.
* @return true for an angle type.
*/
public isTypeAngle(): boolean {
return (this._type === Unit._TYPE_ANGLE);
}
/**
* Check is this is a length type.
* @return true for a length type.
*/
public isTypeLength(): boolean {
return (this._type === Unit._TYPE_LENGTH);
}
/**
* Get the target unit code.
* @return the target unit code.
*/
public getTargetUnitCode(): int32 {
return this._targetUnitCode;
}
/**
* Get the B factor.
* @return the B factor.
*/
public getB(): float64 {
return this._b;
}
/**
* Get the C factor.
* @return the C factor.
*/
public getC(): float64 {
return this._c;
}
/**
* Check if another unit is compatible with this one.
* @param other the other unit.
* @return true if compatible.
*/
public isCompatible(other: Unit): boolean {
if (other._code == this._code) return true;
if (other._targetUnitCode != this._targetUnitCode) return false;
if (other._b != this._b) return false;
if (other._c != this._c) return false;
return true;
}
/**
* Convert to the standard unit.
* @param value the value.
* @return the standard value.
*/
public toStandard(value: float64): float64 {
/* Get the target unit */
if (this._target == null) this._target = Registry.getUnit(this._targetUnitCode);
/* Already standard ? */
if (this._code == this._targetUnitCode) return value;
/* Check for a custom unit */
else if (this._code == Unit._UNIT_DMS) return this._target.toStandard(Unit.dmsToDeg(value));
else if (this._code == Unit._UNIT_DM) return this._target.toStandard(Unit.dmToDeg(value));
/* Default to scale */
else return this._target.toStandard(value * this._b / this._c);
}
/**
* Convert to the standard unit.
* @param value the value.
* @return the standard value.
*/
public to(value: float64): float64 {
return this.toStandard(value);
}
/**
* Convert from the standard unit.
* @param value the standard value.
* @return the value.
*/
public fromStandard(value: float64): float64 {
/* Get the target unit */
if (this._target == null) this._target = Registry.getUnit(this._targetUnitCode);
/* Already standard ? */
if (this._code == this._targetUnitCode) return value;
/* Check for a custom unit */
else if (this._code == Unit._UNIT_DMS) return Unit.degToDms(this._target.fromStandard(value));
else if (this._code == Unit._UNIT_DM) return Unit.degToDm(this._target.fromStandard(value));
/* Default to scale */
else return this._target.fromStandard(value) * this._c / this._b;
}
/**
* Convert from the standard unit.
* @param value the standard value.
* @return the value.
*/
public from(value: float64): float64 {
return this.fromStandard(value);
}
/**
* Convert a sexagesimal DMS value to fractional degrees.
* @param dms the sexagesimal DMS value.
* @return the fractional degrees.
*/
public static dmsToDeg(dms: float64): float64 {
/* Make sure we have an non-negative number */
let neg: boolean = (dms < 0.0);
if (neg) dms *= -1.0;
/* Get the seconds */
let seconds: float64 = (10000.0 * dms);
let iseconds: int32 = Math.floor(seconds);
let fraction: float64 = (seconds - iseconds);
/* Catch rounding errors (like 6.1000 to 6.099999999999999) */
if (Math.abs(fraction - 1.0) < 1.0e-4) { iseconds++; fraction = 0.0; }
/* Get the degrees */
let deg: int32 = Numbers.divInt(iseconds, 10000);
ASystem.assertNot((deg < -360) || (deg > 360), "Invalid deg (" + deg + ") in DMS " + dms);
iseconds -= (deg * 10000);
/* Get the minutes */
let min: int32 = Numbers.divInt(iseconds, 100);
ASystem.assertNot((min < 0) || (min > 59), "Invalid min (" + min + ") in DMS " + dms);
iseconds -= (min * 100);
/* Get the seconds */
let sec: int32 = (iseconds);
ASystem.assertNot((sec < 0) || (sec > 59), "Invalid sec (" + sec + ") in DMS " + dms);
/* Check the fraction */
ASystem.assertNot((fraction < 0.0) || (fraction >= 1.0), "Invalid fraction (" + fraction + ") in DMS " + dms);
/* Convert to fractional degrees */
let fdeg: float64 = deg + (min / 60.0) + (sec / 3600.0) + (fraction / 3600.0);
if (neg) fdeg *= -1.0;
/* Return the degrees */
return fdeg;
}
/**
* Convert a sexagesimal DMS value to fractional radians.
* @param dms the sexagesimal DMS value.
* @return the fractional radians.
*/
public static dmsToRad(dms: float64): float64 {
return Unit.dmsToDeg(dms) * Math.PI / 180.0;
}
/**
* Convert fractional degrees to a sexagesimal DMS value.
* @param deg the fractional degrees.
* @return the sexagesimal DMS value.
*/
public static degToDms(deg: float64): float64 {
/* Make sure we have an non-negative number */
let neg: boolean = (deg < 0.0);
if (neg) deg *= -1.0;
/* Get the integer degrees */
let ideg: float64 = Math.floor(deg);
deg -= ideg;
/* Get the minutes */
deg *= 60.0;
let min: float64 = Math.floor(deg);
deg -= min;
/* Get the seconds */
deg *= 60.0;
let sec: float64 = deg;
/* Convert to DMS */
let dms: float64 = ideg + (min / 100.0) + (sec / 10000.0);
if (neg) dms *= -1.0;
/* Return the DMS */
return dms;
}
/**
* Convert a sexagesimal DM value to fractional degrees.
* @param dm the sexagesimal DM value.
* @return the fractional degrees.
*/
public static dmToDeg(dm: float64): float64 {
/* Make sure we have an non-negative number */
let neg: boolean = (dm < 0.0);
if (neg) dm *= -1.0;
/* Get the seconds */
let minutes: float64 = (100.0 * dm);
let iminutes: int32 = Math.floor(minutes);
let fraction: float64 = (minutes - iminutes);
/* Catch rounding errors (like 6.1000 to 6.099999999999999) */
if (Math.abs(fraction - 1.0) < 1.0e-4) { iminutes++; fraction = 0.0; }
/* Get the degrees */
let deg: int32 = Numbers.divInt(iminutes, 100);
ASystem.assertNot((deg < -180) || (deg > 180), "Invalid deg (" + deg + ") in DM " + dm);
/* Get the minutes */
let min: int32 = (iminutes) % 100;
ASystem.assertNot((min < 0) || (min > 59), "Invalid min (" + min + ") in DM " + dm);
/* Check the fraction */
ASystem.assertNot((fraction < 0.0) || (fraction >= 1.0), "Invalid fraction (" + fraction + ") in DM " + dm);
/* Convert to fractional degrees */
let fdeg: float64 = deg + (min / 60.0) + (fraction / 60.0);
if (neg) fdeg *= -1.0;
/* Return the degrees */
return fdeg;
}
/**
* Convert fractional degrees to a sexagesimal DM value.
* @param deg the fractional degrees.
* @return the sexagesimal DM value.
*/
public static degToDm(deg: float64): float64 {
/* Make sure we have an non-negative number */
let neg: boolean = (deg < 0.0);
if (neg) deg *= -1.0;
/* Get the integer degrees */
let ideg: float64 = Math.floor(deg);
deg -= ideg;
/* Get the minutes */
deg *= 60.0;
let min: float64 = deg;
/* Convert to DMS */
let dms: float64 = ideg + (min / 100.0);
if (neg) dms *= -1.0;
/* Return the DMS */
return dms;
}
} | the_stack |
import {
copyAssets,
setRepo,
getUserName,
getUserEmail,
setCommitAuthor,
getCommitMessage
} from '../src/git-utils';
import {getInputs} from '../src/get-inputs';
import {Inputs} from '../src/interfaces';
import {getWorkDirName, createDir} from '../src/utils';
import {CmdResult} from '../src/interfaces';
import * as exec from '@actions/exec';
import {cp, rm} from 'shelljs';
import path from 'path';
import fs from 'fs';
const testRoot = path.resolve(__dirname);

/**
 * Create a unique scratch work-directory name for a test, prefixed with the
 * current timestamp so repeated runs do not collide.
 * @param name suffix identifying the test.
 * @returns the work directory path.
 */
async function createTestDir(name: string): Promise<string> {
  // Date.now() replaces the needless `new Date().getTime()` round trip,
  // and the redundant `return await` is dropped (the promise is returned as-is).
  return getWorkDirName(`${Date.now()}_${name}`);
}
// Give every test a fresh module registry and the GitHub context variables
// the code under test reads.
beforeEach(() => {
  jest.resetModules();
  process.env.GITHUB_ACTOR = 'default-octocat';
  process.env.GITHUB_REPOSITORY = 'owner/repo';
});

// Remove the simulated GitHub context so tests do not leak state.
afterEach(() => {
  delete process.env.GITHUB_ACTOR;
  delete process.env.GITHUB_REPOSITORY;
});
describe('copyAssets', () => {
  let gitTempDir = '';
  // Resolve and initialize the temporary git repository inside beforeAll so
  // the async setup is actually awaited. The previous fire-and-forget async
  // IIFE could race the hook and leave gitTempDir as '' when beforeAll ran.
  beforeAll(async () => {
    gitTempDir = await getWorkDirName(`${Date.now()}_git`);
    await createDir(gitTempDir);
    process.chdir(gitTempDir);
    await exec.exec('git', ['init']);
  });
  test('copy assets from publish_dir to root, delete .github', async () => {
    const publishDir = await createTestDir('src');
    const destDir = await createTestDir('dst');
    cp('-Rf', path.resolve(testRoot, 'fixtures/publish_dir_1'), publishDir);
    cp('-Rf', gitTempDir, destDir);
    await copyAssets(publishDir, destDir, '.github');
    expect(fs.existsSync(path.resolve(destDir, '.github'))).toBeFalsy();
    expect(fs.existsSync(path.resolve(destDir, 'index.html'))).toBeTruthy();
    expect(fs.existsSync(path.resolve(destDir, 'assets/lib.css'))).toBeTruthy();
    rm('-rf', publishDir, destDir);
  });
  test('copy assets from publish_dir to root, delete .github,main.js', async () => {
    const publishDir = await createTestDir('src');
    const destDir = await createTestDir('dst');
    cp('-Rf', path.resolve(testRoot, 'fixtures/publish_dir_1'), publishDir);
    cp('-Rf', gitTempDir, destDir);
    await copyAssets(publishDir, destDir, '.github,main.js');
    expect(fs.existsSync(path.resolve(destDir, '.github'))).toBeFalsy();
    expect(fs.existsSync(path.resolve(destDir, 'index.html'))).toBeTruthy();
    expect(fs.existsSync(path.resolve(destDir, 'main.js'))).toBeFalsy();
    expect(fs.existsSync(path.resolve(destDir, 'assets/lib.css'))).toBeTruthy();
    expect(fs.existsSync(path.resolve(destDir, 'assets/lib.js'))).toBeTruthy();
    rm('-rf', publishDir, destDir);
  });
  test('copy assets from publish_dir to root, delete nothing', async () => {
    const publishDir = await createTestDir('src');
    const destDir = await createTestDir('dst');
    cp('-Rf', path.resolve(testRoot, 'fixtures/publish_dir_root'), publishDir);
    cp('-Rf', gitTempDir, destDir);
    await copyAssets(publishDir, destDir, '');
    expect(fs.existsSync(path.resolve(destDir, '.github'))).toBeTruthy();
    expect(fs.existsSync(path.resolve(destDir, 'index.html'))).toBeTruthy();
    expect(fs.existsSync(path.resolve(destDir, 'main.js'))).toBeTruthy();
    expect(fs.existsSync(path.resolve(destDir, 'assets/lib.css'))).toBeTruthy();
    expect(fs.existsSync(path.resolve(destDir, 'assets/lib.js'))).toBeTruthy();
    rm('-rf', publishDir, destDir);
  });
  test('copy assets from root to root, delete .github', async () => {
    const publishDir = await createTestDir('src');
    const destDir = await createTestDir('dst');
    cp('-Rf', path.resolve(testRoot, 'fixtures/publish_dir_root'), publishDir);
    cp('-Rf', gitTempDir, destDir);
    cp('-Rf', gitTempDir, publishDir);
    await copyAssets(publishDir, destDir, '.github');
    expect(fs.existsSync(path.resolve(destDir, '.github'))).toBeFalsy();
    expect(fs.existsSync(path.resolve(destDir, 'index.html'))).toBeTruthy();
    expect(fs.existsSync(path.resolve(destDir, 'assets/lib.css'))).toBeTruthy();
    rm('-rf', publishDir, destDir);
  });
  test('copy assets from root to root, delete nothing', async () => {
    const publishDir = await createTestDir('src');
    const destDir = await createTestDir('dst');
    cp('-Rf', path.resolve(testRoot, 'fixtures/publish_dir_root'), publishDir);
    cp('-Rf', gitTempDir, destDir);
    cp('-Rf', gitTempDir, publishDir);
    await copyAssets(publishDir, destDir, '');
    expect(fs.existsSync(path.resolve(destDir, '.github'))).toBeTruthy();
    expect(fs.existsSync(path.resolve(destDir, 'index.html'))).toBeTruthy();
    expect(fs.existsSync(path.resolve(destDir, 'assets/lib.css'))).toBeTruthy();
    rm('-rf', publishDir, destDir);
  });
  test.todo('copy assets from root to subdir, delete .github');
  test.todo('copy assets from root to subdir, delete .github,main.js');
  test.todo('copy assets from root to subdir, delete nothing');
});
describe('setRepo()', () => {
  test('throw error destination_dir should be a relative path', async () => {
    // Minimal set of action inputs; an absolute destination_dir must be rejected.
    process.env['INPUT_GITHUB_TOKEN'] = 'test_github_token';
    process.env['INPUT_PUBLISH_BRANCH'] = 'gh-pages';
    process.env['INPUT_PUBLISH_DIR'] = 'public';
    process.env['INPUT_DESTINATION_DIR'] = '/subdir';
    // Other supported inputs, not needed by this test:
    // INPUT_EXTERNAL_REPOSITORY, INPUT_ALLOW_EMPTY_COMMIT, INPUT_KEEP_FILES,
    // INPUT_FORCE_ORPHAN, INPUT_USER_NAME, INPUT_USER_EMAIL,
    // INPUT_COMMIT_MESSAGE, INPUT_FULL_COMMIT_MESSAGE, INPUT_TAG_NAME,
    // INPUT_TAG_MESSAGE, INPUT_DISABLE_NOJEKYLL, INPUT_CNAME
    process.env['INPUT_EXCLUDE_ASSETS'] = '.github';
    const inps: Inputs = getInputs();
    const remoteURL = 'https://x-access-token:pat@github.com/actions/pages.git';
    const workDir = await getWorkDirName(`${new Date().getTime()}`);
    await expect(setRepo(inps, remoteURL, workDir)).rejects.toThrowError(
      'destination_dir should be a relative path'
    );
  });
});
describe('getUserName()', () => {
test('get default git user name', () => {
const userName = '';
const test = getUserName(userName);
expect(test).toMatch('default-octocat');
});
test('get custom git user name', () => {
const userName = 'custom-octocat';
const test = getUserName(userName);
expect(test).toMatch(userName);
});
});
describe('getUserEmail()', () => {
test('get default git user email', () => {
const userEmail = '';
const test = getUserEmail(userEmail);
expect(test).toMatch('default-octocat@users.noreply.github.com');
});
test('get custom git user email', () => {
const userEmail = 'custom-octocat@github.com';
const test = getUserEmail(userEmail);
expect(test).toMatch(userEmail);
});
});
describe('setCommitAuthor()', () => {
  let workDirName = '';
  // Resolve the shared work directory on first use inside beforeEach so the
  // async setup is actually awaited. The previous fire-and-forget async IIFE
  // could race the hook and leave workDirName as '' for the first test.
  beforeEach(async () => {
    if (!workDirName) {
      workDirName = await getWorkDirName(`${Date.now()}`);
    }
    await createDir(workDirName);
    process.chdir(workDirName);
    await exec.exec('git', ['init']);
  });
  test('get default commit author', async () => {
    const userName = '';
    const userEmail = '';
    const result: CmdResult = {
      exitcode: 0,
      output: ''
    };
    const options = {
      listeners: {
        stdout: (data: Buffer): void => {
          result.output += data.toString();
        }
      }
    };
    await setCommitAuthor(userName, userEmail);
    result.exitcode = await exec.exec('git', ['config', 'user.name'], options);
    expect(result.output).toMatch('default-octocat');
    result.exitcode = await exec.exec('git', ['config', 'user.email'], options);
    expect(result.output).toMatch('default-octocat@users.noreply.github.com');
  });
  test('get custom commit author', async () => {
    const userName = 'custom-octocat';
    const userEmail = 'custom-octocat@github.com';
    const result: CmdResult = {
      exitcode: 0,
      output: ''
    };
    const options = {
      listeners: {
        stdout: (data: Buffer): void => {
          result.output += data.toString();
        }
      }
    };
    await setCommitAuthor(userName, userEmail);
    result.exitcode = await exec.exec('git', ['config', 'user.name'], options);
    expect(result.output).toMatch(userName);
    result.exitcode = await exec.exec('git', ['config', 'user.email'], options);
    expect(result.output).toMatch(userEmail);
  });
  test('throw error user_email is undefined', async () => {
    const userName = 'custom-octocat';
    const userEmail = '';
    await expect(setCommitAuthor(userName, userEmail)).rejects.toThrowError(
      'user_email is undefined'
    );
  });
  test('throw error user_name is undefined', async () => {
    const userName = '';
    const userEmail = 'custom-octocat@github.com';
    await expect(setCommitAuthor(userName, userEmail)).rejects.toThrowError(
      'user_name is undefined'
    );
  });
});
describe('getCommitMessage()', () => {
test('get default message', () => {
const test = getCommitMessage('', '', '', 'actions/pages', 'commit_hash');
expect(test).toMatch('deploy: commit_hash');
});
test('get default message for external repository', () => {
const test = getCommitMessage(
'',
'',
'actions/actions.github.io',
'actions/pages',
'commit_hash'
);
expect(test).toMatch('deploy: actions/pages@commit_hash');
});
test('get custom message', () => {
const test = getCommitMessage('Custom msg', '', '', 'actions/pages', 'commit_hash');
expect(test).toMatch('Custom msg commit_hash');
});
test('get custom message for external repository', () => {
const test = getCommitMessage(
'Custom msg',
'',
'actions/actions.github.io',
'actions/pages',
'commit_hash'
);
expect(test).toMatch('Custom msg actions/pages@commit_hash');
});
test('get full custom message', () => {
const test = getCommitMessage('', 'Full custom msg', '', 'actions/pages', 'commit_hash');
expect(test).toMatch('Full custom msg');
});
test('get full custom message for external repository', () => {
const test = getCommitMessage(
'',
'Full custom msg',
'actions/actions.github.io',
'actions/pages',
'commit_hash'
);
expect(test).toMatch('Full custom msg');
});
}); | the_stack |
declare var global : any;
//resolve the global object: `window` in a browser, node's `global` otherwise
//(reading `window` outside a browser throws, hence the try/catch)
var win : any = null;
try {
    win = window;
}
catch (e) {
    win = global;
}
//some other modules might want access to the serialization meta data, expose it here
//(a Map keyed by constructor, holding each type's Array<MetaData>)
var TypeMap = win.__CerializeTypeMap = new (win as any).Map();
export type Serializer = (value : any) => any;
export type Deserializer = (value : any) => any;
export interface INewable<T> {
new (...args : any[]) : T;
}
export interface IEnum {
[enumeration : string] : any
}
export interface ISerializable {
Serialize? : (value : any) => any;
Deserialize? : (json : any, instance? : any) => any;
}
//convert strings like my_camel_string to myCamelString:
//separator runs (-, _, ., whitespace) are dropped and the following
//character is uppercased, then a leading capital is lowercased
export function CamelCase(str : string) : string {
    const collapsed = str.replace(/(\-|_|\.|\s)+(.)?/g, (match : string, separator : string, chr : string) : string => {
        return chr ? chr.toUpperCase() : '';
    });
    return collapsed.replace(/^([A-Z])/, (leading : string) : string => leading.toLowerCase());
}
//convert strings like MyCamelString to my_camel_string:
//insert an underscore at each lower/digit-to-upper boundary, then lowercase
export function SnakeCase(str : string) : string {
    const withSeparators = str.replace(/([a-z\d])([A-Z])/g, '$1_$2');
    return withSeparators.toLowerCase();
}
//convert strings like myCamelCase to my_camel_case:
//underscore each case boundary, turn dashes/whitespace into underscores, lowercase
export function UnderscoreCase(str : string) : string {
    return str
        .replace(/([a-z\d])([A-Z]+)/g, '$1_$2')
        .replace(/\-|\s+/g, '_')
        .toLowerCase();
}
//convert strings like my_camelCase to my-camel-case:
//underscores become dashes, then each case boundary gets a dash, then lowercase
export function DashCase(str : string) : string {
    const dashed = str.replace(/_/g, '-');
    return dashed.replace(/([a-z\d])([A-Z])/g, '$1-$2').toLowerCase();
}
//coerce a value (or each element of an array) to a string;
//falsy inputs (0, '', null, undefined, false) become null
function deserializeString(value : any) : string|string[] {
    if (!Array.isArray(value)) {
        return value && value.toString() || null;
    }
    return value.map((element : any) => element && element.toString() || null);
}
//parse a value (or each element of an array) as a floating-point number
function deserializeNumber(value : any) : number|number[] {
    if (!Array.isArray(value)) {
        return parseFloat(value);
    }
    return value.map((element : any) => parseFloat(element));
}
//coerce a value (or each element of an array) to a boolean via truthiness
function deserializeBoolean(value : any) : boolean|boolean[] {
    if (!Array.isArray(value)) {
        return Boolean(value);
    }
    return value.map(function (element : any) : boolean {
        return Boolean(element);
    });
}
//coerce a value (or each element of an array) to a string for output;
//falsy inputs (0, '', null, undefined, false) become null
function serializeString(value : any) : string|string[] {
    return Array.isArray(value)
        ? value.map((element : any) => element && element.toString() || null)
        : (value && value.toString() || null);
}
//parse a value (or each element of an array) as a number for output.
//Uses parseFloat (previously parseInt) so fractional values survive a
//serialize -> deserialize round trip; the paired deserializeNumber also
//parses with parseFloat, and parseInt silently truncated e.g. 3.7 to 3.
function serializeNumber(value : any) : number|number[] {
    if (Array.isArray(value)) {
        return value.map(function (element : any) {
            return parseFloat(element);
        });
    }
    else {
        return parseFloat(value);
    }
}
//coerce a value (or each element of an array) to a boolean for output
function serializeBoolean(value : any) : boolean|boolean[] {
    return Array.isArray(value)
        ? value.map((element : any) => Boolean(element))
        : Boolean(value);
}
//map the primitive constructors to their deserialize helpers;
//any other type is returned unchanged and handled downstream
function getDeserializeFnForType(type : any) : Deserializer {
    switch (type) {
        case String: return deserializeString;
        case Number: return deserializeNumber;
        case Boolean: return deserializeBoolean;
        default: return type;
    }
}
//map the primitive constructors to their serialize helpers;
//any other type is returned unchanged and handled downstream
function getSerializeFnForType(type : any) : Serializer {
    switch (type) {
        case String: return serializeString;
        case Number: return serializeNumber;
        case Boolean: return serializeBoolean;
        default: return type;
    }
}
//gets meta data for a key name, creating (and appending) a new meta data
//instance if the input array doesn't already define one for the given keyName
function getMetaData(array : Array<MetaData>, keyName : string) : MetaData {
    for (var i = 0; i < array.length; i++) {
        var candidate = array[i];
        if (candidate.keyName === keyName) return candidate;
    }
    var created = new MetaData(keyName);
    array.push(created);
    return created;
}
//helper for grabbing the type and keyname from a multi-type input variable:
//a string argument is a key override; a function/object argument is a type,
//with the separate keyName parameter supplying the key
function getTypeAndKeyName(keyNameOrType : string|Function|ISerializable, keyName : string) : {type : Function, key : string} {
    var resolvedType : Function = null;
    var resolvedKey : string = null;
    if (typeof keyNameOrType === "string") {
        resolvedKey = <string>keyNameOrType;
    }
    else if (keyNameOrType && typeof keyNameOrType === "function" || typeof keyNameOrType === "object") {
        resolvedType = <Function>keyNameOrType;
        resolvedKey = keyName;
    }
    return { key: resolvedKey, type: resolvedType };
}
//todo instance.constructor.prototype.__proto__ === parent class, maybe use this?
//because types are stored in a JS Map keyed by constructor, serialization is not inherited by default
//keeping this seperate by default also allows sub classes to serialize differently than their parent
export function inheritSerialization(parentType : Function) : any {
    return function (childType : Function) {
        var parentMetaData : Array<MetaData> = TypeMap.get(parentType) || [];
        var childMetaData : Array<MetaData> = TypeMap.get(childType) || [];
        //copy every parent entry the child hasn't already defined itself
        parentMetaData.forEach(function (parentEntry : MetaData) {
            if (!MetaData.hasKeyName(childMetaData, parentEntry.keyName)) {
                childMetaData.push(MetaData.clone(parentEntry));
            }
        });
        TypeMap.set(childType, childMetaData);
    };
}
//an untyped serialization property annotation, gets existing meta data for the target or creates
//a new one and assigns the serialization key for that type in the meta data
export function serialize(target : any, keyName : string) : any {
if (!target || !keyName) return;
var metaDataList : Array<MetaData> = TypeMap.get(target.constructor) || [];
var metadata = getMetaData(metaDataList, keyName);
metadata.serializedKey = keyName;
TypeMap.set(target.constructor, metaDataList);
}
//an untyped deserialization property annotation, gets existing meta data for the target or creates
//a new one and assigns the deserialization key for that type in the meta data
export function deserialize(target : any, keyName : string) : any {
if (!target || !keyName) return;
var metaDataList : Array<MetaData> = TypeMap.get(target.constructor) || [];
var metadata = getMetaData(metaDataList, keyName);
metadata.deserializedKey = keyName;
TypeMap.set(target.constructor, metaDataList);
}
//this combines @serialize and @deserialize as defined above
export function autoserialize(target : any, keyName : string) : any {
if (!target || !keyName) return;
var metaDataList : Array<MetaData> = TypeMap.get(target.constructor) || [];
var metadata = getMetaData(metaDataList, keyName);
metadata.serializedKey = keyName;
metadata.deserializedKey = keyName;
TypeMap.set(target.constructor, metaDataList);
}
//We dont actually need the type to serialize but I like the consistency with deserializeAs which definitely does
//serializes a type using 1.) a custom key name, 2.) a custom type, or 3.) both custom key and type
export function serializeAs(keyNameOrType : string|Serializer|INewable<any>|ISerializable|IEnum, keyName? : string) : any {
if (!keyNameOrType) return;
var {key, type} = getTypeAndKeyName(keyNameOrType, keyName);
return function (target : any, actualKeyName : string) : any {
if (!target || !actualKeyName) return;
var metaDataList : Array<MetaData> = TypeMap.get(target.constructor) || [];
var metadata = getMetaData(metaDataList, actualKeyName);
metadata.serializedKey = (key) ? key : actualKeyName;
metadata.serializedType = type;
//this allows the type to be a stand alone function instead of a class
if (type !== Date && type !== RegExp && !TypeMap.get(type) && typeof type === "function") {
metadata.serializedType = <ISerializable>{
Serialize: getSerializeFnForType(type)
};
}
TypeMap.set(target.constructor, metaDataList);
};
}
//Supports serializing of dictionary-like map objects, ie: { x: {}, y: {} }
export function serializeIndexable(type : Serializer|INewable<any>|ISerializable, keyName? : string) : any {
if (!type) return;
return function (target : any, actualKeyName : string) : any {
if (!target || !actualKeyName) return;
var metaDataList : Array<MetaData> = TypeMap.get(target.constructor) || [];
var metadata = getMetaData(metaDataList, actualKeyName);
metadata.serializedKey = (keyName) ? keyName : actualKeyName;
metadata.serializedType = type;
metadata.indexable = true;
//this allows the type to be a stand alone function instead of a class
if (type !== Date && type !== RegExp && !TypeMap.get(type) && typeof type === "function") {
metadata.serializedType = <ISerializable>{
Serialize: getSerializeFnForType(type)
};
}
TypeMap.set(target.constructor, metaDataList);
};
}
//deserializes a type using 1.) a custom key name, 2.) a custom type, or 3.) both custom key and type
export function deserializeAs(keyNameOrType : string|Function|INewable<any>|ISerializable|IEnum, keyName? : string) : any {
if (!keyNameOrType) return;
var {key, type} = getTypeAndKeyName(keyNameOrType, keyName);
return function (target : any, actualKeyName : string) : any {
if (!target || !actualKeyName) return;
var metaDataList : Array<MetaData> = TypeMap.get(target.constructor) || [];
var metadata = getMetaData(metaDataList, actualKeyName);
metadata.deserializedKey = (key) ? key : actualKeyName;
metadata.deserializedType = type;
//this allows the type to be a stand alone function instead of a class
//todo maybe add an explicit date and regexp deserialization function here
if (!TypeMap.get(type) && type !== Date && type !== RegExp && typeof type === "function") {
metadata.deserializedType = <ISerializable>{
Deserialize: getDeserializeFnForType(type)
};
}
TypeMap.set(target.constructor, metaDataList);
};
}
//Supports deserializing of dictionary-like map objects, ie: { x: {}, y: {} }
export function deserializeIndexable(type : Function|INewable<any>|ISerializable, keyName? : string) : any {
if (!type) return;
var key = keyName;
return function (target : any, actualKeyName : string) : any {
if (!target || !actualKeyName) return;
var metaDataList : Array<MetaData> = TypeMap.get(target.constructor) || [];
var metadata = getMetaData(metaDataList, actualKeyName);
metadata.deserializedKey = (key) ? key : actualKeyName;
metadata.deserializedType = type;
metadata.indexable = true;
if (!TypeMap.get(type) && type !== Date && type !== RegExp && typeof type === "function") {
metadata.deserializedType = <ISerializable>{
Deserialize: getDeserializeFnForType(type)
};
}
TypeMap.set(target.constructor, metaDataList);
};
}
//serializes and deserializes a type using 1.) a custom key name, 2.) a custom type, or 3.) both custom key and type
export function autoserializeAs(keyNameOrType : string|Function|INewable<any>|ISerializable|IEnum, keyName? : string) : any {
if (!keyNameOrType) return;
var {key, type} = getTypeAndKeyName(keyNameOrType, keyName);
return function (target : any, actualKeyName : string) : any {
if (!target || !actualKeyName) return;
var metaDataList : Array<MetaData> = TypeMap.get(target.constructor) || [];
var metadata = getMetaData(metaDataList, actualKeyName);
var serialKey = (key) ? key : actualKeyName;
metadata.deserializedKey = serialKey;
metadata.deserializedType = type;
metadata.serializedKey = serialKey;
metadata.serializedType = getSerializeFnForType(type);
if (!TypeMap.get(type) && type !== Date && type !== RegExp && typeof type === "function") {
metadata.deserializedType = <ISerializable>{
Deserialize: getDeserializeFnForType(type)
};
}
TypeMap.set(target.constructor, metaDataList);
};
}
//Supports serializing/deserializing of dictionary-like map objects, ie: { x: {}, y: {} }
export function autoserializeIndexable(type : Function|INewable<any>|ISerializable, keyName? : string) : any {
if (!type) return;
var key = keyName;
return function (target : any, actualKeyName : string) : any {
if (!target || !actualKeyName) return;
var metaDataList : Array<MetaData> = TypeMap.get(target.constructor) || [];
var metadata = getMetaData(metaDataList, actualKeyName);
var serialKey = (key) ? key : actualKeyName;
metadata.deserializedKey = serialKey;
metadata.deserializedType = type;
metadata.serializedKey = serialKey;
metadata.serializedType = getSerializeFnForType(type);
metadata.indexable = true;
if (!TypeMap.get(type) && type !== Date && type !== RegExp && typeof type === "function") {
metadata.deserializedType = <ISerializable>{
Deserialize: getDeserializeFnForType(type)
};
}
TypeMap.set(target.constructor, metaDataList);
};
}
//helper class to contain serialization meta data for a property, each property
//in a type tagged with a serialization annotation will contain an array of these
//objects each describing one property
class MetaData {
    public keyName : string; //the property name this meta data describes
    public serializedKey : string; //the key to write when serializing
    public deserializedKey : string; //the key to read when deserializing
    public serializedType : Function|INewable<any>|ISerializable; //the type or function to use when serializing this property
    public deserializedType : Function|INewable<any>|ISerializable; //the type or function to use when deserializing this property
    public indexable : boolean; //true when the property is a dictionary-like map
    constructor(keyName : string) {
        this.keyName = keyName;
        this.serializedKey = null;
        this.deserializedKey = null;
        this.deserializedType = null;
        this.serializedType = null;
        this.indexable = false;
    }
    //checks for a key name in a meta data array
    public static hasKeyName(metadataArray : Array<MetaData>, key : string) : boolean {
        return metadataArray.some(function (entry : MetaData) : boolean {
            return entry.keyName === key;
        });
    }
    //clone a meta data instance, used for inheriting serialization properties
    public static clone(data : MetaData) : MetaData {
        var copy = new MetaData(data.keyName);
        copy.serializedKey = data.serializedKey;
        copy.deserializedKey = data.deserializedKey;
        copy.serializedType = data.serializedType;
        copy.deserializedType = data.deserializedType;
        copy.indexable = data.indexable;
        return copy;
    }
}
//merges two primitive objects recursively, overwriting or defining properties on
//`instance` as they defined in `json`. Works on objects, arrays and primitives
function mergePrimitiveObjects(instance : any, json : any) : any {
    if (!json) return instance; //nothing incoming: keep what the instance already has
    if (!instance) return json; //nothing to merge into: take the json wholesale
    //merge every key defined by the incoming json into the instance
    Object.keys(json).forEach(function (key : string) {
        //NOTE(review): the existing value is read from instance[key] but the
        //merged value is written to instance[targetKey] — preserved as-is
        var targetKey : string = key;
        if (typeof deserializeKeyTransform === "function") {
            targetKey = deserializeKeyTransform(key);
        }
        var incoming : any = json[key];
        var existing : any = instance[key];
        var merged : any;
        if (Array.isArray(incoming)) {
            //reuse existing array items where possible to keep references stable:
            //coerce to an array, match the source length, then merge element-wise
            merged = Array.isArray(existing) ? existing : [];
            merged.length = incoming.length;
            for (var i = 0; i < merged.length; i++) {
                merged[i] = mergePrimitiveObjects(merged[i], incoming[i]);
            }
        }
        else if (incoming && typeof incoming === "object") {
            merged = (existing && typeof existing === "object") ? existing : {};
            merged = mergePrimitiveObjects(merged, incoming);
        }
        else {
            //primitive case, straight assignment
            merged = incoming;
        }
        instance[targetKey] = merged;
    });
    return instance;
}
//takes an array defined in json and deserializes it into an array stuffed with instances of `type`.
//any instances already defined in `arrayInstance` will be re-used where possible to maintain referential integrity.
function deserializeArrayInto(source : Array<any>, type : Function|INewable<any>|ISerializable, arrayInstance : any) : Array<any> {
    //make sure we actually have an array to fill, then size it to match the source exactly
    var target : Array<any> = Array.isArray(arrayInstance) ? arrayInstance : new Array<any>(source.length);
    target.length = source.length;
    for (var index = 0; index < source.length; index++) {
        //re-use existing entries when present so object references survive the merge
        target[index] = DeserializeInto(source[index], type, target[index] || new (<any>type)());
    }
    return target;
}
//takes an object defined in json and deserializes it into a `type` instance or populates / overwrites
//properties on `instance` if it is provided.
function deserializeObjectInto(json : any, type : Function|INewable<any>|ISerializable, instance : any) : any {
    var metadataArray : Array<MetaData> = TypeMap.get(type);
    //if we dont have an instance we need to create a new `type`
    if (instance === null || instance === void 0) {
        if (type) {
            instance = new (<any>type)();
        }
    }
    //if we dont have any meta data and we dont have a type to inflate, just merge the objects
    if (instance && !type && !metadataArray) {
        return mergePrimitiveObjects(instance, json);
    }
    //if we dont have meta data just bail out and keep what we have
    if (!metadataArray) {
        invokeDeserializeHook(instance, json, type);
        return instance;
    }
    //for each property in meta data, try to hydrate that property with its corresponding json value
    for (var i = 0; i < metadataArray.length; i++) {
        var metadata = metadataArray[i];
        //these are not the droids we're looking for (to deserialize), moving along
        if (!metadata.deserializedKey) continue;
        var serializedKey = metadata.deserializedKey;
        //only apply the global key transform when no explicit custom key was given
        if (metadata.deserializedKey === metadata.keyName) {
            if (typeof deserializeKeyTransform === "function") {
                serializedKey = deserializeKeyTransform(metadata.keyName);
            }
        }
        var source = json[serializedKey];
        if (source === void 0) continue;
        var keyName = metadata.keyName;
        //if there is a custom deserialize function, use that
        if (metadata.deserializedType && typeof (<any>metadata.deserializedType).Deserialize === "function") {
            instance[keyName] = (<any>metadata.deserializedType).Deserialize(source);
        }
        //in the array case check for a type, if we have one deserialize an array full of those,
        //otherwise (thanks to @garkin) just do a generic array deserialize
        else if (Array.isArray(source)) {
            if (metadata.deserializedType) {
                instance[keyName] = deserializeArrayInto(source, metadata.deserializedType, instance[keyName]);
            } else {
                instance[keyName] = deserializeArray(source, null);
            }
        }
        //if the type is a Date, reconstruct a real date instance, re-using the existing Date when possible
        else if ((typeof source === "string" || source instanceof Date) && metadata.deserializedType === Date.prototype.constructor) {
            var deserializedDate = new Date(source);
            if (instance[keyName] instanceof Date) {
                instance[keyName].setTime(deserializedDate.getTime());
            }
            else {
                instance[keyName] = deserializedDate;
            }
        }
        //if the property type is RegExp, rebuild it from the source string.
        //BUGFIX: this must test the property's deserializedType; it previously compared the
        //containing object's `type` to RegExp, so RegExp-typed properties fell through to
        //plain string assignment (deserializeObject below already does this correctly)
        else if (typeof source === "string" && metadata.deserializedType === RegExp) {
            instance[keyName] = new RegExp(source);
        }
        //if source exists and source is an object type, hydrate that type
        else if (source && typeof source === "object") {
            if (metadata.indexable) {
                instance[keyName] = deserializeIndexableObjectInto(source, metadata.deserializedType, instance[keyName]);
            }
            else {
                instance[keyName] = deserializeObjectInto(source, metadata.deserializedType, instance[keyName]);
            }
        }
        //primitive case, just copy
        else {
            instance[keyName] = source;
        }
    }
    //invoke our after deserialized callback if provided
    invokeDeserializeHook(instance, json, type);
    return instance;
}
//deserializes a bit of json into a `type`, dispatching on the shape of the input
export function Deserialize(json : any, type? : Function|INewable<any>|ISerializable) : any {
    if (Array.isArray(json)) {
        return deserializeArray(json, type);
    }
    if (json && typeof json === "object") {
        return deserializeObject(json, type);
    }
    if ((typeof json === "string" || json instanceof Date) && type === Date.prototype.constructor) {
        return new Date(json);
    }
    if (typeof json === "string" && type === RegExp) {
        return new RegExp(<string>json);
    }
    //primitives (and anything unrecognized) pass through untouched
    return json;
}
//takes some json, a type, and a target object and deserializes the json into that object
export function DeserializeInto(source : any, type : Function|INewable<any>|ISerializable, target : any) : any {
    if (Array.isArray(source)) {
        return deserializeArrayInto(source, type, target || []);
    }
    if (source && typeof source === "object") {
        return deserializeObjectInto(source, type, target || new (<any>type)());
    }
    //nothing usable in the source: keep the target, else a fresh instance, else null
    return target || (type && new (<any>type)()) || null;
}
//deserializes an array of json into an array of `type`
function deserializeArray(source : Array<any>, type : Function|INewable<any>|ISerializable) : Array<any> {
    //element-wise deserialize; the output always matches the input length
    var result : Array<any> = new Array(source.length);
    for (var index = 0; index < source.length; index++) {
        result[index] = Deserialize(source[index], type);
    }
    return result;
}
//fires the static OnDeserialized callback on `type`, if one is declared
function invokeDeserializeHook(instance : any, json : any, type : any) : void {
    if (!type) return;
    if (typeof type.OnDeserialized === "function") {
        type.OnDeserialized(instance, json);
    }
}
//fires the static OnSerialized callback declared on the instance's constructor, if any
function invokeSerializeHook(instance : any, json : any) : void {
    var ctor : any = instance.constructor;
    if (typeof ctor.OnSerialized === "function") {
        ctor.OnSerialized(instance, json);
    }
}
//deserialize a bit of json into an instance of `type`
function deserializeObject(json : any, type : Function|INewable<any>|ISerializable) : any {
    var metadataArray : Array<MetaData> = TypeMap.get(type);
    //if we dont have meta data, just decode the json and use that
    if (!metadataArray) {
        var inst : any = null;
        if (!type) {
            //plain object: deep copy via json round trip
            inst = JSON.parse(JSON.stringify(json));
        }
        else {
            inst = new (<any>type)(); //todo this probably wrong
            invokeDeserializeHook(inst, json, type);
        }
        return inst;
    }
    var instance = new (<any>type)();
    //for each tagged property on the source type, try to deserialize it
    for (var i = 0; i < metadataArray.length; i++) {
        var metadata = metadataArray[i];
        if (!metadata.deserializedKey) continue;
        var serializedKey = metadata.deserializedKey;
        //only apply the global key transform when no explicit custom key was given
        if (metadata.deserializedKey === metadata.keyName) {
            if (typeof deserializeKeyTransform === "function") {
                serializedKey = deserializeKeyTransform(metadata.keyName);
            }
        }
        var source = json[serializedKey];
        var keyName = metadata.keyName;
        if (source === void 0) continue;
        if (source === null) {
            instance[keyName] = source;
        }
        //if there is a custom deserialize function, use that
        else if (metadata.deserializedType && typeof (<any>metadata.deserializedType).Deserialize === "function") {
            instance[keyName] = (<any>metadata.deserializedType).Deserialize(source);
        }
        //in the array case check for a type, if we have one deserialize an array full of those,
        //otherwise (thanks to @garkin) just do a generic array deserialize
        else if (Array.isArray(source)) {
            instance[keyName] = deserializeArray(source, metadata.deserializedType || null);
        }
        //Date properties are rebuilt from strings / Date instances
        else if ((typeof source === "string" || source instanceof Date) && metadata.deserializedType === Date.prototype.constructor) {
            instance[keyName] = new Date(source);
        }
        //RegExp properties are rebuilt from their source string.
        //BUGFIX: the RegExp must be constructed from `source` (the property value); it was
        //previously built from `json` (the whole object), which stringifies to "[object Object]"
        else if (typeof source === "string" && metadata.deserializedType === RegExp) {
            instance[keyName] = new RegExp(source);
        }
        else if (source && typeof source === "object") {
            if (metadata.indexable) {
                instance[keyName] = deserializeIndexableObject(source, metadata.deserializedType);
            }
            else {
                instance[keyName] = deserializeObject(source, metadata.deserializedType);
            }
        }
        //primitive case, just copy
        else {
            instance[keyName] = source;
        }
    }
    invokeDeserializeHook(instance, json, type);
    return instance;
}
//treats `source` as a string-keyed map and deserializes every value as a `type` instance
function deserializeIndexableObject(source : any, type : Function | ISerializable) : any {
    var result : any = {};
    //todo apply key transformation here?
    Object.keys(source).forEach(function (mapKey : string) {
        result[mapKey] = deserializeObject(source[mapKey], type);
    });
    return result;
}
//like deserializeIndexableObject, but hydrates into an existing `instance`, re-using entries
function deserializeIndexableObjectInto(source : any, type : Function | ISerializable, instance : any) : any {
    //todo apply key transformation here?
    Object.keys(source).forEach(function (mapKey : string) {
        instance[mapKey] = deserializeObjectInto(source[mapKey], type, instance[mapKey]);
    });
    return instance;
}
//take an array and spit out json, element by element
function serializeArray(source : Array<any>, type? : Function|ISerializable) : Array<any> {
    //the output always matches the input length
    var result : Array<any> = new Array(source.length);
    for (var index = 0; index < source.length; index++) {
        result[index] = Serialize(source[index], type);
    }
    return result;
}
//take an instance of something and try to spit out json for it based on property annotations.
//uses the metadata registered for `type`, falling back to the instance's own constructor.
function serializeTypedObject(instance : any, type? : Function|ISerializable) : any {
var json : any = {};
var metadataArray : Array<MetaData>;
if (type) {
metadataArray = TypeMap.get(type);
} else {
metadataArray = TypeMap.get(instance.constructor);
}
for (var i = 0; i < metadataArray.length; i++) {
var metadata = metadataArray[i];
//properties without a serialized key are never emitted
if (!metadata.serializedKey) continue;
var serializedKey = metadata.serializedKey;
//only apply the global key transform when no explicit custom key was given
if (metadata.serializedKey === metadata.keyName) {
if (typeof serializeKeyTransform === "function") {
serializedKey = serializeKeyTransform(metadata.keyName);
}
}
var source = instance[metadata.keyName];
//undefined properties are skipped entirely (null is still emitted)
if (source === void 0) continue;
if (Array.isArray(source)) {
json[serializedKey] = serializeArray(source, metadata.serializedType || null);
}
else if (metadata.serializedType && typeof (<any>metadata.serializedType).Serialize === "function") {
//todo -- serializeIndexableObject probably isn't needed because of how serialize works
json[serializedKey] = (<any>metadata.serializedType).Serialize(source);
} else {
var value = Serialize(source);
if (value !== void 0) {
json[serializedKey] = value;
}
}
}
//fire the static OnSerialized hook (if declared) before handing the json back
invokeSerializeHook(instance, json);
return json;
}
//take an instance of something and spit out some json
export function Serialize(instance : any, type? : Function|ISerializable) : any {
    //nullish input always serializes to null
    if (instance === null || instance === void 0) return null;
    //arrays serialize element-wise
    if (Array.isArray(instance)) {
        return serializeArray(instance, type);
    }
    //prefer an explicitly supplied annotated type, then the instance's own constructor
    if (type && TypeMap.has(type)) {
        return serializeTypedObject(instance, type);
    }
    if (instance.constructor && TypeMap.has(instance.constructor)) {
        return serializeTypedObject(instance);
    }
    //built-ins with a natural string representation
    if (instance instanceof Date) {
        return instance.toISOString();
    }
    if (instance instanceof RegExp) {
        return instance.toString();
    }
    //plain objects (and functions) get a recursive copy of their own enumerable keys
    var isObjectLike = (instance && typeof instance === 'object') || typeof instance === 'function';
    if (isObjectLike) {
        var json : any = {};
        Object.keys(instance).forEach(function (key : string) {
            //todo this probably needs a key transform
            json[key] = Serialize(instance[key]);
        });
        invokeSerializeHook(instance, json);
        return json;
    }
    //primitives pass through untouched
    return instance;
}
//typed convenience wrapper around Deserialize
export function GenericDeserialize<T>(json : any, type : INewable<T>) : T {
return <T>Deserialize(json, type);
}
//typed convenience wrapper around DeserializeInto
export function GenericDeserializeInto<T>(json : any, type : INewable<T>, instance : T) : T {
return <T>DeserializeInto(json, type, instance);
}
//these are used for transforming keys from one format to another
//module-level hooks; null means "no transform" (callers check with typeof === "function")
var serializeKeyTransform : (key : string) => string = null;
var deserializeKeyTransform : (key : string) => string = null;
//setter for deserializing key transform
//pass null to disable the transform again
export function DeserializeKeysFrom(transform : (key : string) => string) {
deserializeKeyTransform = transform;
}
//setter for serializing key transform
//pass null to disable the transform again
export function SerializeKeysTo(transform : (key : string) => string) {
serializeKeyTransform = transform;
}
//this is kinda dumb but typescript doesnt treat enums as a type, but sometimes you still
//want them to be serialized / deserialized, this does the trick but must be called after
//the enum is defined.
export function SerializableEnumeration(e : IEnum) : void {
    //enum objects are bidirectional maps (name -> value and value -> name),
    //so serializing and deserializing are both plain index lookups
    e.Serialize = function (value : any) : any {
        return e[value];
    };
    e.Deserialize = function (value : any) : any {
        return e[value];
    };
}
//expose the type map
export {TypeMap as __TypeMap} | the_stack |
import { mat4, vec2 } from 'gl-matrix'
import { Point, Size, Rect, Corner } from './GeometryTypes'
import { round } from './LangUtil'
//a gl-matrix vec2 or a plain [x, y] tuple
export type Vec2Like = vec2 | [ number, number ]
//maps each rect corner to the diagonally opposite corner
export const oppositeCorner: { [K in Corner]: Corner } = {
nw: 'se',
ne: 'sw',
sw: 'ne',
se: 'nw'
}
export function toVec2(point: Vec2Like | Point): vec2 {
  //pass through real vec2 instances, convert {x, y} points, otherwise assume a 2-tuple
  if (isVec2(point)) {
    return point
  }
  if (isPoint(point)) {
    return vec2.fromValues(point.x, point.y)
  }
  return vec2.fromValues(point[0], point[1])
}
export function isVec2(obj: Vec2Like | Point | null | undefined): obj is vec2 {
  //gl-matrix backs vec2 with a Float32Array of length 2
  if (!(obj instanceof Float32Array)) return false
  return obj.length === 2
}
export function isVec2Like(obj: Vec2Like | Point | null | undefined): obj is Vec2Like {
  //anything truthy and indexable with exactly two numeric entries counts (vec2, [x, y], ...)
  if (!obj) return false
  return obj['length'] === 2 && typeof obj[0] === 'number' && typeof obj[1] === 'number'
}
export function isPoint(obj: Vec2Like | Point | null | undefined): obj is Point {
  //a Point is any truthy object carrying numeric `x` and `y` properties
  if (!obj) return false
  return typeof obj['x'] === 'number' && typeof obj['y'] === 'number'
}
export function roundVec2(vector: Vec2Like, fractionDigits: number = 0): vec2 {
  //round each component to the requested number of fraction digits
  const x = round(vector[0], fractionDigits)
  const y = round(vector[1], fractionDigits)
  return vec2.fromValues(x, y)
}
export function ceilVec2(vector: Vec2Like, fractionDigits: number = 0): vec2 {
  //BUGFIX: `fractionDigits` was accepted but ignored; scale so the ceiling respects it
  //(factor is 1 when fractionDigits is 0, preserving the previous default behavior)
  const factor = Math.pow(10, fractionDigits)
  return vec2.fromValues(Math.ceil(vector[0] * factor) / factor, Math.ceil(vector[1] * factor) / factor)
}
export function floorVec2(vector: Vec2Like, fractionDigits: number = 0): vec2 {
  //BUGFIX: `fractionDigits` was accepted but ignored; scale so the floor respects it
  //(factor is 1 when fractionDigits is 0, preserving the previous default behavior)
  const factor = Math.pow(10, fractionDigits)
  return vec2.fromValues(Math.floor(vector[0] * factor) / factor, Math.floor(vector[1] * factor) / factor)
}
export function movePoint(point: Vec2Like, direction: Vec2Like, factor = 1): vec2 {
  //translate `point` by `direction` scaled with `factor`
  const x = point[0] + factor * direction[0]
  const y = point[1] + factor * direction[1]
  return vec2.fromValues(x, y)
}
export function roundRect(rect: Rect, fractionDigits: number = 0): Rect {
  //round every rect component to the requested precision
  const { x, y, width, height } = rect
  return {
    x: round(x, fractionDigits),
    y: round(y, fractionDigits),
    width: round(width, fractionDigits),
    height: round(height, fractionDigits)
  }
}
export function transformRect(rect: Rect, matrix: mat4): Rect {
  //transform two opposite corners; the result is their axis-aligned bounding rect
  const nw = cornerPointOfRect(rect, 'nw')
  const se = cornerPointOfRect(rect, 'se')
  vec2.transformMat4(nw, nw, matrix)
  vec2.transformMat4(se, se, matrix)
  return rectFromPoints(nw, se)
}
export function rectFromPoints(point1: Vec2Like, point2: Vec2Like): Rect {
  //normalize so the rect always has a non-negative size, whatever the point order
  const left = Math.min(point1[0], point2[0])
  const top = Math.min(point1[1], point2[1])
  const right = Math.max(point1[0], point2[0])
  const bottom = Math.max(point1[1], point2[1])
  return { x: left, y: top, width: right - left, height: bottom - top }
}
export function rectFromCenterAndSize(center: Vec2Like, size: Size): Rect {
  //negative sizes are treated as their absolute value
  const w = Math.abs(size.width)
  const h = Math.abs(size.height)
  return { x: center[0] - w / 2, y: center[1] - h / 2, width: w, height: h }
}
export function rectFromCornerPointAndSize(cornerPoint: Vec2Like, size: Size): Rect {
  //a negative size extends the rect to the left/top of the corner point
  const x = size.width > 0 ? cornerPoint[0] : cornerPoint[0] + size.width
  const y = size.height > 0 ? cornerPoint[1] : cornerPoint[1] + size.height
  return { x, y, width: Math.abs(size.width), height: Math.abs(size.height) }
}
export function centerOfRect(rect: Rect): vec2 {
  //midpoint of the rect
  const cx = rect.x + rect.width / 2
  const cy = rect.y + rect.height / 2
  return vec2.fromValues(cx, cy)
}
export function cornerPointOfRect(rect: Rect, corner: Corner): vec2 {
  //west corners use the left edge, north corners the top edge
  const west = corner === 'nw' || corner === 'sw'
  const north = corner === 'nw' || corner === 'ne'
  return vec2.fromValues(west ? rect.x : rect.x + rect.width, north ? rect.y : rect.y + rect.height)
}
/**
* Returns a rect covering `rect1` and `rect2`.
*/
export function boundsOfRects(rect1: Rect, rect2: Rect): Rect {
  const right1 = rect1.x + rect1.width
  const bottom1 = rect1.y + rect1.height
  const right2 = rect2.x + rect2.width
  const bottom2 = rect2.y + rect2.height
  //fast paths: return the covering rect object itself when one already contains the other
  if (rect1.x <= rect2.x && rect1.y <= rect2.y && right1 >= right2 && bottom1 >= bottom2) {
    return rect1
  }
  if (rect2.x <= rect1.x && rect2.y <= rect1.y && right2 >= right1 && bottom2 >= bottom1) {
    return rect2
  }
  //general case: union of the two bounding boxes
  const x = Math.min(rect1.x, rect2.x)
  const y = Math.min(rect1.y, rect2.y)
  return {
    x,
    y,
    width: Math.max(right1, right2) - x,
    height: Math.max(bottom1, bottom2) - y,
  }
}
export function boundsOfPoints(points: Vec2Like[]): Rect {
  //track the extremes over all points
  let minX = Number.POSITIVE_INFINITY
  let minY = Number.POSITIVE_INFINITY
  let maxX = Number.NEGATIVE_INFINITY
  let maxY = Number.NEGATIVE_INFINITY
  for (const point of points) {
    minX = Math.min(minX, point[0])
    maxX = Math.max(maxX, point[0])
    minY = Math.min(minY, point[1])
    maxY = Math.max(maxY, point[1])
  }
  return { x: minX, y: minY, width: maxX - minX, height: maxY - minY }
}
export function scaleSize(size: Size, factor: number): Size {
  //factor 1 returns the original object (callers may rely on identity)
  return factor === 1
    ? size
    : { width: size.width * factor, height: size.height * factor }
}
export function directionOfPoints(start: Vec2Like, end: Vec2Like): vec2 {
  //vector pointing from `start` towards `end`
  const dx = end[0] - start[0]
  const dy = end[1] - start[1]
  return vec2.fromValues(dx, dy)
}
export function intersectLines(start1: Vec2Like, direction1: Vec2Like, start2: Vec2Like, direction2: Vec2Like, outFactors: number[]) {
  //thin wrapper unpacking the vectors for the scalar implementation
  intersectLinesPlain(
    start1[0], start1[1], direction1[0], direction1[1],
    start2[0], start2[1], direction2[0], direction2[1],
    outFactors
  )
}
export function intersectLinesPlain(startX1: number, startY1: number, directionX1: number, directionY1: number,
  startX2: number, startY2: number, directionX2: number, directionY2: number, outFactors: number[])
{
  //2D cross product of the directions; zero means the lines are parallel (or degenerate)
  const denominator = directionX2*directionY1 - directionX1*directionY2
  if (denominator === 0) {
    outFactors[0] = NaN
    outFactors[1] = NaN
    return
  }
  const dx = startX2 - startX1
  const dy = startY2 - startY1
  //parameters along line 1 and line 2 at which the intersection point lies
  outFactors[0] = (directionX2*dy - dx*directionY2) / denominator
  outFactors[1] = (directionX1*dy - dx*directionY1) / denominator
}
export function isPointInPolygon(point: Vec2Like, polygonPoints: Vec2Like[]): boolean {
  // Original code from: https://github.com/substack/point-in-polygon/blob/master/index.js
  // Ray-casting (see http://www.ecse.rpi.edu/Homepages/wrf/Research/Short_Notes/pnpoly.html):
  // count horizontal-ray crossings of the polygon edges; an odd count means inside.
  const px = point[0]
  const py = point[1]
  let inside = false
  const n = polygonPoints.length
  for (let i = 0, j = n - 1; i < n; j = i++) {
    const xi = polygonPoints[i][0]
    const yi = polygonPoints[i][1]
    const xj = polygonPoints[j][0]
    const yj = polygonPoints[j][1]
    const crosses = ((yi > py) !== (yj > py)) &&
      (px < (xj - xi) * (py - yi) / (yj - yi) + xi)
    if (crosses) inside = !inside
  }
  return inside
}
export function squareDistanceOfPoints(point1: Vec2Like, point2: Vec2Like): number {
  //squared euclidean distance (avoids the sqrt when only comparing distances)
  const dx = point2[0] - point1[0]
  const dy = point2[1] - point1[1]
  return dx * dx + dy * dy
}
/**
* Finds the nearest point on a line.
* Returns `null` if `lineDirection` is `[0, 0]`
*/
export function nearestPointOnLine(point: Vec2Like, lineStart: Vec2Like, lineDirection: Vec2Like): vec2 | null {
  const [ dirX, dirY ] = lineDirection
  if (!dirX && !dirY) {
    // Degenerate direction -> no real line (also avoids division by 0)
    return null
  }
  //project the start->point vector onto the direction to get the line parameter t
  const relX = point[0] - lineStart[0]
  const relY = point[1] - lineStart[1]
  const t = (relX*dirX + relY*dirY) / (dirX*dirX + dirY*dirY)
  return vec2.fromValues(
    lineStart[0] + t*dirX,
    lineStart[1] + t*dirY
  )
}
/**
* Finds the nearest point on a polygon.
*/
export function nearestPointOnPolygon(point: Vec2Like, polygonPoints: Vec2Like[]): vec2 {
  //walk every polygon segment, clamp the projection of `point` onto it, and keep the
  //closest candidate. (Removed the unused `nearestSegmentIndex` bookkeeping that was
  //tracked but never read or returned.)
  let minSquareDistance: number | null = null
  let nearestX = 0
  let nearestY = 0
  const polygonSize = polygonPoints.length
  for (let i = 0, j = polygonSize - 1; i < polygonSize; j = i++) {
    const segmentStart = polygonPoints[i]
    const segmentEnd = polygonPoints[j]
    const bx = segmentEnd[0] - segmentStart[0]
    const by = segmentEnd[1] - segmentStart[1]
    if (bx == 0 && by == 0) {
      // This segment has the same start and end point
      // -> Ignore it (and avoid division by 0)
      continue
    }
    const vx = point[0] - segmentStart[0]
    const vy = point[1] - segmentStart[1]
    //projection factor along the segment, clamped to [0, 1] so we stay on the segment
    const t = (vx*bx + vy*by) / (bx*bx + by*by)
    let nx: number, ny: number
    if (t <= 0) {
      nx = segmentStart[0]
      ny = segmentStart[1]
    } else if (t >= 1) {
      nx = segmentEnd[0]
      ny = segmentEnd[1]
    } else {
      nx = segmentStart[0] + t*bx
      ny = segmentStart[1] + t*by
    }
    const distX = point[0] - nx
    const distY = point[1] - ny
    const squareDistance = distX*distX + distY*distY
    if (minSquareDistance == null || squareDistance < minSquareDistance) {
      minSquareDistance = squareDistance
      nearestX = nx
      nearestY = ny
    }
  }
  return vec2.fromValues(nearestX, nearestY)
}
export function intersectLineWithPolygon(lineStart: Vec2Like, lineDirection: Vec2Like, polygonPoints: Vec2Like[]): number[] {
  //collect the factors along the (infinite) line at which it crosses polygon segments, sorted ascending
  const factors: number[] = []
  const scratch: number[] = []
  const count = polygonPoints.length
  for (let i = 0, j = count - 1; i < count; j = i++) {
    const segmentStart = polygonPoints[i]
    const segmentDirection = directionOfPoints(segmentStart, polygonPoints[j])
    intersectLines(segmentStart, segmentDirection, lineStart, lineDirection, scratch)
    //scratch[0] is the factor along the segment; only hits inside [0, 1] touch the segment
    if (scratch[0] >= 0 && scratch[0] <= 1) {
      factors.push(scratch[1])
    }
  }
  factors.sort(compareNumbers)
  return factors
}
//ascending numeric comparator for Array.prototype.sort (the default sort is lexicographic)
function compareNumbers(a: number, b: number): number {
return a - b
}
/**
* Scales a rect to fit into a rotated version of itself (same center and same size).
*/
export function scaleRectToFitBorders(rectCenter: Vec2Like | Point, maxRectSize: Size, rotatedRectPolygon: Vec2Like[]): Rect {
if (isPoint(rectCenter)) {
rectCenter = toVec2(rectCenter)
}
//cast rays from the center along both rect diagonals (the half-size vectors) and find where
//they cross the rotated polygon; the smallest |factor| (capped at 1) is the scale that keeps
//all four corners inside the borders
let outFactors: number[]
outFactors = intersectLineWithPolygon(rectCenter, [maxRectSize.width / 2, maxRectSize.height / 2], rotatedRectPolygon)
let minFactor = outFactors.reduce((minFactor, factor) => Math.min(minFactor, Math.abs(factor)), 1)
outFactors = intersectLineWithPolygon(rectCenter, [maxRectSize.width / 2, -maxRectSize.height / 2], rotatedRectPolygon)
minFactor = outFactors.reduce((minFactor, factor) => Math.min(minFactor, Math.abs(factor)), minFactor)
return roundRect(rectFromCenterAndSize(rectCenter, scaleSize(maxRectSize, minFactor)))
}
import { Long } from 'bson';
import {
Channel,
ChannelMeta,
ChannelSession,
ChannelTemplate,
NormalChannelManageSession,
SetChannelMeta,
} from '../../channel';
import { Chat, Chatlog, ChatLogged, ChatType, KnownChatType } from '../../chat';
import { TalkSession } from '../client';
import { AsyncCommandResult, CommandResult, DefaultReq, KnownDataStatusCode } from '../../request';
import {
CreateRes,
ForwardRes,
GetTrailerRes,
MChatlogsRes,
MShipRes,
SetMetaRes,
ShipRes,
SyncMsgRes,
WriteRes,
} from '../../packet/chat';
import { ChatlogStruct, structToChatlog } from '../../packet/struct';
import { ChannelUser } from '../../user';
import { JsonUtil } from '../../util';
import { ChannelMetaType } from '../../channel/meta';
import { MediaKeyComponent, MediaMultiPost, MediaMultiPostEntry, MediaPost, MediaUploadForm } from '../../media';
import { ConnectionSession, LocoSecureLayer, LocoSession } from '../../network';
import { FixedReadStream, FixedWriteStream } from '../../stream';
import { newCryptoStore } from '../../crypto';
import * as NetSocket from '../../network/socket';
/**
* Default ChannelSession implementation
*/
export class TalkChannelSession implements ChannelSession {
private _channel: Channel;
private _session: TalkSession;
//client-generated message id counter, incremented for every WRITE / FORWARD request
currentMsgId: number;
constructor(channel: Channel, session: TalkSession) {
  //message ids are client generated and strictly increasing, starting from 0
  this.currentMsgId = 0;
  this._channel = channel;
  this._session = session;
}
//the underlying TalkSession through which every request of this channel session is issued
get session(): TalkSession {
return this._session;
}
//sends a chat to this channel via WRITE. A plain string is shorthand for a TEXT chat.
//returns the resulting chat log (echoed by the server, or synthesized from the response).
async sendChat(chat: Chat | string, noSeen = true): AsyncCommandResult<Chatlog> {
if (typeof chat === 'string') {
chat = { type: KnownChatType.TEXT, text: chat } as Chat;
}
const data: DefaultReq = {
'chatId': this._channel.channelId,
'msgId': ++this.currentMsgId,
'type': chat.type,
'noSeen': noSeen,
};
//text and attachment are only attached when present
if (chat.text) {
data['msg'] = chat.text;
}
if (chat.attachment) {
data['extra'] = JsonUtil.stringifyLoseless(chat.attachment);
}
const res = await this._session.request<WriteRes>('WRITE', data);
if (res.status === KnownDataStatusCode.SUCCESS) {
let chatlog: Chatlog;
if (res.chatLog) {
chatlog = structToChatlog(res.chatLog);
} else {
//the server did not echo a chat log; build one from the request plus response fields
chatlog = {
...chat,
logId: res.logId,
prevLogId: res.prevId,
sendAt: res.sendAt,
sender: this._session.clientUser,
messageId: res.msgId,
};
}
return { status: res.status, success: true, result: chatlog };
} else {
return { status: res.status, success: false };
}
}
//forwards a chat into this channel via FORWARD; returns the created chat log
async forwardChat(chat: Chat, noSeen = true): AsyncCommandResult<Chatlog> {
  const req: DefaultReq = {
    'chatId': this._channel.channelId,
    'msgId': ++this.currentMsgId,
    'type': chat.type,
    'noSeen': noSeen,
  };
  //text and attachment are only attached when present
  if (chat.text) req['msg'] = chat.text;
  if (chat.attachment) req['extra'] = JsonUtil.stringifyLoseless(chat.attachment);
  const res = await this._session.request<ForwardRes>('FORWARD', req);
  if (res.status !== KnownDataStatusCode.SUCCESS) {
    return { success: false, status: res.status };
  }
  return { success: true, status: res.status, result: structToChatlog(res.chatLog) };
}
//deletes the chat log identified by `chat.logId` from this channel via DELETEMSG
async deleteChat(chat: ChatLogged): Promise<{ success: boolean, status: number }> {
  const res = await this._session.request(
    'DELETEMSG',
    {
      'chatId': this._channel.channelId,
      'logId': chat.logId,
    },
  );
  return { success: res.status === KnownDataStatusCode.SUCCESS, status: res.status };
}
//moves this client's read watermark of the channel up to the given chat log (NOTIREAD)
async markRead(chat: ChatLogged): Promise<{ success: boolean, status: number }> {
  const res = await this._session.request(
    'NOTIREAD',
    {
      'chatId': this._channel.channelId,
      'watermark': chat.logId,
    },
  );
  return { success: res.status === KnownDataStatusCode.SUCCESS, status: res.status };
}
async setMeta(type: ChannelMetaType, meta: ChannelMeta | string): AsyncCommandResult<SetChannelMeta> {
const res = await this._session.request<SetMetaRes>(
'SETMETA',
{
'chatId': this._channel.channelId,
'type': type,
'content': typeof meta === 'string' ? meta : meta.content,
},
);
if (res.status !== KnownDataStatusCode.SUCCESS) return { success: false, status: res.status };
return {
success: true,
status: res.status,
result: { ...res.meta },
};
}
//toggles push notifications for this channel on the server side (UPDATECHAT)
async setPushAlert(flag: boolean): AsyncCommandResult {
  const res = await this._session.request(
    'UPDATECHAT',
    {
      'chatId': this._channel.channelId,
      'pushAlert': flag,
    },
  );
  return { success: res.status === KnownDataStatusCode.SUCCESS, status: res.status };
}
//invites every given user to this channel in a single ADDMEM request
async inviteUsers(users: ChannelUser[]): AsyncCommandResult {
  const memberIds = users.map((user) => user.userId);
  const res = await this._session.request(
    'ADDMEM',
    {
      'chatId': this._channel.channelId,
      'memberIds': memberIds,
    },
  );
  return { success: res.status === KnownDataStatusCode.SUCCESS, status: res.status };
}
//iterates chat log pages from `startLogId` up to `endLogId` via SYNCMSG.
//each next() call fetches one page until the server flags the end or a page is empty.
syncChatList(endLogId: Long, startLogId: Long = Long.ZERO): AsyncIterableIterator<CommandResult<Chatlog[]>> {
  let curLogId = startLogId;
  let done = false;
  return {
    [Symbol.asyncIterator]() {
      return this;
    },
    next: async () => {
      if (done) return { done: true, value: null };
      const res = await this._session.request<SyncMsgRes>(
        'SYNCMSG',
        {
          'chatId': this._channel.channelId,
          'cur': curLogId,
          // Unknown
          'cnt': 0,
          'max': endLogId,
        },
      );
      if (res.status !== KnownDataStatusCode.SUCCESS) {
        done = true;
        return { done: false, value: { status: res.status, success: false } };
      } else if (res.isOK) {
        //the server reports this is the last page
        done = true;
      }
      //BUGFIX: the empty-page check used `length < 0`, which is never true, so an empty
      //page fell through and crashed below when indexing the last element
      if (!res.chatLogs || res.chatLogs.length < 1 || curLogId.greaterThanOrEqual(endLogId)) {
        return { done: true, value: null };
      }
      const result = res.chatLogs.map(structToChatlog);
      //continue the next page from the last log id we received
      curLogId = result[result.length - 1].logId;
      return { done: false, value: { status: res.status, success: true, result } };
    },
  };
}
async getChatListFrom(startLogId: Long = Long.ZERO): AsyncCommandResult<Chatlog[]> {
const res = await this._session.request<MChatlogsRes>(
'MCHATLOGS',
{
'chatIds': [this._channel.channelId],
'sinces': [startLogId],
},
);
if (res.status !== KnownDataStatusCode.SUCCESS) return { success: false, status: res.status };
return { status: res.status, success: true, result: res.chatLogs.map(structToChatlog) };
}
//requests a media ("trailer") server endpoint for `media` via GETTRAILER and opens a
//fresh secure LOCO connection to it. The caller owns (and must close) the returned session.
async createTrailerSession(media: MediaKeyComponent, type: ChatType): AsyncCommandResult<ConnectionSession> {
const res = await this._session.request<GetTrailerRes>(
'GETTRAILER',
{
'k': media.key,
't': type,
},
);
if (res.status !== KnownDataStatusCode.SUCCESS) return { success: false, status: res.status };
return {
success: true,
status: res.status,
//res.vh / res.p are the media host and port handed out by the server
result: new LocoSession(
new LocoSecureLayer(
await NetSocket.createTCPSocket({ host: res.vh, port: res.p, keepAlive: true }),
await newCryptoStore(this._session.configuration.locoPEMPublicKey)
)
)
};
}
//opens a trailer session and issues DOWN to stream the media payload starting at `offset`.
//the returned FixedReadStream is bounded by the size ('s') reported in the response.
async downloadMedia(media: MediaKeyComponent, type: ChatType, offset = 0): AsyncCommandResult<FixedReadStream> {
const res = await this.createTrailerSession(media, type);
if (!res.success) return res;
const session = res.result;
const clientConfig = this._session.configuration;
const data = await session.request('DOWN', {
'k': media.key,
'c': this._channel.channelId,
'o': offset,
'rt': true,
'u': this._session.clientUser.userId,
'os': clientConfig.agent,
'av': clientConfig.appVersion,
'nt': clientConfig.netType,
'mm': clientConfig.mccmnc,
});
//'s' is the total byte size of the payload that follows on the stream
const size = data['s'] as number;
return {
status: KnownDataStatusCode.SUCCESS,
success: true,
result: new FixedReadStream(session.stream, size),
};
}
//opens a trailer session and issues MINI to stream a thumbnail of the media.
//the returned FixedReadStream is bounded by the size ('s') reported in the response.
async downloadMediaThumb(media: MediaKeyComponent, type: ChatType, offset = 0): AsyncCommandResult<FixedReadStream> {
const res = await this.createTrailerSession(media, type);
if (!res.success) return res;
const session = res.result;
const clientConfig = this._session.configuration;
const data = await session.request('MINI', {
'k': media.key,
'c': this._channel.channelId,
'o': offset,
// These should be actual dimension of media.
// Seems like server doesn't care about it.
'w': 0,
'h': 0,
'u': this._session.clientUser.userId,
'os': clientConfig.agent,
'av': clientConfig.appVersion,
'nt': clientConfig.netType,
'mm': clientConfig.mccmnc,
});
//'s' is the total byte size of the thumbnail that follows on the stream
const size = data['s'] as number;
return {
status: KnownDataStatusCode.SUCCESS,
success: true,
result: new FixedReadStream(session.stream, size),
};
}
//announces a single pending upload (size + checksum) via SHIP and returns
//the media server endpoint and key to use for the actual POST
async shipMedia(type: ChatType, form: MediaUploadForm): AsyncCommandResult<ShipRes> {
  const req: DefaultReq = {
    'c': this._channel.channelId,
    't': type,
    's': form.size,
    'cs': form.checksum,
    'e': form.metadata.ext || '',
  };
  const res = await this._session.request<ShipRes>('SHIP', req);
  const success = res.status === KnownDataStatusCode.SUCCESS;
  return { success, status: res.status, result: res };
}
//batched variant of shipMedia: MSHIP takes parallel arrays of sizes / checksums / extensions
async shipMultiMedia(type: ChatType, forms: MediaUploadForm[]): AsyncCommandResult<MShipRes> {
  const req: DefaultReq = {
    'c': this._channel.channelId,
    't': type,
    'sl': forms.map((form) => form.size),
    'csl': forms.map((form) => form.checksum),
    'el': forms.map((form) => form.metadata.ext || ''),
  };
  const res = await this._session.request<MShipRes>('MSHIP', req);
  const success = res.status === KnownDataStatusCode.SUCCESS;
  return { success, status: res.status, result: res };
}
//ships the upload via SHIP, connects to the returned media server and issues POST.
//returns a MediaPost whose stream the caller writes the file bytes to; `finish()`
//then waits for the server's COMPLETE packet and yields the resulting chat log.
async uploadMedia(type: ChatType, form: MediaUploadForm): AsyncCommandResult<MediaPost> {
const shipRes = await this.shipMedia(type, form);
if (!shipRes.success) return shipRes;
const mediaStream = new LocoSecureLayer(
await NetSocket.createTCPSocket({ host: shipRes.result.vh, port: shipRes.result.p, keepAlive: true }),
await newCryptoStore(this._session.configuration.locoPEMPublicKey),
);
const session = new LocoSession(mediaStream);
const clientConfig = this._session.configuration;
const reqData: DefaultReq = {
'k': shipRes.result.k,
's': form.size,
'f': form.metadata.name,
't': type,
'c': this._channel.channelId,
'mid': Long.ONE,
'ns': true,
'u': this._session.clientUser.userId,
'os': clientConfig.agent,
'av': clientConfig.appVersion,
'nt': clientConfig.netType,
'mm': clientConfig.mccmnc,
};
//dimensions are only attached when the metadata provides them
if (form.metadata.width) reqData['w'] = form.metadata.width;
if (form.metadata.height) reqData['h'] = form.metadata.height;
const postRes = await session.request('POST', reqData);
//'o' is the byte offset to resume writing from (non-zero for partial uploads)
const offset = postRes['o'] as number;
return {
status: shipRes.status,
success: true,
result: {
stream: new FixedWriteStream(mediaStream, form.size),
offset,
async finish() {
let result: CommandResult<Chatlog> = { status: KnownDataStatusCode.OPERATION_DENIED, success: false };
//wait for the media server to acknowledge the upload with a COMPLETE packet
for await (const { method, data } of session.listen()) {
if (method === 'COMPLETE') {
if (data.status === KnownDataStatusCode.SUCCESS) {
const chatlog = structToChatlog(data['chatLog'] as ChatlogStruct);
result = { status: data.status, success: true, result: chatlog };
}
break;
}
}
//always release the socket once the upload conversation is over
if (!mediaStream.ended) mediaStream.close();
return result;
}
}
};
}
/**
 * Uploads multiple medias to the media server.
 *
 * Flow: MSHIP (obtain upload hosts/keys for every form) -> lazily, per entry,
 * open a secure LOCO socket and MPOST the metadata. The caller iterates
 * `entries`, writes each media through the entry's stream and awaits its
 * `finish()`, then calls the outer `finish()` to send the chat message.
 *
 * @param type Chat type of the media message to send.
 * @param forms Upload forms (size / checksum / metadata) for each media.
 */
async uploadMultiMedia(type: ChatType, forms: MediaUploadForm[]): AsyncCommandResult<MediaMultiPost> {
  const shipRes = await this.shipMultiMedia(type, forms);
  if (!shipRes.success) return shipRes;

  const res = shipRes.result;
  const formIter = forms[Symbol.iterator]();
  let i = 0;
  // Entries created so far; the outer finish() closes any stream the caller
  // left open before forwarding the chat.
  const entryList: MediaMultiPostEntry[] = [];
  const clientConfig = this._session.configuration;

  const entries: AsyncIterableIterator<CommandResult<MediaMultiPostEntry>> = {
    [Symbol.asyncIterator]() {
      return this;
    },
    next: async (): Promise<IteratorResult<CommandResult<MediaMultiPostEntry>>> => {
      const nextForm = formIter.next();
      if (nextForm.done) return { done: true, value: null };
      const form = nextForm.value;

      // Encrypted LOCO layer to the i-th upload host from MSHIP.
      const mediaStream = new LocoSecureLayer(
        await NetSocket.createTCPSocket({ host: res.vhl[i], port: res.pl[i], keepAlive: true }),
        await newCryptoStore(this._session.configuration.locoPEMPublicKey),
      );
      const session = new LocoSession(mediaStream);

      const postRes = await session.request('MPOST', {
        'k': res.kl[i],
        's': form.size,
        't': type,
        'u': this._session.clientUser.userId,
        'os': clientConfig.agent,
        'av': clientConfig.appVersion,
        'nt': clientConfig.netType,
        'mm': clientConfig.mccmnc,
      });

      const result = {
        // 'o' = byte offset to resume writing from (supports partial uploads).
        offset: postRes['o'] as number,
        stream: new FixedWriteStream(mediaStream, form.size),
        // Waits for this entry's COMPLETE packet, then releases the socket.
        async finish(): AsyncCommandResult {
          for await (const { method, data } of session.listen()) {
            if (method === 'COMPLETE') {
              mediaStream.close();
              return { status: data.status, success: data.status === KnownDataStatusCode.SUCCESS };
            }
          }
          if (!mediaStream.ended) mediaStream.close();
          return { status: KnownDataStatusCode.OPERATION_DENIED, success: false };
        }
      };
      // Fix: record the entry so the outer finish() can close leaked streams.
      // Previously entryList was never populated, making its cleanup loop dead code.
      entryList.push(result);
      i++;

      return {
        done: false,
        value: {
          status: postRes.status,
          success: true,
          result
        }
      }
    }
  };

  return {
    status: shipRes.status,
    success: true,
    result: {
      entries,
      // Closes any still-open entry streams, then forwards the media chat.
      finish: async () => {
        for (const entry of entryList) {
          if (!entry.stream.ended) entry.stream.close();
        }
        return this.forwardChat({
          text: '',
          type,
          attachment: {
            kl: res.kl,
            wl: forms.map((form) => form.metadata.width || 0),
            hl: forms.map((form) => form.metadata.height || 0),
            mtl: forms.map((form) => form.metadata.ext || ''),
            sl: forms.map((form) => form.size),
            imageUrls: [], thumbnailUrls: [],
            thumbnailWidths: [], thumbnailHeights: [],
          },
        });
      }
    }
  }
}
}
/**
* Default ChannelManageSession implementation.
*/
export class TalkChannelManageSession implements NormalChannelManageSession {
private _session: TalkSession;
constructor(session: TalkSession) {
this._session = session;
}
async createChannel(template: ChannelTemplate): AsyncCommandResult<Channel> {
const data: DefaultReq = {
'memberIds': template.userList.map((user) => user.userId),
};
if (template.name) data['nickName'] = template.name;
if (template.profileURL) data['profileImageUrl'] = template.profileURL;
const res = await this._session.request<CreateRes>('CREATE', data);
if (res.status !== KnownDataStatusCode.SUCCESS) return { status: res.status, success: false };
return { status: res.status, success: true, result: { channelId: res.chatId } };
}
async createMemoChannel(): AsyncCommandResult<Channel> {
const res = await this._session.request<CreateRes>('CREATE', { 'memberIds': [], 'memoChat': true });
if (res.status !== KnownDataStatusCode.SUCCESS) return { status: res.status, success: false };
return { status: res.status, success: true, result: { channelId: res.chatId } };
}
async leaveChannel(channel: Channel, block = false): AsyncCommandResult<Long> {
const res = await this._session.request(
'LEAVE',
{
'chatId': channel.channelId,
'block': block,
},
);
return {
status: res.status,
success: res.status === KnownDataStatusCode.SUCCESS,
result: res['lastTokenId'] as Long,
};
}
} | the_stack |
import * as FS from "fs";
import * as Path from "path";
import { exec, execSync } from "child_process";
import chalk from "chalk";
import { MIRAssembly, MIRObjectEntityTypeDecl, PackageConfig } from "../../compiler/mir_assembly";
import { MIREmitter } from "../../compiler/mir_emitter";
import { MIRInvokeKey } from "../../compiler/mir_ops";
import { Payload, SMTEmitter } from "./smtdecls_emitter";
import { BVEmitter, VerifierOptions } from "./smt_exp";
import { CodeFileInfo } from "../../ast/parser";
import { computeMaxConstantSize } from "../../compiler/mir_info";
// Repository root (this file lives three directory levels below it).
const bosque_dir: string = Path.normalize(Path.join(__dirname, "../../../"));
// SMT2 runtime prelude that is embedded in every generated payload.
const smtruntime_path = Path.join(bosque_dir, "bin/tooling/verifier/runtime/smtruntime.smt2");
// Native checker binary (platform-dependent extension).
const exepath = Path.normalize(Path.join(bosque_dir, "/build/output/chkworkflow" + (process.platform === "win32" ? ".exe" : "")));
// NOTE: read eagerly at module load -- a missing runtime file throws on import.
const smtruntime = FS.readFileSync(smtruntime_path).toString();
const DEFAULT_TIMEOUT = 5000;
// Outcome of a single solver query.
type CheckerResult = "unreachable" | "witness" | "possible" | "timeout";
// Corresponds to cases 1a, 1b, 2a, 2b in the paper
// (NOTE(review): five values for four labels -- "partial"/"nosmall" presumably both map to 2a; confirm).
type ChkWorkflowOutcome = "unreachable" | "witness" | "partial" | "nosmall" | "exhaustive";
// How invocations are modeled in the SMT encoding.
// NOTE(review): "RecuriveImpl" is misspelled ("RecursiveImpl") but is part of the
// exported API -- renaming would break callers.
enum AssemblyFlavor
{
RecuriveImpl,
UFOverApproximate
}
/**
 * Reads the given user source files from disk.
 *
 * @param files Paths (relative or absolute) to the source files.
 * @returns One CodeFileInfo per file, or undefined if any file cannot be read.
 */
function workflowLoadUserSrc(files: string[]): CodeFileInfo[] | undefined {
    try {
        return files.map((fname) => {
            const realpath = Path.resolve(fname);
            return { fpath: realpath, filepath: fname, contents: FS.readFileSync(realpath).toString() };
        });
    }
    catch (ex) {
        return undefined;
    }
}
/**
 * Loads every core library source file shipped under bin/core/verify.
 *
 * @returns One CodeFileInfo per core file, or undefined if the directory
 *          or any file cannot be read.
 */
function workflowLoadCoreSrc(): CodeFileInfo[] | undefined {
    try {
        const coredir = Path.join(bosque_dir, "bin/core/verify");
        return FS.readdirSync(coredir).map((fname) => {
            const cfpath = Path.join(coredir, fname);
            return { fpath: cfpath, filepath: fname, contents: FS.readFileSync(cfpath).toString() };
        });
    }
    catch (ex) {
        return undefined;
    }
}
/**
 * Compiles core + user source into a MIR assembly for the given entrypoint.
 *
 * @param usercode User source files.
 * @param entrypoint Either "Namespace::function" or a bare function name
 *                   (namespace then defaults to "NSMain").
 * @param asmflavor Whether recursion is modeled directly or over-approximated.
 * @param enablesmall Enables the SMALL_MODEL_ENABLED compile macro.
 * @returns The assembly (or undefined) together with any compile errors.
 */
function generateMASM(usercode: CodeFileInfo[], entrypoint: string, asmflavor: AssemblyFlavor, enablesmall: boolean): { masm: MIRAssembly | undefined, errors: string[] } {
    const corecode = workflowLoadCoreSrc();
    if (corecode === undefined) {
        // Fix: previously `as CodeFileInfo[]` cast away the undefined and the
        // spread below would throw a TypeError; report a proper error instead.
        return { masm: undefined, errors: ["Failed to load core source files"] };
    }
    const code = [...corecode, ...usercode];

    let namespace = "NSMain";
    let entryfunc = "main";
    const cpos = entrypoint.indexOf("::");
    if (cpos === -1) {
        entryfunc = entrypoint;
    }
    else {
        namespace = entrypoint.slice(0, cpos);
        entryfunc = entrypoint.slice(cpos + 2);
    }

    const macros: string[] = [];
    if (asmflavor === AssemblyFlavor.UFOverApproximate) {
        macros.push("UF_APPROX");
    }
    if (enablesmall) {
        macros.push("SMALL_MODEL_ENABLED");
    }

    return MIREmitter.generateMASM(new PackageConfig(), "debug", macros, { namespace: namespace, names: [entryfunc] }, true, code);
}
/**
 * Builds the SMT payload for the given assembly/mode; writes a diagnostic to
 * stderr and returns undefined if emission fails.
 */
function generateSMTPayload(masm: MIRAssembly, mode: "unreachable" | "witness" | "evaluate" | "invert", timeout: number, numgen: { int: BVEmitter, hash: BVEmitter }, vopts: VerifierOptions, errorTrgtPos: { file: string, line: number, pos: number }, entrypoint: MIRInvokeKey): Payload | undefined {
    try {
        // Entrypoints are referenced with the "__i__" invoke-key prefix.
        const ikey = "__i__" + entrypoint;
        return SMTEmitter.generateSMTPayload(masm, mode, timeout, smtruntime, vopts, numgen, errorTrgtPos, ikey);
    }
    catch (e) {
        process.stderr.write(chalk.red(`SMT generate error -- ${e}\n`));
        return undefined;
    }
}
/**
 * Runs the native checker synchronously, feeding the payload on stdin.
 * @returns The checker's trimmed stdout, or an error JSON string on failure.
 */
function runVEvaluator(cpayload: object, workflow: "unreachable" | "witness" | "eval" | "invert"): string {
    try {
        const output = execSync(`${exepath} --${workflow}`, { input: JSON.stringify(cpayload, undefined, 2) });
        return output.toString().trim();
    }
    catch (ex) {
        return JSON.stringify({ result: "error", info: `${ex}` });
    }
}
/**
 * Creates a bitvector emitter of the desired width, or undefined when the
 * assembly contains an aggregate width or constant too large to fit in a
 * signed bitvector of that width.
 */
function computeNumGen(masm: MIRAssembly, desiredbv: number): BVEmitter | undefined {
    // Widest aggregate (tuple / record / entity fields / ephemeral list) in the assembly.
    const widths = [0];
    for (const [, tdcl] of masm.tupleDecls) widths.push(tdcl.entries.length);
    for (const [, rdcl] of masm.recordDecls) widths.push(rdcl.entries.length);
    for (const [, edcl] of masm.entityDecls) widths.push(edcl instanceof MIRObjectEntityTypeDecl ? edcl.fields.length : 0);
    for (const [, eldcl] of masm.ephemeralListDecls) widths.push(eldcl.entries.length);

    let max = BigInt(Math.max(...widths));

    // Also account for the largest literal constant in any invoke body.
    for (const [, idcl] of masm.invokeDecls) {
        const csize = computeMaxConstantSize(idcl.body.body);
        if (max < csize) {
            max = csize;
        }
    }

    // Signed capacity check: width - 1 value bits must cover max.
    return 2n ** (BigInt(desiredbv) - 1n) < max ? undefined : BVEmitter.create(BigInt(desiredbv));
}
/**
 * Runs the native checker asynchronously, feeding the payload on stdin and
 * delivering either its trimmed stdout or an error JSON string to cb.
 */
function runVEvaluatorAsync(cpayload: object, workflow: "unreachable" | "witness" | "eval" | "invert", cb: (result: string) => void) {
    try {
        const proc = exec(`${exepath} --${workflow}`, (err, stdout, stderr) => {
            if (err) {
                cb(JSON.stringify({ result: "error", info: `${err}` }));
            }
            else {
                cb(stdout.toString().trim());
            }
        });

        const stdin = proc.stdin;
        stdin.setDefaultEncoding('utf-8');
        stdin.write(JSON.stringify(cpayload, undefined, 2));
        stdin.write("\n");
        stdin.end();
    }
    catch (ex) {
        cb(JSON.stringify({ result: "error", info: `${ex}` }));
    }
}
/**
 * Compiles the program and returns all potential runtime error locations
 * for the entrypoint, or undefined on compile/emission failure.
 * All diagnostics go to stderr.
 */
function workflowGetErrors(usercode: CodeFileInfo[], vopts: VerifierOptions, entrypoint: MIRInvokeKey): { file: string, line: number, pos: number, msg: string }[] | undefined {
    try {
        const { masm, errors } = generateMASM(usercode, entrypoint, AssemblyFlavor.UFOverApproximate, vopts.EnableCollection_SmallOps);
        if (masm === undefined) {
            process.stderr.write(chalk.red(`Compiler Errors!\n`));
            process.stderr.write(JSON.stringify(errors, undefined, 2) + "\n");
            return undefined;
        }
        else {
            const simplenumgen = BVEmitter.create(BigInt(vopts.ISize));
            // NOTE(review): 512-bit hash width here while most workflows use 16 -- confirm intentional.
            const hashgen = BVEmitter.create(BigInt(512));
            return SMTEmitter.generateSMTAssemblyAllErrors(masm, vopts, { int: simplenumgen, hash: hashgen }, "__i__" + entrypoint);
        }
    } catch (e) {
        // Consistency fix: was process.stdout -- every other diagnostic in this module uses stderr.
        process.stderr.write(chalk.red(`SMT generate error -- ${e}\n`));
        return undefined;
    }
}
/**
 * Compiles the program, generates the SMT payload for the given mode, and
 * writes it to `into` (raw .smt2 text when smtonly, otherwise the full JSON
 * payload). Exits the process on compile/size/generation errors (CLI helper).
 */
function workflowEmitToFile(into: string, usercode: CodeFileInfo[], asmflavor: AssemblyFlavor, mode: "unreachable" | "witness" | "evaluate" | "invert", timeout: number, vopts: VerifierOptions, errorTrgtPos: { file: string, line: number, pos: number }, entrypoint: MIRInvokeKey, smtonly: boolean) {
    try {
        const { masm, errors } = generateMASM(usercode, entrypoint, asmflavor, vopts.EnableCollection_SmallOps);
        if (masm === undefined) {
            process.stderr.write(chalk.red(`Compiler Errors!\n`));
            process.stderr.write(JSON.stringify(errors, undefined, 2) + "\n");
            process.exit(1);
        }
        else {
            const numgen = computeNumGen(masm, vopts.ISize);
            const hashgen = BVEmitter.create(BigInt(16));
            if (numgen === undefined) {
                process.stderr.write(chalk.red(`Constants larger than specified bitvector size (${vopts.ISize})!\n`));
                process.exit(1);
            }
            const res = generateSMTPayload(masm, mode, timeout, { int: numgen, hash: hashgen }, vopts, errorTrgtPos, entrypoint);
            if (res !== undefined) {
                FS.writeFileSync(into, smtonly ? res.smt2decl : JSON.stringify(res, undefined, 2));
            }
        }
    } catch (e) {
        // Consistency fix: was process.stdout -- diagnostics in this module go to stderr.
        process.stderr.write(chalk.red(`SMT generate error -- ${e}\n`));
        process.exit(1);
    }
}
/**
 * Runs a single "unreachable" (infeasibility) check for the target error
 * position and delivers the checker's JSON result (or an error JSON) to cb.
 */
function workflowBSQInfeasibleSingle(usercode: CodeFileInfo[], vopts: VerifierOptions, timeout: number, errorTrgtPos: { file: string, line: number, pos: number }, entrypoint: MIRInvokeKey, cb: (result: string) => void) {
    const fail = (info: string) => cb(JSON.stringify({ result: "error", info }));
    try {
        const { masm } = generateMASM(usercode, entrypoint, AssemblyFlavor.UFOverApproximate, vopts.EnableCollection_SmallOps);
        if (masm === undefined) {
            fail("compile errors");
            return;
        }

        const numgen = computeNumGen(masm, vopts.ISize);
        const hashgen = BVEmitter.create(BigInt(16));
        if (numgen === undefined) {
            fail(`constants larger than specified bitvector size (${vopts.ISize})`);
            return;
        }

        const payload = generateSMTPayload(masm, "unreachable", timeout, { int: numgen, hash: hashgen }, vopts, errorTrgtPos, entrypoint);
        if (payload === undefined) {
            fail("payload generation error");
            return;
        }

        runVEvaluatorAsync(payload, "unreachable", cb);
    } catch (e) {
        fail(`${e}`);
    }
}
/**
 * Runs a single "witness" (failing-input) search for the target error
 * position and delivers the checker's JSON result (or an error JSON) to cb.
 */
function workflowBSQWitnessSingle(usercode: CodeFileInfo[], vopts: VerifierOptions, timeout: number, errorTrgtPos: { file: string, line: number, pos: number }, entrypoint: MIRInvokeKey, cb: (result: string) => void) {
    const fail = (info: string) => cb(JSON.stringify({ result: "error", info }));
    try {
        // Witness search models recursion directly (RecuriveImpl flavor).
        const { masm } = generateMASM(usercode, entrypoint, AssemblyFlavor.RecuriveImpl, vopts.EnableCollection_SmallOps);
        if (masm === undefined) {
            fail("compile errors");
            return;
        }

        const numgen = computeNumGen(masm, vopts.ISize);
        const hashgen = BVEmitter.create(BigInt(16));
        if (numgen === undefined) {
            fail(`constants larger than specified bitvector size (${vopts.ISize})`);
            return;
        }

        const payload = generateSMTPayload(masm, "witness", timeout, { int: numgen, hash: hashgen }, vopts, errorTrgtPos, entrypoint);
        if (payload === undefined) {
            fail("payload generation error");
            return;
        }

        runVEvaluatorAsync(payload, "witness", cb);
    } catch (e) {
        fail(`${e}`);
    }
}
// Attempts to prove the target error unreachable under *small* models:
// sweeps increasing bitvector widths and, per width, the configured
// small/mixed havoc+operation combinations (SMALL_MODEL).
// Returns "unreachable" only if every combination proved unreachable,
// early-exits on "possible"/"timeout", and undefined on any error.
function wfInfeasibleSmall(usercode: CodeFileInfo[], timeout: number, errorTrgtPos: { file: string, line: number, pos: number }, entrypoint: MIRInvokeKey, printprogress: boolean, smallopsonly: boolean): {result: CheckerResult, time: number} | undefined {
// Verifier-option combinations to sweep (ISize is merged in per bit width below).
const SMALL_MODEL = smallopsonly
? [
{ EnableCollection_SmallHavoc: true, EnableCollection_LargeHavoc: false, EnableCollection_SmallOps: true, EnableCollection_LargeOps: false, StringOpt: "ASCII" }
]
: [
{ EnableCollection_SmallHavoc: true, EnableCollection_LargeHavoc: false, EnableCollection_SmallOps: true, EnableCollection_LargeOps: true, StringOpt: "ASCII" },
{ EnableCollection_SmallHavoc: false, EnableCollection_LargeHavoc: true, EnableCollection_SmallOps: true, EnableCollection_LargeOps: true, StringOpt: "ASCII" },
];
// Bitvector widths to try, smallest first.
const BV_SIZES = [
4,
8,
16
];
const start = Date.now();
for(let i = 0; i < BV_SIZES.length; ++i) {
if(printprogress) {
process.stderr.write(` Checking small (${BV_SIZES[i]}) bit width unreachability...\n`);
}
try {
// Recursion is over-approximated with uninterpreted functions for unreachability proofs.
const { masm } = generateMASM(usercode, entrypoint, AssemblyFlavor.UFOverApproximate, true /*EnableCollection_SmallOps always true*/);
if(masm === undefined) {
if(printprogress) {
process.stderr.write(` compile errors\n`);
}
return undefined;
}
else {
const numgen = computeNumGen(masm, BV_SIZES[i]);
const hashgen = BVEmitter.create(BigInt(16));
if(numgen === undefined) {
// Program constants don't fit this width -- skip it and try a wider one.
if(printprogress) {
process.stderr.write(` constants larger than specified bitvector size -- continuing checks...\n`);
}
}
else {
for (let j = 0; j < SMALL_MODEL.length; ++j) {
const vopts = { ISize: BV_SIZES[i], ...SMALL_MODEL[j] } as VerifierOptions;
if(printprogress) {
process.stderr.write(` Checking *${vopts.EnableCollection_LargeHavoc ? "mixed" : "small"}* inputs with *${vopts.EnableCollection_LargeOps ? "mixed" : "small"}* operations...\n`);
}
const res = generateSMTPayload(masm, "unreachable", timeout, {int: numgen, hash: hashgen}, vopts, errorTrgtPos, entrypoint);
if (res === undefined) {
if (printprogress) {
process.stderr.write(` payload generation errors\n`);
}
return undefined;
}
const cres = runVEvaluator(res, "unreachable");
const jres = JSON.parse(cres);
const rr = jres["result"];
if (rr === "unreachable") {
// This combination is clean -- keep sweeping the remaining ones.
if (printprogress) {
process.stderr.write(` unreachable -- continuing checks...\n`);
}
}
else if (rr === "possible") {
if (printprogress) {
process.stderr.write(` possible -- moving on\n`);
}
const end = Date.now();
return { result: "possible", time: (end - start) / 1000 };
}
else if (rr === "timeout") {
if (printprogress) {
process.stderr.write(` timeout -- moving on\n`);
}
const end = Date.now();
return { result: "timeout", time: (end - start) / 1000 };
}
else {
// Solver/tool error -- give up on this target.
if (printprogress) {
process.stderr.write(` error -- moving on\n`);
}
return undefined;
}
}
}
}
} catch(e) {
if(printprogress) {
process.stderr.write(` failure: ${e}\n`);
}
return undefined;
}
}
// Every width/combination proved unreachable under small models.
const end = Date.now();
return {result: "unreachable", time: (end - start) / 1000};
}
// Searches for a concrete failing input (witness) under *small* models:
// sweeps increasing bitvector widths and, per width, the configured
// small/mixed havoc+operation combinations (SMALL_MODEL).
// Returns the witness as soon as one is found, "unreachable" if the whole
// sweep came up empty, early-exits on "timeout", undefined on any error.
function wfWitnessSmall(usercode: CodeFileInfo[], timeout: number, errorTrgtPos: { file: string, line: number, pos: number }, entrypoint: MIRInvokeKey, printprogress: boolean, smallopsonly: boolean): {result: CheckerResult, time: number, input?: any} | undefined {
// Verifier-option combinations to sweep (ISize is merged in per bit width below).
const SMALL_MODEL = smallopsonly
? [
{ EnableCollection_SmallHavoc: true, EnableCollection_LargeHavoc: false, EnableCollection_SmallOps: true, EnableCollection_LargeOps: false, StringOpt: "ASCII" }
]
: [
{ EnableCollection_SmallHavoc: true, EnableCollection_LargeHavoc: false, EnableCollection_SmallOps: true, EnableCollection_LargeOps: true, StringOpt: "ASCII" },
{ EnableCollection_SmallHavoc: false, EnableCollection_LargeHavoc: true, EnableCollection_SmallOps: true, EnableCollection_LargeOps: true, StringOpt: "ASCII" },
];
// Finer-grained width ladder than the unreachability sweep (see wfInfeasibleSmall).
const BV_SIZES = [
3,
5,
8,
12,
16
];
const start = Date.now();
for(let i = 0; i < BV_SIZES.length; ++i) {
if(printprogress) {
process.stderr.write(` Looking for small (${BV_SIZES[i]}) bit width witnesses...\n`);
}
try {
// NOTE(review): witness search elsewhere uses RecuriveImpl; here UFOverApproximate
// is used -- confirm this is intentional for the small-model sweep.
const { masm } = generateMASM(usercode, entrypoint, AssemblyFlavor.UFOverApproximate, true /*EnableCollection_SmallOps always true*/);
if(masm === undefined) {
if(printprogress) {
process.stderr.write(` compile errors\n`);
}
return undefined;
}
else {
const numgen = computeNumGen(masm, BV_SIZES[i]);
const hashgen = BVEmitter.create(BigInt(16));
if(numgen === undefined) {
// Program constants don't fit this width -- skip it and try a wider one.
if(printprogress) {
process.stderr.write(` constants larger than specified bitvector size -- continuing checks...\n`);
}
}
else {
for (let j = 0; j < SMALL_MODEL.length; ++j) {
const vopts = { ISize: BV_SIZES[i], ...SMALL_MODEL[j] } as VerifierOptions;
if(printprogress) {
process.stderr.write(` Checking *${vopts.EnableCollection_LargeHavoc ? "mixed" : "small"}* inputs with *${vopts.EnableCollection_LargeOps ? "mixed" : "small"}* operations...\n`);
}
const res = generateSMTPayload(masm, "witness", timeout, {int: numgen, hash: hashgen}, vopts, errorTrgtPos, entrypoint);
if (res === undefined) {
if (printprogress) {
process.stderr.write(` payload generation errors\n`);
}
return undefined;
}
const cres = runVEvaluator(res, "witness");
const jres = JSON.parse(cres);
const rr = jres["result"];
if (rr === "unreachable") {
// No witness in this combination -- keep sweeping the remaining ones.
if (printprogress) {
process.stderr.write(` unreachable -- continuing checks...\n`);
}
}
else if (rr === "witness") {
const end = Date.now();
const witness = jres["input"];
//
//Witness may be unreachable for some reason (Float <-> Real approx), depends on NAT_MAX not being 2^64, etc.
//May want to try executing it here to validate -- if it fails then we can return undefined?
//
if (printprogress) {
process.stderr.write(` witness\n`);
}
return { result: "witness", time: (end - start) / 1000, input: witness };
}
else if (rr === "timeout") {
if (printprogress) {
process.stderr.write(` timeout -- moving on\n`);
}
const end = Date.now();
return { result: "timeout", time: (end - start) / 1000 };
}
else {
// Solver/tool error -- give up on this target.
if (printprogress) {
process.stderr.write(` error -- moving on\n`);
}
return undefined;
}
}
}
}
} catch(e) {
if(printprogress) {
process.stderr.write(` failure: ${e}\n`);
}
return undefined;
}
}
// Whole sweep found no witness under small models.
const end = Date.now();
return {result: "unreachable", time: (end - start) / 1000};
}
/**
 * Checks the target error for unreachability under the *full* 64-bit model
 * with large collection havoc and mixed operations.
 *
 * @returns The checker verdict with elapsed seconds, or undefined on any
 *          compile/generation/solver error.
 */
function wfInfeasibleLarge(usercode: CodeFileInfo[], timeout: number, errorTrgtPos: { file: string, line: number, pos: number }, entrypoint: MIRInvokeKey, printprogress: boolean): {result: CheckerResult, time: number} | undefined {
    const start = Date.now();
    const vopts = {
        ISize: 64,
        StringOpt: "ASCII",
        EnableCollection_SmallHavoc: false,
        EnableCollection_LargeHavoc: true,
        EnableCollection_SmallOps: true,
        EnableCollection_LargeOps: true
    } as VerifierOptions;

    if (printprogress) {
        // Typo fix: progress message previously read "uneachability".
        process.stderr.write(` Checking full (${64}) bit width unreachability...\n`);
    }

    try {
        // Recursion is over-approximated with uninterpreted functions for unreachability proofs.
        const { masm } = generateMASM(usercode, entrypoint, AssemblyFlavor.UFOverApproximate, vopts.EnableCollection_SmallOps);
        if (masm === undefined) {
            if (printprogress) {
                process.stderr.write(` compile errors\n`);
            }
            return undefined;
        }

        // Full-width generators; no small-constant fit check is needed at 64 bits.
        const numgen = BVEmitter.create(BigInt(64));
        const hashgen = BVEmitter.create(BigInt(512));
        const res = generateSMTPayload(masm, "unreachable", timeout, { int: numgen, hash: hashgen }, vopts, errorTrgtPos, entrypoint);
        if (res === undefined) {
            if (printprogress) {
                process.stderr.write(` payload generation errors\n`);
            }
            return undefined;
        }

        const jres = JSON.parse(runVEvaluator(res, "unreachable"));
        const rr = jres["result"];
        if (rr === "unreachable" || rr === "possible" || rr === "timeout") {
            if (printprogress) {
                process.stderr.write(` ${rr}\n`);
            }
            return { result: rr, time: (Date.now() - start) / 1000 };
        }

        // Anything else is a solver/tool error.
        if (printprogress) {
            process.stderr.write(` error\n`);
        }
        return undefined;
    } catch (e) {
        if (printprogress) {
            process.stderr.write(` failure: ${e}\n`);
        }
        return undefined;
    }
}
// Searches for a concrete failing input (witness) under the full 64-bit model
// with both small and large collection havoc enabled.
// Returns {result, time, input?}; undefined on compile/generation/solver error.
function wfWitnessLarge(usercode: CodeFileInfo[], timeout: number, errorTrgtPos: { file: string, line: number, pos: number }, entrypoint: MIRInvokeKey, printprogress: boolean): {result: CheckerResult, time: number, input?: any} | undefined {
const start = Date.now();
const vopts = {
ISize: 64,
StringOpt: "ASCII",
EnableCollection_SmallHavoc: true,
EnableCollection_LargeHavoc: true,
EnableCollection_SmallOps: true,
EnableCollection_LargeOps: true
} as VerifierOptions;
if(printprogress) {
process.stderr.write(` Looking for full (${64}) bit width witness...\n`);
}
try {
// Witness search models recursion directly (RecuriveImpl flavor), unlike the
// unreachability proofs which use the UF over-approximation.
const { masm } = generateMASM(usercode, entrypoint, AssemblyFlavor.RecuriveImpl, vopts.EnableCollection_SmallOps);
if (masm === undefined) {
if (printprogress) {
process.stderr.write(` compile errors\n`);
}
return undefined;
}
else {
// Full-width generators; no small-constant fit check is needed at 64 bits.
const numgen = BVEmitter.create(BigInt(64));
const hashgen = BVEmitter.create(BigInt(512));
const res = generateSMTPayload(masm, "witness", timeout, {int: numgen, hash: hashgen}, vopts, errorTrgtPos, entrypoint);
if (res === undefined) {
if (printprogress) {
process.stderr.write(` payload generation errors\n`);
}
return undefined;
}
const cres = runVEvaluator(res, "witness");
const jres = JSON.parse(cres);
const rr = jres["result"];
if (rr === "unreachable") {
if (printprogress) {
process.stderr.write(` unreachable\n`);
}
const end = Date.now();
return { result: "unreachable", time: (end - start) / 1000 };
}
else if (rr === "witness") {
const end = Date.now();
const witness = jres["input"];
//
//Witness may be unreachable for some reason (Float <-> Real approx), depends on NAT_MAX not being 2^64, etc.
//May want to try executing it here to validate -- if it fails then we can return undefined?
//
if (printprogress) {
process.stderr.write(` witness\n`);
}
return { result: "witness", time: (end - start) / 1000, input: witness };
}
else if (rr === "timeout") {
if (printprogress) {
process.stderr.write(` timeout\n`);
}
const end = Date.now();
return { result: "timeout", time: (end - start) / 1000 };
}
else {
// Solver/tool error.
if (printprogress) {
process.stderr.write(` error\n`);
}
return undefined;
}
}
} catch (e) {
if (printprogress) {
process.stderr.write(` failure: ${e}\n`);
}
return undefined;
}
}
/**
 * Full check workflow for a single error target. Possible outcomes (labels
 * per the progress messages): "unreachable" (1a), "witness" (1b),
 * "partial"/"nosmall" (printed as 2a), "exhaustive" (2b).
 * Returns undefined when any stage errors out.
 */
function workflowBSQCheck(usercode: CodeFileInfo[], timeout: number, errorTrgtPos: { file: string, line: number, pos: number }, entrypoint: MIRInvokeKey, printprogress: boolean): {result: ChkWorkflowOutcome, time: number, input?: any} | undefined {
    if (printprogress) {
        process.stderr.write(` Checking error at ${errorTrgtPos.file}@${errorTrgtPos.line}...\n`);
    }
    const start = Date.now();

    // Stage 1: small-model unreachability (small-ops-only, then mixed-ops).
    const smis = wfInfeasibleSmall(usercode, timeout, errorTrgtPos, entrypoint, printprogress, true);
    const smil = wfInfeasibleSmall(usercode, timeout, errorTrgtPos, entrypoint, printprogress, false);
    if (smis === undefined || smil === undefined) {
        if (printprogress) {
            process.stderr.write(` blocked on small model unreachability -- moving on :(\n`);
        }
        return undefined;
    }

    // Stage 2: if either small check was inconclusive, hunt for a small witness.
    let nosmallerror = false;
    let nolimitederror = false;
    if (smis.result !== "unreachable" || smil.result !== "unreachable") {
        const smws = wfWitnessSmall(usercode, timeout, errorTrgtPos, entrypoint, printprogress, true);
        nosmallerror = smws !== undefined && smws.result === "unreachable";
        if (smws !== undefined && smws.result === "witness") {
            if (printprogress) {
                process.stderr.write(` witness input generation successful (1b)!\n`);
            }
            return { result: "witness", time: (Date.now() - start) / 1000, input: smws.input };
        }
        const smwl = wfWitnessSmall(usercode, timeout, errorTrgtPos, entrypoint, printprogress, false);
        nolimitederror = smwl !== undefined && smwl.result === "unreachable";
        if (smwl !== undefined && smwl.result === "witness") {
            if (printprogress) {
                process.stderr.write(` witness input generation successful (1b)!\n`);
            }
            return { result: "witness", time: (Date.now() - start) / 1000, input: smwl.input };
        }
    }

    // Stage 3: full 64-bit unreachability proof.
    const lmr = wfInfeasibleLarge(usercode, timeout, errorTrgtPos, entrypoint, printprogress);
    if (lmr !== undefined && lmr.result === "unreachable") {
        if (printprogress) {
            process.stderr.write(` unreachable on all inputs (1a)!\n`);
        }
        return { result: "unreachable", time: (Date.now() - start) / 1000 };
    }

    // Stage 4: full 64-bit witness search.
    const lmw = wfWitnessLarge(usercode, timeout, errorTrgtPos, entrypoint, printprogress);
    if (lmw !== undefined && lmw.result === "witness") {
        if (printprogress) {
            process.stderr.write(` witness input generation successful (1b)!\n`);
        }
        return { result: "witness", time: (Date.now() - start) / 1000, input: lmw.input };
    }

    // No definitive verdict -- classify the partial information we do have.
    const elapsed = (Date.now() - start) / 1000;
    if (smis.result === "unreachable" || smil.result === "unreachable") {
        if (printprogress) {
            process.stderr.write(` unreachable on ${smis.result === "unreachable" ? "small" : "restricted"} inputs (2a)!\n`);
        }
        return { result: "partial", time: elapsed };
    }
    else if (nosmallerror || nolimitederror) {
        if (printprogress) {
            // NOTE(review): this prints "(2a)" like the partial case above -- confirm the label.
            process.stderr.write(` unreachable on ${!nolimitederror ? "small" : "restricted"} inputs (2a)!\n`);
        }
        return { result: "nosmall", time: elapsed };
    }
    else {
        if (printprogress) {
            // Typo fix: message previously read "exhastive".
            process.stderr.write(` exhaustive exploration (2b)!\n`);
        }
        return { result: "exhaustive", time: elapsed };
    }
}
/**
 * Compiles the program and evaluates the entrypoint on the given JSON inputs,
 * delivering the checker's result (or an error JSON) to cb.
 */
function workflowEvaluateSingle(usercode: CodeFileInfo[], jin: any[], vopts: VerifierOptions, timeout: number, entrypoint: MIRInvokeKey, cb: (result: string) => void) {
    const fail = (info: string) => cb(JSON.stringify({ result: "error", info }));
    try {
        const { masm } = generateMASM(usercode, entrypoint, AssemblyFlavor.RecuriveImpl, true);
        if (masm === undefined) {
            fail("compile errors");
            return;
        }

        const numgen = computeNumGen(masm, vopts.ISize);
        const hashgen = BVEmitter.create(BigInt(16));
        if (numgen === undefined) {
            fail(`constants larger than specified bitvector size (${vopts.ISize})`);
            return;
        }

        // Evaluation has no error target -- a sentinel position is passed.
        const payload = generateSMTPayload(masm, "evaluate", timeout, { int: numgen, hash: hashgen }, vopts, { file: "[NO FILE]", line: -1, pos: -1 }, entrypoint);
        if (payload === undefined) {
            fail("payload generation error");
            return;
        }

        runVEvaluatorAsync({ ...payload, jin: jin }, "eval", cb);
    } catch (e) {
        fail(`${e}`);
    }
}
/**
 * Compiles the program and asks the solver to invert the entrypoint -- i.e.
 * find an input producing the given JSON output -- delivering the checker's
 * result (or an error JSON) to cb.
 */
function workflowInvertSingle(usercode: CodeFileInfo[], jout: any, vopts: VerifierOptions, timeout: number, entrypoint: MIRInvokeKey, cb: (result: string) => void) {
    const fail = (info: string) => cb(JSON.stringify({ result: "error", info }));
    try {
        const { masm } = generateMASM(usercode, entrypoint, AssemblyFlavor.RecuriveImpl, true);
        if (masm === undefined) {
            fail("compile errors");
            return;
        }

        const numgen = computeNumGen(masm, vopts.ISize);
        const hashgen = BVEmitter.create(BigInt(16));
        if (numgen === undefined) {
            fail(`constants larger than specified bitvector size (${vopts.ISize})`);
            return;
        }

        // Inversion has no error target -- a sentinel position is passed.
        const payload = generateSMTPayload(masm, "invert", timeout, { int: numgen, hash: hashgen }, vopts, { file: "[NO FILE]", line: -1, pos: -1 }, entrypoint);
        if (payload === undefined) {
            fail("payload generation error");
            return;
        }

        runVEvaluatorAsync({ ...payload, jout: jout }, "invert", cb);
    } catch (e) {
        fail(`${e}`);
    }
}
// Public surface of the verifier workflow module.
export {
workflowLoadUserSrc,
workflowGetErrors, workflowEmitToFile, workflowBSQInfeasibleSingle, workflowBSQWitnessSingle, workflowBSQCheck, workflowEvaluateSingle, workflowInvertSingle,
ChkWorkflowOutcome,
AssemblyFlavor, DEFAULT_TIMEOUT
};
import { chimeeLog } from 'chimee-helper-log';
import { isArray, isFunction, isPlainObject, isString } from 'lodash';
import { accessor, alias, applyDecorators, before, nonenumerable, watch } from 'toxic-decorators';
import { isEmpty } from 'toxic-predicate-functions';
import { bind, getDeepProperty } from 'toxic-utils';
import VideoConfig from '../config/video';
import { isVideoDomAttribute, videoDomAttributes } from '../const/attribute';
import { ChimeeDomElement, isChimeeDomElement, RealChimeeDomElement, turnChimeeDomElementIntoRealChimeeDomElement } from '../const/dom';
import { domEvents } from '../const/event';
import { domMethods, kernelMethods, videoMethods } from '../const/method';
import { kernelProperties, videoReadOnlyProperties } from '../const/property';
import Dom from '../dispatcher/dom';
import Dispatcher from '../dispatcher/index';
import ChimeePlugin from '../dispatcher/plugin';
import { eventBinderCheck } from '../helper/checker';
import { IVideoKernelConstructor } from '../kernels/base';
import { ChimeePictureInPictureOnWindow } from '../plugin/picture-in-picture';
import { BinderTarget, EventOptions, SupportedKernelType, UserConfig, UserKernelsConfig, VesselConfig } from '../typings/base';
// Global augmentation: the picture-in-picture plugin (see plugin/picture-in-picture
// import above) stores its active window handle on `window`.
declare global {
// tslint:disable-next-line:interface-name
interface Window {
// Set while a Chimee picture-in-picture window is active.
__chimee_picture_in_picture: ChimeePictureInPictureOnWindow;
}
}
/**
 * VideoWrapper is the user/plugin-facing facade over a chimee Dispatcher.
 * It exposes the underlying video element, container and wrapper DOM nodes,
 * forwards attribute/style access, event binding, fullscreen and
 * picture-in-picture requests to the dispatcher's binder/dom, and mirrors
 * the native video properties and methods onto itself via `wrapAsVideo`.
 */
export default class VideoWrapper {
  // --- DOM accessors (hidden from enumeration so they don't leak into
  // --- plugin config serialization) ---

  // The container element wrapping the video element.
  @nonenumerable
  get $container(): Element {
    return this.dispatcher.dom.container;
  }
  // Names of installed plugins in their resolved priority order.
  @nonenumerable
  get $pluginOrder(): string[] {
    return this.dispatcher.order;
  }
  // Map of plugin id -> plugin instance held by the dispatcher.
  @nonenumerable
  get $plugins(): { [id: string]: ChimeePlugin } {
    return this.dispatcher.plugins;
  }
  // The real <video> element managed by the dispatcher's dom layer.
  @nonenumerable
  get $video(): HTMLVideoElement {
    return this.dispatcher.dom.videoElement;
  }
  // The outermost wrapper element supplied by the user.
  @nonenumerable
  get $wrapper(): Element {
    return this.dispatcher.dom.wrapper;
  }
  // Container configuration; setting merges into the existing config
  // rather than replacing it.
  get container(): VesselConfig {
    return this.dispatcher.containerConfig;
  }
  set container(config: VesselConfig) {
    if (!isPlainObject(config)) {
      throw new Error(`The config of container must be Object, but not ${typeof config}.`);
    }
    Object.assign(this.dispatcher.containerConfig, config);
  }
  // Current playback position. Reads go straight to the kernel; writes are
  // routed through the binder as a synchronous 'seek' event so plugins can
  // intercept them.
  get currentTime(): number {
    return this.dispatcher.kernel.currentTime;
  }
  set currentTime(second: number) {
    this.dispatcher.binder.emitSync({
      id: this.id,
      name: 'seek',
      target: 'video',
    }, second);
  }
  // The dispatcher this wrapper delegates to; set in the constructor when
  // provided, otherwise expected to be assigned by a subclass.
  protected dispatcher: Dispatcher;
  get fullscreenElement(): Element | string | void {
    return this.dispatcher.dom.fullscreenElement;
  }
  // Identifier used as the event source id on every binder emit/on call.
  protected id: string;
  @nonenumerable
  get inPictureInPictureMode(): boolean {
    return this.dispatcher.inPictureInPictureMode;
  }
  get isFullscreen(): boolean | string {
    return this.dispatcher.dom.isFullscreen;
  }
  // The page-global picture-in-picture helper, if one has been created.
  @nonenumerable
  get pictureInPictureWindow(): void | ChimeePictureInPictureOnWindow {
    return window.__chimee_picture_in_picture;
  }
  get videoRequireGuardedAttributes(): string[] {
    return this.dispatcher.dom.videoRequireGuardedAttributes;
  }
  // --- Mirrored video element / kernel / dom surface ---
  // These fields are declared here for typing only; the actual accessors and
  // methods are installed at runtime by `wrapAsVideo` (defineProperty and
  // decorator application), proxying to the video element, the videoConfig
  // or the dispatcher's dom/kernel.
  public autoload: boolean;
  public autoplay: boolean;
  public box: 'mp4' | 'hls' | 'flv' | '';
  public readonly buffered: TimeRanges;
  public readonly canPlayType: () => CanPlayTypeResult;
  public readonly captureStream: () => void;
  public changeWatchable: boolean;
  public controls: boolean;
  public readonly controlsList: boolean;
  public crossOrigin: string;
  public readonly currentSrc: string;
  public readonly dataset: DOMStringMap;
  public defaultMuted: boolean;
  public defaultPlaybackRate: number;
  public disableRemotePlayback: boolean;
  public readonly duration: number;
  public readonly ended: boolean;
  public readonly error: MediaError;
  public readonly exitFullscreen: Dom['exitFullscreen'];
  public readonly focus: Dom['focus'];
  public readonly fullscreen: Dom['fullscreen'];
  public height: number;
  public isLive: boolean;
  // kernels: no default is set on videoConfig, to avoid mis-detection
  // (translated from the original Chinese comment)
  public kernels: UserKernelsConfig;
  public loop: boolean;
  public muted: boolean;
  public readonly networkState: number;
  public readonly offsetHeight: number;
  public readonly offsetLeft: number;
  public readonly offsetParent: Element;
  public readonly offsetTop: number;
  public readonly offsetWidth: number;
  public readonly pause: () => Promise<void>;
  public readonly paused: boolean;
  public readonly play: () => Promise<void>;
  public playbackRate: number;
  public playsInline: boolean;
  public poster: string;
  public preload: 'none' | 'auto' | 'metadata' | '';
  public preset: {
    [key in SupportedKernelType]?: IVideoKernelConstructor;
  };
  public presetConfig: {
    [x: string]: object,
  };
  public readonly readyState: number;
  public readonly requestFullscreen: Dom['requestFullscreen'];
  public readonly seek: (n: number) => Promise<void>;
  public readonly seekable: TimeRanges;
  public readonly setSinkId: () => void;
  public readonly sinkId: boolean;
  public src: string;
  public readonly startLoad: () => Promise<void>;
  public readonly stopLoad: () => Promise<void>;
  public readonly tabIndex: number;
  public volume: number;
  public width: number;
  public x5VideoOrientation: 'landscape' | 'portrait' | undefined;
  public x5VideoPlayerFullscreen: boolean;
  public x5VideoPlayerType: 'h5' | undefined;
  public xWebkitAirplay: boolean;
  // Handlers registered through $on/$once, keyed by event name, so they can
  // be removed again in destroyVideoWrapper.
  private events: { [evetName: string]: Array<(...args: any[]) => any> } = {};
  // Unwatch callbacks produced by $watch; all invoked on destroy.
  private unwatchHandlers: Array<(...args: any[]) => any> = [];
  constructor({ dispatcher, id }: { dispatcher?: Dispatcher, id: string }) {
    if (dispatcher) {
      this.dispatcher = dispatcher;
    }
    this.id = id;
  }
  /**
   * Get or set an attribute on one of the chimee DOM elements.
   * @param {string} element optional, default to be video, you can choose from video | container | wrapper
   * @param {string} attribute the attribute name
   * @param {any} value optional; when omitted we consider you want to get the attribute's value. When it's offered, we consider you want to set the attribute's value; if the value you passed is undefined, that means you want to remove the attribute.
   */
  @(alias('attr') as MethodDecorator)
  public $attr(targetOrAttr: ChimeeDomElement | string, attrOrValue?: string, valueOrNothing?: string | void): string | void {
    const { method, target, attr, value } = this.getRealInfoForStyleAndAttr(arguments.length, targetOrAttr, attrOrValue, valueOrNothing);
    if (method === 'set' && target === 'videoElement') {
      // Before the video config is ready, attribute writes on the video
      // element are refused (they would be clobbered on init).
      if (!this.dispatcher.videoConfigReady) {
        /* istanbul ignore else */
        if (process.env.NODE_ENV !== 'production') {
          chimeeLog.warn('chimee', `${this.id} is tring to set attribute on video before video inited. Please wait until the inited event has benn trigger`);
        }
        return value;
      }
      // Known video attributes go through videoConfig so watchers fire.
      if (isVideoDomAttribute(attr)) {
        this.dispatcher.videoConfig[attr] = value;
        return value;
      }
    }
    return method === 'set'
      ? this.dispatcher.dom.setAttr(target, attr, value)
      : this.dispatcher.dom.getAttr(target, attr);
  }
  /**
   * Get or set an inline style on one of the chimee DOM elements.
   * @param {string} element optional, default to be video, you can choose from video | container | wrapper
   * @param {string} attribute the style property name
   * @param {any} value optional; when omitted we consider you want to get the style's value. When it's offered, we consider you want to set the style's value; if the value you passed is undefined, that means you want to remove the value.
   */
  @(alias('css') as MethodDecorator)
  public $css(targetOrAttr: ChimeeDomElement | string, attrOrValue?: string, valueOrNothing?: string | void): string | void {
    const { method, target, attr, value } = this.getRealInfoForStyleAndAttr(arguments.length, targetOrAttr, attrOrValue, valueOrNothing);
    return method === 'set'
      ? this.dispatcher.dom.setStyle(target, attr, value)
      : this.dispatcher.dom.getStyle(target, attr);
  }
  /**
   * Delete a property from a (possibly deep-watched) object or array.
   * Falls back to a plain `delete` with a warning when the target has not
   * been instrumented with a `__del` hook by the watch system.
   */
  public $del(obj: any, property: string) {
    if (!isPlainObject(obj) && !isArray(obj)) { throw new TypeError(`$del only support Array or Object, but not ${obj}, whose type is ${typeof obj}`); }
    if (!isFunction(obj.__del)) {
      /* istanbul ignore else */
      if (process.env.NODE_ENV !== 'production') { chimeeLog.warn('chimee', `${JSON.stringify(obj)} has not been deep watch. There is no need to use $del.`); }
      delete obj[property];
      return;
    }
    obj.__del(property);
  }
  /**
   * Emit an (async, Promise-wrapped) event on the binder.
   * @param {string} key event's name, or an object carrying name and target
   * @param {...args} args
   */
  @(alias('emit') as MethodDecorator)
  public $emit(
    key: string | {
      name: string,
      target: BinderTarget,
    },
    ...args: any) {
    let target: BinderTarget | void;
    if (!isString(key) && isPlainObject(key) && isString(key.name) && isString(key.target)) {
      target = key.target;
      key = key.name;
    }
    if (!isString(key)) {
      throw new TypeError('emit key parameter must be String');
    }
    /* istanbul ignore else */
    // The regex strips a single-character prefix such as "w_" before
    // comparing against native dom event names — presumably the binder's
    // target prefix; TODO confirm against binder naming.
    if (process.env.NODE_ENV !== 'production' && domEvents.indexOf(key.replace(/^\w_/, '')) > -1) {
      chimeeLog.warn('plugin', `You are try to emit ${key} event. As emit is wrapped in Promise. It make you can't use event.preventDefault and event.stopPropagation. So we advice you to use emitSync`);
    }
    return this.dispatcher.binder.emit({
      id: this.id,
      name: key,
      target,
    }, ...args);
  }
  /**
   * Emit a synchronous event on the binder (supports preventDefault etc.).
   * @param {string} key event's name, or an object carrying name and target
   * @param {...args} args
   */
  @(alias('emitSync') as MethodDecorator)
  public $emitSync(
    key: string | {
      name: string,
      target: BinderTarget,
    },
    ...args: any) {
    let target: BinderTarget | void;
    if (!isString(key) && isPlainObject(key) && isString(key.name) && isString(key.target)) {
      target = key.target;
      key = key.name;
    }
    if (!isString(key)) {
      throw new TypeError('emitSync key parameter must be String');
    }
    return this.dispatcher.binder.emitSync({
      id: this.id,
      name: key,
      target,
    }, ...args);
  }
  /**
   * Call the fullscreen api on some specific element.
   * @param {boolean} flag `true` to request fullscreen, `false` to exit
   * @param {string} element the element you want to fullscreen, default it's container, you can choose from video | container | wrapper
   * @returns false when a sync 'fullscreen' event handler cancels the request
   */
  @(alias('fullScreen') as MethodDecorator)
  @(alias('$fullScreen') as MethodDecorator)
  @(alias('fullscreen') as MethodDecorator)
  public $fullscreen(flag: boolean = true, element: ChimeeDomElement = 'container'): boolean {
    // Give plugins a chance to veto via the cancellable sync event.
    if (!this.dispatcher.binder.emitSync({
      id: this.id,
      name: 'fullscreen',
      target: 'video-dom',
    }, flag, element)) { return false; }
    const result = this.dispatcher.dom.fullscreen(flag, turnChimeeDomElementIntoRealChimeeDomElement(element));
    // Notify (non-cancellable) that the fullscreen change happened.
    this.dispatcher.binder.triggerSync({
      id: this.id,
      name: 'fullscreen',
      target: 'video-dom',
    }, flag, element);
    return result;
  }
  /**
   * remove event handler through this function
   * @param {string} key event's name
   * @param {Function} fn event's handler
   */
  @(alias('off') as MethodDecorator)
  @(alias('removeEventListener') as MethodDecorator)
  @before(eventBinderCheck)
  public $off(key: string, fn: (...args: any[]) => any, options: EventOptions = {}) {
    const eventInfo = Object.assign({}, options, {
      fn,
      id: this.id,
      name: key,
    });
    this.dispatcher.binder.off(eventInfo);
    this.removeEvents(key, fn);
  }
  /**
   * bind event handler through this function
   * @param {string} key event's name
   * @param {Function} fn event's handler
   */
  @(alias('on') as MethodDecorator)
  @(alias('addEventListener') as MethodDecorator)
  @before(eventBinderCheck)
  public $on(key: string, fn: (...args: any[]) => any, options: EventOptions = {}) {
    const eventInfo = Object.assign({}, options, {
      fn,
      id: this.id,
      name: key,
    });
    this.dispatcher.binder.on(eventInfo);
    // set on events as mark so that i can destroy it when i destroy
    this.addEvents(key, fn);
  }
  /**
   * bind one time event handler
   * @param {string} key event's name
   * @param {Function} fn event's handler
   */
  @(alias('once') as MethodDecorator)
  @before(eventBinderCheck)
  public $once(key: string, fn: (...args: any[]) => any, options: EventOptions = {}) {
    const self = this;
    // Wrap the handler so the bookkeeping entry removes itself after the
    // first invocation, mirroring the binder's once semantics.
    const boundFn = function(...args: any[]) {
      bind(fn, this)(...args);
      self.removeEvents(key, boundFn);
    };
    self.addEvents(key, boundFn);
    const eventInfo = Object.assign({}, options, {
      fn: boundFn,
      id: this.id,
      name: key,
    });
    this.dispatcher.binder.once(eventInfo);
  }
  /**
   * Set a property on a (possibly deep-watched) object or array.
   * Falls back to a plain assignment with a warning when the target has not
   * been instrumented with a `__set` hook by the watch system.
   */
  public $set(obj: any, property: string | number, value: any) {
    if (!isPlainObject(obj) && !isArray(obj)) { throw new TypeError(`$set only support Array or Object, but not ${obj}, whose type is ${typeof obj}`); }
    if (!isFunction(obj.__set)) {
      /* istanbul ignore else */
      if (process.env.NODE_ENV !== 'production') {
        chimeeLog.warn('chimee', `${JSON.stringify(obj)} has not been deep watch. There is no need to use $set.`);
      }
      obj[property] = value;
      return;
    }
    obj.__set(property, value);
  }
  /**
   * Load a new source without interrupting playback; emits 'silentLoad'
   * before delegating to the dispatcher and triggers it again with the
   * result when done.
   */
  @(alias('silentLoad') as MethodDecorator)
  public $silentLoad(src: string, option: {
    abort?: boolean,
    bias?: number,
    box?: string,
    duration?: number,
    immediate?: boolean,
    increment?: number,
    isLive?: boolean,
    kernels?: UserKernelsConfig,
    preset?: UserConfig['preset'],
    repeatTimes?: number,
  } = {}) {
    return this.dispatcher.binder.emit({
      id: this.id,
      name: 'silentLoad',
      target: 'video',
    })
      .then(() => {
        return this.dispatcher.silentLoad(src, option);
      }).then((result: any) => {
        this.dispatcher.binder.trigger({
          id: this.id,
          name: 'silentLoad',
          target: 'video',
        }, result);
      });
  }
  /**
   * Watch a (possibly dotted) property path for changes.
   * The target object is resolved as: videoConfig for known video dom
   * attributes, the dom layer for fullscreen state, otherwise a deep lookup
   * on `other` (or this wrapper) along the path.
   * @returns an unwatch function; also registered for cleanup on destroy
   */
  public $watch(key: string | string[], handler: (...args: any[]) => any, {
    deep,
    diff = true,
    other,
    proxy = false,
  }: {
    deep?: boolean,
    diff?: boolean,
    other?: any,
    proxy?: boolean,
  } = {}) {
    if (!isString(key) && !isArray(key)) { throw new TypeError(`$watch only accept string and Array<string> as key to find the target to spy on, but not ${key}, whose type is ${typeof key}`); }
    let watching = true;
    const watcher = function(...args: any[]) {
      // Skip VideoConfig-originated changes while changeWatchable is off.
      if (watching && (!(this instanceof VideoConfig) || this.dispatcher.changeWatchable)) { bind(handler, this)(...args); }
    };
    const unwatcher = () => {
      watching = false;
      const index = this.unwatchHandlers.indexOf(unwatcher);
      if (index > -1) { this.unwatchHandlers.splice(index, 1); }
    };
    const keys = isString(key)
      ? key.split('.')
      : key;
    const property = keys.pop();
    const videoConfig = this.dispatcher.videoConfig;
    const target = (
      keys.length === 0 &&
      !other &&
      isVideoDomAttribute(property)
    )
      ? videoConfig
      : [ 'isFullscreen', 'fullscreenElement' ].indexOf(property) > -1
        ? this.dispatcher.dom
        : getDeepProperty(other || this, keys, { throwError: true });
    applyDecorators(target, {
      [property]: watch(watcher, { deep, diff, proxy }),
    }, { self: true });
    this.unwatchHandlers.push(unwatcher);
    return unwatcher;
  }
  // Request leaving picture-in-picture mode via the binder.
  public exitPictureInPicture() {
    return this.dispatcher.binder.emit({
      id: this.id,
      name: 'leavepictureinpicture',
      target: 'video',
    });
  }
  /**
   * Load a source through the plugin chain; resolves once the internal
   * '_load' event confirms the load was handled.
   */
  public load(...args: any[]): Promise<void> {
    return new Promise((resolve) => {
      this.dispatcher.binder.once({
        fn: resolve,
        id: this.id,
        name: '_load',
        target: 'plugin',
      });
      this.dispatcher.binder.emit({
        id: this.id,
        name: 'load',
        target: 'plugin',
      }, ...args);
    });
  }
  // Request entering picture-in-picture mode via the binder.
  public requestPictureInPicture() {
    return this.dispatcher.binder.emit({
      id: this.id,
      name: 'enterpictureinpicture',
      target: 'video',
    });
  }
  // Tear down everything this wrapper registered: watchers and all event
  // handlers recorded in `events`.
  protected destroyVideoWrapper() {
    this.unwatchHandlers.forEach((unwatcher) => unwatcher());
    Object.keys(this.events)
      .forEach((key) => {
        if (!isArray(this.events[key])) { return; }
        this.events[key].forEach((fn) => this.$off(key, fn));
      });
    delete this.events;
  }
  /**
   * Install the video-element mirror surface on this instance: read-only
   * properties, native video methods, config-backed accessors, and
   * event-routed kernel/dom methods.
   */
  protected wrapAsVideo(videoConfig: VideoConfig) {
    // bind video read only properties on instance, so that you can get info like buffered
    videoReadOnlyProperties.forEach((key) => {
      Object.defineProperty(this, key, {
        configurable: false,
        enumerable: false,
        get() {
          return this.dispatcher.dom.videoElement[key];
        },
        set: undefined,
      });
    });
    // bind videoMethods like canplaytype on instance
    videoMethods.forEach((key) => {
      Object.defineProperty(this, key, {
        configurable: false,
        enumerable: false,
        get() {
          const video = this.dispatcher.dom.videoElement;
          return bind(video[key], video);
        },
        set: undefined,
      });
    });
    // bind video config properties on instance, so that you can just set src by this
    const props = ([]).concat(kernelProperties).concat(videoDomAttributes)
      .reduce((props: { [x: string]: Array<(...args: any[]) => any>}, key: keyof VideoConfig) => {
        props[key] = [
          accessor({
            get() {
              return videoConfig[key];
            },
            set(value: any) {
              videoConfig[key] = value;
              return value;
            },
          }),
          nonenumerable,
        ];
        return props;
      }, {});
    applyDecorators(this, props, { self: true });
    // kernel methods resolve once the matching '_<name>' ack event fires;
    // 'seek' is emitted synchronously, everything else asynchronously.
    kernelMethods.forEach((key) => {
      Object.defineProperty(this, key, {
        value(...args: any) {
          return new Promise((resolve) => {
            const id = this.id;
            this.dispatcher.binder.once({
              fn: resolve,
              id,
              name: '_' + key,
            });
            this.dispatcher.binder[/^(seek)$/.test(key) ? 'emitSync' : 'emit']({
              id,
              name: key,
              target: 'video',
            }, ...args);
          });
        },
        configurable: true,
        enumerable: false,
        writable: true,
      });
    });
    // dom methods are forwarded directly; 'fullscreen' is skipped because
    // $fullscreen already provides the event-aware version.
    domMethods.forEach((key) => {
      if (key === 'fullscreen') { return; }
      Object.defineProperty(this, key, {
        value(...args: any) {
          return this.dispatcher.dom[key](...args);
        },
        configurable: true,
        enumerable: false,
        writable: true,
      });
    });
  }
  // Record a bound handler so destroyVideoWrapper can remove it later.
  private addEvents(key: string, fn: (...args: any[]) => any) {
    this.events[key] = this.events[key] || [];
    this.events[key].push(fn);
  }
  /**
   * Normalize the overloaded $attr/$css argument forms into an explicit
   * { method, target, attr, value } descriptor:
   *   3 args -> set(target, attr, value)
   *   2 args -> get(target, attr) when the first is a chimee dom element,
   *             otherwise set('container', attr, value)
   *   1 arg  -> get('container', attr)
   */
  private getRealInfoForStyleAndAttr(argumentsLength: number, targetOrAttr: ChimeeDomElement | string, attrOrValue?: string, valueOrNothing?: string | void): {
    attr: string;
    method: 'set' | 'get';
    target: RealChimeeDomElement;
    value: string | void;
  } {
    let method: 'set' | 'get';
    let target: ChimeeDomElement;
    let attr: string;
    let value: string | void;
    if (argumentsLength > 2) {
      method = 'set';
      target = (targetOrAttr as ChimeeDomElement);
      attr = attrOrValue;
      value = valueOrNothing;
    } else if (argumentsLength === 2) {
      if (isChimeeDomElement(targetOrAttr)) {
        method = 'get';
        target = targetOrAttr;
        attr = attrOrValue;
      } else {
        method = 'set';
        target = 'container';
        attr = targetOrAttr;
        value = attrOrValue;
      }
    } else if (argumentsLength === 1) {
      method = 'get';
      target = 'container';
      attr = targetOrAttr;
    } else {
      // NOTE(review): "$ css" looks like a typo for "$css" in this message.
      throw new Error('You have to pass at least one argument to run $attr or $ css');
    }
    const realTarget = turnChimeeDomElementIntoRealChimeeDomElement(target);
    return { attr, method, value, target: realTarget };
  }
  // Drop a recorded handler; prunes the key entirely once its list empties.
  private removeEvents(key: string, fn: (...args: any[]) => any) {
    if (isEmpty(this.events[key])) { return; }
    const index = this.events[key].indexOf(fn);
    if (index < 0) { return; }
    this.events[key].splice(index, 1);
    if (isEmpty(this.events[key])) { delete this.events[key]; }
  }
}
import { expect } from 'chai';
import * as sinon from 'sinon';
import App from '../../../src/compose/app';
import {
CompositionStep,
CompositionStepAction,
} from '../../../src/compose/composition-steps';
import { Image } from '../../../src/compose/images';
import Network from '../../../src/compose/network';
import Service from '../../../src/compose/service';
import { ServiceComposeConfig } from '../../../src/compose/types/service';
import Volume from '../../../src/compose/volume';
import log from '../../../src/lib/supervisor-console';
// Baseline update context shared by the tests below: local mode off, no
// images available, no known containers and nothing currently downloading.
const defaultContext = {
  localMode: false,
  availableImages: [] as Image[],
  containerIds: {},
  downloading: [] as string[],
};
// Build an App fixture. Networks and volumes are accepted as arrays for
// convenience and converted to the name-keyed maps App expects.
function createApp({
  services = [] as Service[],
  networks = [] as Network[],
  volumes = [] as Volume[],
  isTarget = false,
  appId = 1,
  appUuid = 'appuuid',
} = {}) {
  // Index a list of named resources by their name.
  const byName = <T extends { name: string }>(items: T[]) => {
    const dict: { [name: string]: T } = {};
    for (const item of items) {
      dict[item.name] = item;
    }
    return dict;
  };
  return new App(
    {
      appId,
      appUuid,
      services,
      networks: byName(networks),
      volumes: byName(volumes),
    },
    isTarget,
  );
}
async function createService(
{
appId = 1,
appUuid = 'appuuid',
serviceName = 'test',
commit = 'test-commit',
...conf
} = {} as Partial<ServiceComposeConfig>,
{ state = {} as Partial<Service>, options = {} as any } = {},
) {
const svc = await Service.fromComposeObject(
{
appId,
appUuid,
serviceName,
commit,
running: true,
...conf,
},
options,
);
// Add additonal configuration
for (const k of Object.keys(state)) {
(svc as any)[k] = (state as any)[k];
}
return svc;
}
// Build an Image fixture with sensible defaults; any extra fields are
// spread on top of the base record.
function createImage(
  {
    appId = 1,
    appUuid = 'appuuid',
    dependent = 0,
    name = 'test-image',
    serviceName = 'test',
    commit = 'test-commit',
    ...extra
  } = {} as Partial<Image>,
) {
  const base = {
    appId,
    appUuid,
    commit,
    dependent,
    name,
    serviceName,
  };
  return { ...base, ...extra } as Image;
}
// Assert that between `min` and `max` steps with the given action exist,
// returning the matching steps for further inspection.
const expectSteps = (
  action: CompositionStepAction,
  steps: CompositionStep[],
  min = 1,
  max = min,
  message = `Expected to find ${min} step(s) with action '${action}', instead found ${JSON.stringify(
    steps.map((s) => s.action),
  )}`,
) => {
  const matching = steps.filter((step) => step.action === action);
  const count = matching.length;
  if (count < min || count > max) {
    throw new Error(message);
  }
  return matching;
};
// Assert that no step with the given action is present (min = max = 0).
function expectNoStep(
  action: CompositionStepAction,
  steps: CompositionStep[],
): void {
  expectSteps(action, steps, 0, 0);
}
// The 'default' network that the supervisor implicitly creates for app 1.
const defaultNetwork = Network.fromComposeObject('default', 1, 'appuuid', {});
describe('compose/app', () => {
before(() => {
// disable log output during testing
sinon.stub(log, 'debug');
sinon.stub(log, 'warn');
sinon.stub(log, 'info');
sinon.stub(log, 'event');
sinon.stub(log, 'success');
});
after(() => {
// Restore stubbed methods
sinon.restore();
});
describe('volume state behavior', () => {
it('should correctly infer a volume create step', () => {
// Setup current and target apps
const current = createApp();
const target = createApp({
volumes: [Volume.fromComposeObject('test-volume', 1, 'deadbeef')],
isTarget: true,
});
// Calculate the steps
const steps = current.nextStepsForAppUpdate(defaultContext, target);
// Check that a createVolume step has been created
const [createVolumeStep] = expectSteps('createVolume', steps);
expect(createVolumeStep)
.to.have.property('target')
.that.deep.includes({ name: 'test-volume' });
});
it('should correctly infer more than one volume create step', () => {
const current = createApp();
const target = createApp({
volumes: [
Volume.fromComposeObject('test-volume', 1, 'deadbeef'),
Volume.fromComposeObject('test-volume-2', 1, 'deadbeef'),
],
isTarget: true,
});
const steps = current.nextStepsForAppUpdate(defaultContext, target);
// Check that 2 createVolume steps are found
const createVolumeSteps = expectSteps('createVolume', steps, 2);
// Check that the steps contain the volumes without any order
// expectation
expect(
createVolumeSteps.filter(
(step: any) => step.target && step.target.name === 'test-volume',
),
).to.have.lengthOf(1);
expect(
createVolumeSteps.filter(
(step: any) => step.target && step.target.name === 'test-volume-2',
),
).to.have.lengthOf(1);
});
// We don't remove volumes until the end
it('should not infer a volume remove step when the app is still referenced', () => {
const current = createApp({
volumes: [
Volume.fromComposeObject('test-volume', 1, 'deadbeef'),
Volume.fromComposeObject('test-volume-2', 1, 'deadbeef'),
],
});
const target = createApp({
volumes: [Volume.fromComposeObject('test-volume-2', 1, 'deadbeef')],
isTarget: true,
});
const steps = current.nextStepsForAppUpdate(defaultContext, target);
expectNoStep('removeVolume', steps);
});
it('should correctly infer volume recreation steps', () => {
const current = createApp({
volumes: [Volume.fromComposeObject('test-volume', 1, 'deadbeef')],
});
const target = createApp({
volumes: [
Volume.fromComposeObject('test-volume', 1, 'deadbeef', {
labels: { test: 'test' },
}),
],
isTarget: true,
});
// First step should create a volume removal step
const stepsForRemoval = current.nextStepsForAppUpdate(
defaultContext,
target,
);
const [removalStep] = expectSteps('removeVolume', stepsForRemoval);
expect(removalStep)
.to.have.property('current')
.that.has.property('name')
.that.equals('test-volume');
expect(removalStep)
.to.have.property('current')
.that.has.property('appId')
.that.equals(1);
// we are assuming that after the execution steps the current state of the
// app will look like this
const intermediate = createApp({
volumes: [],
});
// This test is extra since we have already tested that the volume gets created
const stepsForCreation = intermediate.nextStepsForAppUpdate(
defaultContext,
target,
);
const [creationStep] = expectSteps('createVolume', stepsForCreation);
expect(creationStep)
.to.have.property('target')
.that.has.property('config')
.that.deep.includes({
labels: {
'io.balena.supervised': 'true',
'io.balena.app-uuid': 'deadbeef',
test: 'test',
},
});
});
it('should kill dependencies of a volume before changing config', async () => {
const current = createApp({
services: [
await createService({
composition: { volumes: ['test-volume:/data'] },
}),
],
volumes: [Volume.fromComposeObject('test-volume', 1, 'deadbeef')],
});
const target = createApp({
services: [
await createService({
composition: { volumes: ['test-volume:/data'] },
}),
],
volumes: [
Volume.fromComposeObject('test-volume', 1, 'deadbeef', {
labels: { test: 'test' },
}),
],
isTarget: true,
});
// Calculate steps
const steps = current.nextStepsForAppUpdate(defaultContext, target);
const [killStep] = expectSteps('kill', steps);
expect(killStep)
.to.have.property('current')
.that.deep.includes({ serviceName: 'test' });
});
it('should correctly infer to remove an app volumes when the app is being removed', async () => {
const current = createApp({
volumes: [Volume.fromComposeObject('test-volume', 1, 'deadbeef')],
});
const steps = await current.stepsToRemoveApp(defaultContext);
const [removeVolumeStep] = expectSteps('removeVolume', steps);
expect(removeVolumeStep).to.have.property('current').that.deep.includes({
name: 'test-volume',
});
});
it('should not output a kill step for a service which is already stopping when changing a volume', async () => {
const service = await createService({
composition: { volumes: ['test-volume:/data'] },
});
service.status = 'Stopping';
const current = createApp({
services: [service],
volumes: [Volume.fromComposeObject('test-volume', 1, 'deadbeef')],
});
const target = createApp({
services: [service],
volumes: [
Volume.fromComposeObject('test-volume', 1, 'deadbeef', {
labels: { test: 'test' },
}),
],
isTarget: true,
});
const steps = current.nextStepsForAppUpdate(defaultContext, target);
expectNoStep('kill', steps);
});
it('should generate the correct step sequence for a volume purge request', async () => {
const service = await createService({
appId: 1,
appUuid: 'deadbeef',
image: 'test-image',
composition: { volumes: ['db-volume:/data'] },
});
const volume = Volume.fromComposeObject('db-volume', 1, 'deadbeef');
const contextWithImages = {
...defaultContext,
...{
availableImages: [
createImage({
appId: service.appId,
name: 'test-image',
}),
],
},
};
// Temporarily set target services & volumes to empty, as in doPurge
const intermediateTarget = createApp({
services: [],
networks: [defaultNetwork],
isTarget: true,
});
// Generate initial state with one service & one volume
const current = createApp({
services: [service],
networks: [defaultNetwork],
volumes: [volume],
});
// Step 1: kill
const steps = current.nextStepsForAppUpdate(
contextWithImages,
intermediateTarget,
);
expectSteps('kill', steps);
// Step 2: noop (service is stopping)
service.status = 'Stopping';
const secondStageSteps = current.nextStepsForAppUpdate(
contextWithImages,
intermediateTarget,
);
expectSteps('noop', secondStageSteps);
expect(secondStageSteps).to.have.length(1);
// No steps, simulate container removal & explicit volume removal as in doPurge
const currentWithServiceRemoved = createApp({
services: [],
networks: [defaultNetwork],
volumes: [volume],
});
expect(
currentWithServiceRemoved.nextStepsForAppUpdate(
contextWithImages,
intermediateTarget,
),
).to.have.length(0);
// Simulate volume removal
const currentWithVolumesRemoved = createApp({
services: [],
networks: [defaultNetwork],
volumes: [],
});
// Step 3: createVolume
service.status = 'Running';
const target = createApp({
services: [service],
networks: [defaultNetwork],
volumes: [volume],
isTarget: true,
});
const recreateVolumeSteps = currentWithVolumesRemoved.nextStepsForAppUpdate(
contextWithImages,
target,
);
expect(recreateVolumeSteps).to.have.length(1);
expectSteps('createVolume', recreateVolumeSteps);
// Final step: start service
const currentWithVolumeRecreated = createApp({
services: [],
networks: [defaultNetwork],
volumes: [volume],
});
const createServiceSteps = currentWithVolumeRecreated.nextStepsForAppUpdate(
contextWithImages,
target,
);
expectSteps('start', createServiceSteps);
});
});
describe('network state behavior', () => {
it('should correctly infer a network create step', () => {
const current = createApp({ networks: [] });
const target = createApp({
networks: [Network.fromComposeObject('default', 1, 'deadbeef', {})],
isTarget: true,
});
const steps = current.nextStepsForAppUpdate(defaultContext, target);
const [createNetworkStep] = expectSteps('createNetwork', steps);
expect(createNetworkStep).to.have.property('target').that.deep.includes({
name: 'default',
});
});
it('should correctly infer a network remove step', () => {
const current = createApp({
networks: [
Network.fromComposeObject('test-network', 1, 'deadbeef', {}),
],
isTarget: true,
});
const target = createApp({ networks: [], isTarget: true });
const steps = current.nextStepsForAppUpdate(defaultContext, target);
const [removeNetworkStep] = expectSteps('removeNetwork', steps);
expect(removeNetworkStep).to.have.property('current').that.deep.includes({
name: 'test-network',
});
});
it('should correctly infer more than one network removal step', () => {
const current = createApp({
networks: [
Network.fromComposeObject('test-network', 1, 'deadbeef', {}),
Network.fromComposeObject('test-network-2', 1, 'deadbeef', {}),
],
isTarget: true,
});
const target = createApp({ networks: [], isTarget: true });
const steps = current.nextStepsForAppUpdate(defaultContext, target);
const [first, second] = expectSteps('removeNetwork', steps, 2);
expect(first).to.have.property('current').that.deep.includes({
name: 'test-network',
});
expect(second).to.have.property('current').that.deep.includes({
name: 'test-network-2',
});
});
it('should correctly infer a network recreation step', () => {
const current = createApp({
networks: [
Network.fromComposeObject('test-network', 1, 'deadbeef', {}),
],
});
const target = createApp({
networks: [
Network.fromComposeObject('test-network', 1, 'deadbeef', {
labels: { TEST: 'TEST' },
}),
],
isTarget: true,
});
const stepsForRemoval = current.nextStepsForAppUpdate(
defaultContext,
target,
);
const [removeStep] = expectSteps('removeNetwork', stepsForRemoval);
expect(removeStep)
.to.have.property('current')
.that.deep.includes({ name: 'test-network' });
// We assume that the intermediate state looks like this
const intermediate = createApp({
networks: [],
});
const stepsForCreation = intermediate.nextStepsForAppUpdate(
defaultContext,
target,
);
const [createNetworkStep] = expectSteps(
'createNetwork',
stepsForCreation,
1,
2, // The update will also generate a step for the default network but we don't care about that
);
expect(createNetworkStep)
.to.have.property('target')
.that.deep.includes({ name: 'test-network' });
expect(createNetworkStep)
.to.have.property('target')
.that.has.property('config')
.that.deep.includes({
labels: { TEST: 'TEST', 'io.balena.app-id': '1' },
});
});
it('should kill dependencies of networks before removing', async () => {
const current = createApp({
appUuid: 'deadbeef',
services: [
await createService({
appId: 1,
appUuid: 'deadbeef',
composition: { networks: ['test-network'] },
}),
],
networks: [
Network.fromComposeObject('test-network', 1, 'deadbeef', {}),
],
});
const target = createApp({
appUuid: 'deadbeef',
services: [await createService({ appUuid: 'deadbeef' })],
networks: [],
isTarget: true,
});
const steps = current.nextStepsForAppUpdate(defaultContext, target);
const [killStep] = expectSteps('kill', steps);
expect(killStep)
.to.have.property('current')
.that.deep.includes({ serviceName: 'test' });
});
it('should kill dependencies of networks before changing config', async () => {
const current = createApp({
services: [
await createService({
composition: { networks: ['test-network'] },
}),
],
networks: [Network.fromComposeObject('test-network', 1, 'appuuid', {})],
});
const target = createApp({
services: [
await createService({
composition: { networks: { 'test-network': {} } },
}),
],
networks: [
Network.fromComposeObject('test-network', 1, 'appuuid', {
labels: { test: 'test' },
}),
],
isTarget: true,
});
const steps = current.nextStepsForAppUpdate(defaultContext, target);
const [killStep] = expectSteps('kill', steps);
expect(killStep)
.to.have.property('current')
.that.deep.includes({ serviceName: 'test' });
// We shouldn't try to remove the network until we have gotten rid of the dependencies
expectNoStep('removeNetwork', steps);
});
it('should create the default network if it does not exist', () => {
const current = createApp({ networks: [] });
const target = createApp({ networks: [], isTarget: true });
const steps = current.nextStepsForAppUpdate(defaultContext, target);
// A default network should always be created
const [createNetworkStep] = expectSteps('createNetwork', steps);
expect(createNetworkStep)
.to.have.property('target')
.that.deep.includes({ name: 'default' });
});
it('should not create the default network if it already exists', () => {
const current = createApp({
networks: [Network.fromComposeObject('default', 1, 'deadbeef', {})],
});
const target = createApp({ networks: [], isTarget: true });
const steps = current.nextStepsForAppUpdate(defaultContext, target);
// The network should not be created again
expectNoStep('createNetwork', steps);
});
});
describe('service state behavior', () => {
it('should create a kill step for a service which is no longer referenced', async () => {
const current = createApp({
services: [
await createService({ appId: 1, serviceName: 'main' }),
await createService({ appId: 1, serviceName: 'aux' }),
],
networks: [defaultNetwork],
});
const target = createApp({
services: [await createService({ appId: 1, serviceName: 'main' })],
networks: [defaultNetwork],
isTarget: true,
});
const steps = current.nextStepsForAppUpdate(defaultContext, target);
const [killStep] = expectSteps('kill', steps);
expect(killStep)
.to.have.property('current')
.to.deep.include({ serviceName: 'aux' });
});
it('should emit a noop when a service which is no longer referenced is already stopping', async () => {
const current = createApp({
services: [
await createService(
{ serviceName: 'main' },
{ state: { status: 'Stopping' } },
),
],
});
const target = createApp({ services: [], isTarget: true });
const steps = current.nextStepsForAppUpdate(defaultContext, target);
expectSteps('noop', steps);
// Kill was already emitted for this service
expectNoStep('kill', steps);
});
		it('should remove a dead container that is still referenced in the target state', async () => {
			// A 'Dead' container must be removed before it can be recreated
			const current = createApp({
				services: [
					await createService(
						{ serviceName: 'main' },
						{ state: { status: 'Dead' } },
					),
				],
			});
			const target = createApp({
				services: [await createService({ serviceName: 'main' })],
				isTarget: true,
			});
			const steps = current.nextStepsForAppUpdate(defaultContext, target);
			const [removeStep] = expectSteps('remove', steps);
			expect(removeStep)
				.to.have.property('current')
				.to.deep.include({ serviceName: 'main' });
		});
		it('should remove a dead container that is not referenced in the target state', async () => {
			// Dead containers are removed even when the target no longer mentions them
			const current = createApp({
				services: [
					await createService(
						{ serviceName: 'main' },
						{ state: { status: 'Dead' } },
					),
				],
			});
			const target = createApp({ services: [], isTarget: true });
			const steps = current.nextStepsForAppUpdate(defaultContext, target);
			const [removeStep] = expectSteps('remove', steps);
			expect(removeStep)
				.to.have.property('current')
				.to.deep.include({ serviceName: 'main' });
		});
		it('should emit a noop when a service has an image downloading', async () => {
			const current = createApp({ services: [] });
			const target = createApp({
				services: [
					await createService({ image: 'main-image', serviceName: 'main' }),
				],
				isTarget: true,
			});
			// 'main-image' is mid-download, so no additional fetch should be emitted
			const steps = current.nextStepsForAppUpdate(
				{ ...defaultContext, ...{ downloading: ['main-image'] } },
				target,
			);
			expectSteps('noop', steps);
			expectNoStep('fetch', steps);
		});
		it('should emit an updateMetadata step when a service has not changed but the release has', async () => {
			// Same service config, only the release commit differs
			const current = createApp({
				services: [
					await createService({ serviceName: 'main', commit: 'old-release' }),
				],
			});
			const target = createApp({
				services: [
					await createService({ serviceName: 'main', commit: 'new-release' }),
				],
				isTarget: true,
			});
			const steps = current.nextStepsForAppUpdate(defaultContext, target);
			const [updateMetadataStep] = expectSteps('updateMetadata', steps);
			expect(updateMetadataStep)
				.to.have.property('current')
				.to.deep.include({ serviceName: 'main', commit: 'old-release' });
			expect(updateMetadataStep)
				.to.have.property('target')
				.to.deep.include({ serviceName: 'main', commit: 'new-release' });
		});
		it('should stop a container which has `running: false` as its target', async () => {
			// Target keeps the service but asks for it to be stopped
			const current = createApp({
				services: [await createService({ serviceName: 'main' })],
			});
			const target = createApp({
				services: [
					await createService({ running: false, serviceName: 'main' }),
				],
				isTarget: true,
			});
			const steps = current.nextStepsForAppUpdate(defaultContext, target);
			const [stopStep] = expectSteps('stop', steps);
			expect(stopStep)
				.to.have.property('current')
				.to.deep.include({ serviceName: 'main' });
		});
		it('should not try to start a container which has exited and has restart policy of no', async () => {
			// Container is a "run once" type of service so it has exited.
			const current = createApp({
				services: [
					await createService(
						{ composition: { restart: 'no' }, running: false },
						{ state: { containerId: 'run_once' } },
					),
				],
			});
			// Now test that another start step is not added on this service
			const target = createApp({
				services: [
					await createService(
						{ composition: { restart: 'no' }, running: false },
						{ state: { containerId: 'run_once' } },
					),
				],
				isTarget: true,
			});
			const steps = current.nextStepsForAppUpdate(defaultContext, target);
			expectNoStep('start', steps);
		});
		it('should recreate a container if the target configuration changes', async () => {
			const contextWithImages = {
				...defaultContext,
				...{
					availableImages: [
						createImage({ appId: 1, serviceName: 'main', name: 'main-image' }),
					],
				},
			};
			const current = createApp({
				services: [await createService({ appId: 1, serviceName: 'main' })],
				// Default network was already created
				networks: [defaultNetwork],
			});
			const target = createApp({
				services: [
					await createService({
						appId: 1,
						serviceName: 'main',
						composition: { privileged: true },
					}),
				],
				networks: [defaultNetwork],
				isTarget: true,
			});
			// should see a 'kill' first, since the container config changed
			const stepsToIntermediate = current.nextStepsForAppUpdate(
				contextWithImages,
				target,
			);
			const [killStep] = expectSteps('kill', stepsToIntermediate);
			expect(killStep)
				.to.have.property('current')
				.that.deep.includes({ serviceName: 'main' });
			// assume the intermediate step has already removed the app
			const intermediate = createApp({
				services: [],
				// Default network was already created
				networks: [defaultNetwork],
			});
			// now should see a 'start'
			const stepsToTarget = intermediate.nextStepsForAppUpdate(
				contextWithImages,
				target,
			);
			const [startStep] = expectSteps('start', stepsToTarget);
			expect(startStep)
				.to.have.property('target')
				.that.deep.includes({ serviceName: 'main' });
			// The recreated container must carry the new config
			expect(startStep)
				.to.have.property('target')
				.that.has.property('config')
				.that.deep.includes({ privileged: true });
		});
		it('should not start a container when it depends on a service which is being installed', async () => {
			const availableImages = [
				createImage({ appId: 1, serviceName: 'main', name: 'main-image' }),
				createImage({ appId: 1, serviceName: 'dep', name: 'dep-image' }),
			];
			const contextWithImages = { ...defaultContext, ...{ availableImages } };
			// 'dep' exists but is still installing, so 'main' cannot start yet
			const current = createApp({
				services: [
					await createService(
						{
							running: false,
							appId: 1,
							serviceName: 'dep',
						},
						{
							state: {
								status: 'Installing',
								containerId: 'dep-id',
							},
						},
					),
				],
				networks: [defaultNetwork],
			});
			const target = createApp({
				services: [
					await createService({
						appId: 1,
						serviceName: 'main',
						composition: {
							depends_on: ['dep'],
						},
					}),
					await createService({
						appId: 1,
						serviceName: 'dep',
					}),
				],
				networks: [defaultNetwork],
				isTarget: true,
			});
			const stepsToIntermediate = current.nextStepsForAppUpdate(
				contextWithImages,
				target,
			);
			// Only one start step and it should be that of the 'dep' service
			const [startStep] = expectSteps('start', stepsToIntermediate);
			expect(startStep)
				.to.have.property('target')
				.that.deep.includes({ serviceName: 'dep' });
			// we now make our current state have the 'dep' service as started...
			const intermediate = createApp({
				services: [
					await createService(
						{ appId: 1, serviceName: 'dep' },
						{ state: { containerId: 'dep-id' } },
					),
				],
				networks: [defaultNetwork],
			});
			// we should now see a start for the 'main' service...
			const stepsToTarget = intermediate.nextStepsForAppUpdate(
				{ ...contextWithImages, ...{ containerIds: { dep: 'dep-id' } } },
				target,
			);
			const [startMainStep] = expectSteps('start', stepsToTarget);
			expect(startMainStep)
				.to.have.property('target')
				.that.deep.includes({ serviceName: 'main' });
		});
		it('should not create a start step when all that changes is a running state', async () => {
			const contextWithImages = {
				...defaultContext,
				...{
					availableImages: [
						createImage({ appId: 1, name: 'main-image', serviceName: 'main' }),
					],
				},
			};
			// Service exists but is stopped; target wants it running
			const current = createApp({
				services: [
					await createService({ running: false, serviceName: 'main' }),
				],
				networks: [defaultNetwork],
			});
			const target = createApp({
				services: [await createService({ serviceName: 'main' })],
				networks: [defaultNetwork],
				isTarget: true,
			});
			const steps = current.nextStepsForAppUpdate(contextWithImages, target);
			// There should be no steps since the engine manages restart policy for stopped containers
			expect(steps.length).to.equal(0);
		});
		it('should create a kill step when a service release has to be updated but the strategy is kill-then-download', async () => {
			const contextWithImages = {
				...defaultContext,
				...{
					availableImages: [
						createImage({ appId: 1, name: 'main-image', serviceName: 'main' }),
					],
				},
			};
			// kill-then-download: the old container dies before the new image is fetched
			const labels = {
				'io.balena.update.strategy': 'kill-then-download',
			};
			const current = createApp({
				services: [
					await createService({
						labels,
						image: 'main-image',
						serviceName: 'main',
						commit: 'old-release',
					}),
				],
				networks: [defaultNetwork],
			});
			const target = createApp({
				services: [
					await createService({
						labels,
						image: 'main-image-2',
						serviceName: 'main',
						commit: 'new-release',
					}),
				],
				networks: [defaultNetwork],
				isTarget: true,
			});
			const stepsToIntermediate = current.nextStepsForAppUpdate(
				contextWithImages,
				target,
			);
			const [killStep] = expectSteps('kill', stepsToIntermediate);
			expect(killStep)
				.to.have.property('current')
				.that.deep.includes({ serviceName: 'main' });
			// assume steps were applied
			const intermediate = createApp({
				services: [],
				networks: [defaultNetwork],
			});
			// only after the kill does the new image get fetched
			const stepsToTarget = intermediate.nextStepsForAppUpdate(
				contextWithImages,
				target,
			);
			const [fetchStep] = expectSteps('fetch', stepsToTarget);
			expect(fetchStep)
				.to.have.property('image')
				.that.deep.includes({ name: 'main-image-2' });
		});
		it('should not infer a kill step with the default strategy if a dependency is not downloaded', async () => {
			// The default (download-then-kill) strategy waits for every target image
			const contextWithImages = {
				...defaultContext,
				...{
					downloading: ['dep-image-2'], // The depended service image is being downloaded
					availableImages: [
						createImage({ appId: 1, name: 'main-image', serviceName: 'main' }),
						createImage({ appId: 1, name: 'dep-image', serviceName: 'dep' }),
						createImage({
							appId: 1,
							name: 'main-image-2',
							serviceName: 'main',
						}),
					],
				},
			};
			const current = createApp({
				services: [
					await createService({
						image: 'main-image',
						appId: 1,
						serviceName: 'main',
						commit: 'old-release',
						composition: {
							depends_on: ['dep'],
						},
					}),
					await createService({
						image: 'dep-image',
						appId: 1,
						serviceName: 'dep',
						commit: 'old-release',
					}),
				],
				networks: [defaultNetwork],
			});
			const target = createApp({
				services: [
					await createService({
						image: 'main-image-2',
						appId: 1,
						serviceName: 'main',
						commit: 'new-release',
						composition: {
							depends_on: ['dep'],
						},
					}),
					await createService({
						image: 'dep-image-2',
						appId: 1,
						serviceName: 'dep',
						commit: 'new-release',
					}),
				],
				networks: [defaultNetwork],
				isTarget: true,
			});
			// No kill steps should be generated
			const steps = current.nextStepsForAppUpdate(contextWithImages, target);
			expectNoStep('kill', steps);
		});
		it('should create several kill steps as long as there are unmet dependencies', async () => {
			const contextWithImages = {
				...defaultContext,
				...{
					availableImages: [
						createImage({ appId: 1, name: 'main-image', serviceName: 'main' }),
						createImage({
							appId: 1,
							name: 'main-image-2',
							serviceName: 'main',
						}),
					],
				},
			};
			const current = createApp({
				services: [
					await createService({
						image: 'main-image',
						serviceName: 'main',
						commit: 'old-release',
					}),
				],
				networks: [defaultNetwork],
			});
			const target = createApp({
				services: [
					await createService({
						image: 'main-image-2',
						// new release as target
						serviceName: 'main',
						commit: 'new-release',
					}),
				],
				networks: [defaultNetwork],
				isTarget: true,
			});
			const stepsFirstTry = current.nextStepsForAppUpdate(
				contextWithImages,
				target,
			);
			const [killStep] = expectSteps('kill', stepsFirstTry);
			expect(killStep)
				.to.have.property('current')
				.that.deep.includes({ serviceName: 'main' });
			// if at first you don't succeed
			const stepsSecondTry = current.nextStepsForAppUpdate(
				contextWithImages,
				target,
			);
			// Since current state has not changed, another kill step needs to be generated
			const [newKillStep] = expectSteps('kill', stepsSecondTry);
			expect(newKillStep)
				.to.have.property('current')
				.that.deep.includes({ serviceName: 'main' });
		});
		it('should create a kill step when a service config has to be updated but the strategy is kill-then-download', async () => {
			const labels = {
				'io.balena.update.strategy': 'kill-then-download',
			};
			const current = createApp({
				services: [await createService({ labels })],
			});
			// Only the container config changes (privileged), not the image
			const target = createApp({
				services: [
					await createService({
						labels,
						composition: {
							privileged: true,
						},
					}),
				],
				isTarget: true,
			});
			const steps = current.nextStepsForAppUpdate(defaultContext, target);
			const [killStep] = expectSteps('kill', steps);
			expect(killStep)
				.to.have.property('current')
				.that.deep.includes({ serviceName: 'test' });
		});
		it('should not start a service when a network it depends on is not ready', async () => {
			// The 'test' network does not exist yet in the current state
			const current = createApp({ networks: [defaultNetwork] });
			const target = createApp({
				services: [
					await createService({
						composition: { networks: ['test'] },
						appId: 1,
					}),
				],
				networks: [
					defaultNetwork,
					Network.fromComposeObject('test', 1, 'appuuid', {}),
				],
				isTarget: true,
			});
			const steps = current.nextStepsForAppUpdate(defaultContext, target);
			const [createNetworkStep] = expectSteps('createNetwork', steps);
			expect(createNetworkStep)
				.to.have.property('target')
				.that.deep.includes({ name: 'test' });
			// service should not be created yet
			expectNoStep('start', steps);
		});
		it('should create several kill steps as long as there are no unmet dependencies', async () => {
			// 'one' and 'two' are dropped from the target release entirely
			const current = createApp({
				services: [
					await createService({
						appId: 1,
						serviceName: 'one',
						commit: 'old-release',
					}),
					await createService({
						appId: 1,
						serviceName: 'two',
						commit: 'old-release',
					}),
					await createService({
						appId: 1,
						serviceName: 'three',
						commit: 'old-release',
					}),
				],
			});
			const target = createApp({
				services: [
					await createService({
						appId: 1,
						serviceName: 'three',
						commit: 'new-release',
					}),
				],
				isTarget: true,
			});
			const steps = current.nextStepsForAppUpdate(defaultContext, target);
			// One kill step each for 'one' and 'two'
			expectSteps('kill', steps, 2);
		});
		it('should not infer a kill step with the default strategy before all target images have been downloaded', async () => {
			const contextWithImages = {
				...defaultContext,
				...{
					downloading: ['other-image-2'], // One of the images is being downloaded
					availableImages: [
						createImage({ appId: 1, name: 'main-image', serviceName: 'main' }),
						createImage({
							appId: 1,
							name: 'other-image',
							serviceName: 'other',
						}),
						createImage({
							appId: 1,
							name: 'main-image-2',
							serviceName: 'main',
						}),
					],
				},
			};
			const current = createApp({
				services: [
					await createService({
						image: 'main-image',
						appId: 1,
						serviceName: 'main',
						commit: 'old-release',
					}),
					await createService({
						image: 'other-image',
						appId: 1,
						serviceName: 'other',
						commit: 'old-release',
					}),
				],
				networks: [defaultNetwork],
			});
			const target = createApp({
				services: [
					await createService({
						image: 'main-image-2',
						appId: 1,
						serviceName: 'main',
						commit: 'new-release',
					}),
					await createService({
						image: 'other-image-2',
						appId: 1,
						serviceName: 'other',
						commit: 'new-release',
					}),
				],
				networks: [defaultNetwork],
				isTarget: true,
			});
			// No kill steps should be generated
			const steps = current.nextStepsForAppUpdate(contextWithImages, target);
			expectNoStep('kill', steps);
		});
		it('should not infer a start step before all target images have been downloaded', async () => {
			const contextWithImages = {
				...defaultContext,
				...{
					downloading: ['other-image'], // One of the images is being downloaded
					availableImages: [
						createImage({ appId: 1, name: 'main-image', serviceName: 'main' }),
					],
				},
			};
			const current = createApp({
				services: [],
				networks: [defaultNetwork],
			});
			const target = createApp({
				services: [
					await createService({
						image: 'main-image',
						appId: 1,
						serviceName: 'main',
						commit: 'new-release',
					}),
					await createService({
						image: 'other-image',
						appId: 1,
						serviceName: 'other',
						commit: 'new-release',
					}),
				],
				networks: [defaultNetwork],
				isTarget: true,
			});
			// No start steps should be generated while 'other-image' is still downloading
			const steps = current.nextStepsForAppUpdate(contextWithImages, target);
			expectNoStep('start', steps);
		});
		it('should infer a start step only when target images have been downloaded', async () => {
			const contextWithImages = {
				...defaultContext,
				...{
					downloading: [], // Nothing is being downloaded anymore
					availableImages: [
						createImage({ appId: 1, name: 'main-image', serviceName: 'main' }),
						createImage({
							appId: 1,
							name: 'other-image',
							serviceName: 'other',
						}),
					],
				},
			};
			const current = createApp({
				services: [],
				networks: [defaultNetwork],
			});
			const target = createApp({
				services: [
					await createService({
						image: 'main-image',
						appId: 1,
						serviceName: 'main',
						commit: 'new-release',
					}),
					await createService({
						image: 'other-image',
						appId: 1,
						serviceName: 'other',
						commit: 'new-release',
					}),
				],
				networks: [defaultNetwork],
				isTarget: true,
			});
			// Both images are available, so both services can be started
			const steps = current.nextStepsForAppUpdate(contextWithImages, target);
			expectSteps('start', steps, 2);
		});
});
	describe('image state behavior', () => {
		it('should emit a fetch step when an image has not been downloaded for a service', async () => {
			const current = createApp({ services: [] });
			const target = createApp({
				services: [await createService({ serviceName: 'main' })],
				isTarget: true,
			});
			const steps = current.nextStepsForAppUpdate(defaultContext, target);
			const [fetchStep] = expectSteps('fetch', steps);
			expect(fetchStep)
				.to.have.property('image')
				.that.deep.includes({ serviceName: 'main' });
		});
		it('should not infer a fetch step when the download is already in progress', async () => {
			const contextWithDownloading = {
				...defaultContext,
				...{
					downloading: ['image2'],
				},
			};
			const current = createApp({ services: [] });
			const target = createApp({
				services: [
					await createService({ image: 'image2', serviceName: 'main' }),
				],
				isTarget: true,
			});
			const steps = current.nextStepsForAppUpdate(
				contextWithDownloading,
				target,
			);
			expectNoStep('fetch', steps);
		});
		// NOTE(review): this title duplicates an earlier test in this file; the
		// assertions here primarily pin the fetch step for the new image (plus the
		// absence of a kill) — consider renaming for clarity.
		it('should not infer a kill step with the default strategy if a dependency is not downloaded', async () => {
			const current = createApp({
				services: [await createService({ image: 'image1' })],
			});
			const target = createApp({
				services: [await createService({ image: 'image2' })],
				isTarget: true,
			});
			const steps = current.nextStepsForAppUpdate(defaultContext, target);
			// The target image must be fetched before any kill can be inferred
			const [fetchStep] = expectSteps('fetch', steps);
			expect(fetchStep)
				.to.have.property('image')
				.that.deep.includes({ name: 'image2' });
			expectNoStep('kill', steps);
		});
	});
}); | the_stack |
import {
$el,
bind,
createElem,
getBooleanTypeAttr,
getNumTypeAttr,
getStrTypeAttr,
removeAttrs,
setCss,
setHtml,
siblings
} from '../../dom-utils';
import { moreThanOneNode } from '../../mixins';
import { type, validComps } from '../../utils';
import PREFIX from '../prefix';
interface Config {
    // Public API surface: look up a carousel by selector and attach event hooks.
    config(
        el: string
    ): {
        events({ onClick, onChange }: CarouselEvents): void;
    };
}
interface CarouselEvents {
    // Fired when a slide is clicked; receives the slide index.
    onClick?: (index: number) => void;
    // Fired on slide change; receives [oldIndex, newIndex] of the slides.
    onChange?: ([oldValue, value]: [number, number]) => void;
}
// Default autoplay interval in milliseconds (used when `autoplay-speed` is not set).
const AUTOPLAYSPEED = 2000;
// Time (ms) allowed for the slide transition before the helper classes are cleaned up.
const DURATION = 520;
/**
 * `r-carousel` component enhancer.
 *
 * On construction it finds every `<r-carousel>` element, replaces the
 * author-supplied placeholder markup with the carousel template (arrows,
 * slide list, indicator dots) and wires autoplay / arrow / dot behavior.
 * `config(el)` exposes an `events` hook so callers can observe clicks and
 * slide changes on a specific carousel instance.
 */
class Carousel implements Config {
    readonly VERSION: string;
    readonly COMPONENTS: NodeListOf<Element>;
    constructor() {
        this.VERSION = 'v1.0';
        this.COMPONENTS = $el('r-carousel', { all: true });
        this._create(this.COMPONENTS);
    }
    /**
     * Select a rendered carousel by CSS selector and return an object whose
     * `events` method registers `onClick` / `onChange` callbacks on it.
     */
    public config(
        el: string
    ): {
        events({ onClick, onChange }: CarouselEvents): void;
    } {
        const target = $el(el) as HTMLElement;
        validComps(target, 'carousel');
        const { _attrs } = Carousel.prototype;
        const { autoplay, autoplaySpeed, hoverPause } = _attrs(target);
        return {
            events({ onClick, onChange }: CarouselEvents) {
                const elems = target.querySelectorAll(`.${PREFIX.carousel}-item`);
                const LeftArrow = target.querySelector(`.${PREFIX.carousel}-arrow.left`);
                const RightArrow = target.querySelector(`.${PREFIX.carousel}-arrow.right`);
                const lastIndex = elems.length - 1;
                // Fire onChange with [oldIndex, newIndex] relative to the currently
                // active slide; `newSetElem` is the wrap-around fallback slide.
                const handleChange = (
                    siblingType: 'nextElementSibling' | 'previousElementSibling',
                    newSetElem: Element
                ) => {
                    const oldActiveElem = target.querySelector(
                        `.${PREFIX.carousel}-item.active`
                    )! as HTMLElement;
                    const activeElem = oldActiveElem[siblingType] || newSetElem;
                    const oldValue = Number(oldActiveElem.dataset['index']);
                    // @ts-ignore
                    const value = Number(activeElem.dataset['index']);
                    onChange && type.isFn(onChange, [oldValue, value]);
                };
                // Mirror the autoplay timer so onChange also fires for automatic slides.
                // NOTE(review): this runs a second interval, independent of the one in
                // _autoPlay — presumably they stay in sync; verify drift on long sessions.
                const autoPlayUseChangeEvent = () => {
                    if (!autoplay) return;
                    let eventTimer: any = null;
                    const startEvent = () => {
                        eventTimer = window.setInterval(() => {
                            handleChange('nextElementSibling', elems[0]);
                        }, autoplaySpeed);
                    };
                    const pauseEvent = () =>
                        eventTimer ? window.clearInterval(eventTimer) : false;
                    startEvent();
                    if (hoverPause === 'false') return;
                    bind(target, 'mouseenter', () => pauseEvent());
                    bind(target, 'mouseleave', () => startEvent());
                };
                const handleClick = () => {
                    elems.forEach((elem, index) => {
                        bind(elem, 'click', () => onClick && type.isFn(onClick, index));
                    });
                };
                bind(LeftArrow, 'click', () =>
                    handleChange('previousElementSibling', elems[lastIndex])
                );
                bind(RightArrow, 'click', () => handleChange('nextElementSibling', elems[0]));
                handleClick();
                autoPlayUseChangeEvent();
            }
        };
    }
    // Build each component: read attributes, render the template, wire behavior,
    // then strip the consumed attributes from the DOM.
    private _create(components: NodeListOf<Element>): void {
        components.forEach((node) => {
            if (moreThanOneNode(node)) return;
            const placeholderNode = node.firstElementChild;
            if (!placeholderNode) return;
            const carouselItemCount = placeholderNode.childElementCount;
            const {
                dots,
                arrow,
                effect,
                easing,
                radiusDot,
                trigger,
                autoplay,
                hoverPause,
                autoplaySpeed
            } = this._attrs(node);
            this._setMainTemplate(node, dots, arrow);
            this._setFadeCls(node, effect);
            this._setItem(node, placeholderNode, carouselItemCount, easing);
            this._setIndicators(node, carouselItemCount, radiusDot, trigger);
            this._autoPlay(autoplay, node, hoverPause, autoplaySpeed);
            this._handleArrowClick(node, arrow);
            removeAttrs(node, [
                'dots',
                'arrow',
                'effect',
                'easing',
                'trigger',
                'radius-dot',
                'autoplay',
                'hover-pause',
                'autoplay-speed'
            ]);
        });
    }
    // Render the skeleton: prev/next arrows, the (initially empty) slide list
    // and the (initially empty) dot indicator list.
    private _setMainTemplate(node: Element, dots: string, arrow: string): void {
        const template = `
        <button type="button" class="left ${PREFIX.carousel}-arrow ${PREFIX.carousel}-arrow-${arrow}">
         <i class="${PREFIX.icon} ${PREFIX.icon}-ios-arrow-back"></i>
        </button>
        <div class="${PREFIX.carousel}-list"></div>
        <button type="button" class="right ${PREFIX.carousel}-arrow ${PREFIX.carousel}-arrow-${arrow}">
          <i class="${PREFIX.icon} ${PREFIX.icon}-ios-arrow-forward"></i>
        </button>
        <ul class="${PREFIX.carousel}-dots ${PREFIX.carousel}-dots-${dots}"></ul>
        `;
        setHtml(node, template);
    }
    // Apply the fade modifier class when effect="fade".
    private _setFadeCls(node: Element, effect: string): void {
        if (effect === 'fade') {
            node.classList.add(`${PREFIX.carousel}-${effect}`);
        }
    }
    // Wrap each placeholder child in a `-item` container, indexed via data-index.
    private _setItem(
        node: Element,
        placeholderNode: Element,
        carouselItemCount: number,
        easing: string
    ): void {
        const CarouselList = node.querySelector(`.${PREFIX.carousel}-list`);
        const Fragment = document.createDocumentFragment();
        const children = Array.from(placeholderNode.children);
        let i = 0;
        for (; i < carouselItemCount; i++) {
            const CarouselItem = createElem('div');
            CarouselItem.dataset['index'] = `${i}`;
            CarouselItem.className = `${PREFIX.carousel}-item`;
            CarouselItem.appendChild(children[i]);
            this._setEasing(CarouselItem, easing);
            Fragment.appendChild(CarouselItem);
        }
        // Only the first slide starts active (hoisted out of the loop — the
        // original re-added the class on every iteration).
        Fragment.firstElementChild?.classList.add('active');
        CarouselList?.appendChild(Fragment);
    }
    // Apply a custom CSS transition timing function, when provided.
    private _setEasing(item: HTMLElement, easing: string): void {
        if (!easing) return;
        setCss(item, 'transitionTimingFunction', easing);
    }
    // Create one indicator dot per slide; data-slide-to links a dot to its slide.
    private _setIndicators(
        node: Element,
        carouselItemCount: number,
        radiusDot: boolean,
        trigger: string
    ): void {
        const CarouselDots = node.querySelector(`.${PREFIX.carousel}-dots`);
        const Fragment = document.createDocumentFragment();
        let i = 0;
        for (; i < carouselItemCount; i++) {
            const CarouselDot = createElem('li');
            CarouselDot.dataset['slideTo'] = `${i}`;
            // Quote the class value: the unquoted form produced malformed HTML
            // (`class=>`) whenever radiusDot was false.
            setHtml(
                CarouselDot,
                `<button type="button" class="${radiusDot ? 'radius' : ''}"></button>`
            );
            this._handleDotChange(trigger, node, CarouselDot);
            Fragment.appendChild(CarouselDot);
        }
        // Only the first dot starts highlighted (hoisted out of the loop).
        Fragment.firstElementChild?.classList.add(`${PREFIX.carousel}-active`);
        CarouselDots?.appendChild(Fragment);
    }
    // Slide automatically every `autoplaySpeed` ms, pausing on hover unless
    // hover-pause="false".
    private _autoPlay(
        autoplay: boolean,
        node: Element,
        hoverPause: string,
        autoplaySpeed: number
    ): void {
        if (!autoplay) return;
        let autoPlayTimer: any = null;
        const play = () => {
            let speed = autoplaySpeed;
            // Slide intervals below 650ms are known to glitch (the transition
            // itself takes ~DURATION ms), so anything lower is clamped to 650ms.
            if (speed < 650) {
                speed = 650;
                console.warn(
                    `[Rabbit warn] Please do not set the sliding speed of carousel to less than 650ms. There are known problems with doing so, so it has been reset to 650ms. --> ${autoplaySpeed}ms`
                );
            }
            autoPlayTimer = window.setInterval(() => this._slide('next', node), speed);
        };
        play();
        if (hoverPause === 'false') return;
        const pause = () => (autoPlayTimer ? window.clearInterval(autoPlayTimer) : false);
        bind(node, 'mouseenter', () => pause());
        bind(node, 'mouseleave', () => play());
    }
    // Wire prev/next arrow clicks to manual slides (skipped when arrow="none").
    private _handleArrowClick(node: Element, arrow: string): void {
        if (arrow === 'none') return;
        const LeftArrow = node.querySelector(`.${PREFIX.carousel}-arrow.left`);
        const RightArrow = node.querySelector(`.${PREFIX.carousel}-arrow.right`);
        bind(LeftArrow, 'click', () => this._slide('prev', node));
        bind(RightArrow, 'click', () => this._slide('next', node));
    }
    // Jump directly to a slide when its dot is clicked (or hovered, per trigger).
    private _handleDotChange(trigger: string, node: Element, dot: HTMLElement): void {
        let activeIndex: number, activeElem: Element;
        const _C = () => {
            activeIndex = Number(dot.dataset['slideTo']);
            activeElem = node.querySelector(
                `.${PREFIX.carousel}-item[data-index="${activeIndex}"]`
            )!;
            if (activeElem.classList.contains('active')) return;
            this._dotChange(node, activeIndex);
            this._showActiveItem(activeElem);
            siblings(activeElem).forEach((otherElem: HTMLElement) =>
                otherElem.classList.contains('active') ? this._hideOldActiveItem(otherElem) : ''
            );
        };
        if (trigger === 'hover') {
            bind(dot, 'mouseenter', () => _C());
        } else {
            bind(dot, 'click', () => _C());
        }
    }
    // Advance one slide in either direction, wrapping at the ends.
    private _slide(type: 'prev' | 'next', node: Element): void {
        const direction = type === 'prev' ? 'right' : 'left';
        const CarouselList = node.querySelector(`.${PREFIX.carousel}-list`)!;
        const firstIndex = 0;
        const lastIndex = CarouselList.childElementCount - 1;
        const ActiveItem = node.querySelector(`.${PREFIX.carousel}-item.active`)! as HTMLElement;
        const PrevItem = ActiveItem.previousElementSibling || CarouselList.children[lastIndex];
        const NextItem = ActiveItem.nextElementSibling || CarouselList.children[firstIndex];
        const __change = (elem: Element) => this._change(type, direction, node, ActiveItem, elem);
        type === 'prev' ? __change(PrevItem) : __change(NextItem);
    }
    // Move the active state (dot + slide classes) from one item to another.
    private _change(
        type: string,
        direction: string,
        node: Element,
        oldActiveItem: Element,
        curActiveItem: Element
    ): void {
        // @ts-ignore
        const activeIndex = Number(curActiveItem.dataset['index']);
        this._dotChange(node, activeIndex);
        this._showActiveItem(curActiveItem, type, direction);
        this._hideOldActiveItem(oldActiveItem, direction);
    }
    // Highlight the dot matching the newly active slide, un-highlight the rest.
    private _dotChange(node: Element, activeIndex: number): void {
        const CarouselDots = node.querySelector(`.${PREFIX.carousel}-dots`)!;
        const ActiveDot = CarouselDots.children[activeIndex];
        ActiveDot.classList.add(`${PREFIX.carousel}-active`);
        siblings(ActiveDot).forEach((dot: HTMLElement) =>
            dot.classList.contains(`${PREFIX.carousel}-active`)
                ? dot.classList.remove(`${PREFIX.carousel}-active`)
                : ''
        );
    }
    // Stage the incoming slide: add the transition classes, then settle on
    // 'active' once the animation window (DURATION) has elapsed.
    private _showActiveItem(activeElem: Element, type = 'next', direction = 'left'): void {
        activeElem.classList.add(`${PREFIX.carousel}-item-${type}`);
        setTimeout(() => activeElem.classList.add(`${PREFIX.carousel}-item-${direction}`), 20);
        setTimeout(() => {
            activeElem.classList.add('active');
            activeElem.classList.remove(`${PREFIX.carousel}-item-${type}`);
            activeElem.classList.remove(`${PREFIX.carousel}-item-${direction}`);
        }, DURATION);
    }
    // Animate the outgoing slide away and drop its 'active' class afterwards.
    private _hideOldActiveItem(oldElem: Element, direction = 'left'): void {
        setTimeout(() => oldElem.classList.add(`${PREFIX.carousel}-item-${direction}`), 20);
        setTimeout(() => {
            oldElem.classList.remove('active');
            oldElem.classList.remove(`${PREFIX.carousel}-item-${direction}`);
        }, DURATION);
    }
    // Read all supported element attributes, applying defaults.
    private _attrs(node: Element) {
        return {
            dots: getStrTypeAttr(node, 'dots', 'inside'),
            arrow: getStrTypeAttr(node, 'arrow', 'hover'),
            effect: getStrTypeAttr(node, 'effect', ''),
            easing: getStrTypeAttr(node, 'easing', ''),
            trigger: getStrTypeAttr(node, 'trigger', 'click'),
            hoverPause: getStrTypeAttr(node, 'hover-pause', 'true'),
            radiusDot: getBooleanTypeAttr(node, 'radius-dot'),
            autoplay: getBooleanTypeAttr(node, 'autoplay'),
            autoplaySpeed: getNumTypeAttr(node, 'autoplay-speed', AUTOPLAYSPEED)
        };
    }
}
export default Carousel; | the_stack |
import * as _ from 'underscore'
import {
ConfigItemValue,
ConfigManifestEntry,
IBlueprintConfig,
StudioBlueprintManifest,
ShowStyleBlueprintManifest,
BasicConfigItemValue,
TableConfigItemValue,
} from '@sofie-automation/blueprints-integration'
import { Studios, Studio, StudioId } from '../../../lib/collections/Studios'
import { Meteor } from 'meteor/meteor'
import {
ShowStyleVariantId,
ShowStyleCompound,
ShowStyleVariant,
ShowStyleVariants,
} from '../../../lib/collections/ShowStyleVariants'
import { protectString, objectPathGet, objectPathSet } from '../../../lib/lib'
import { logger } from '../../../lib/logging'
import { loadStudioBlueprint, loadShowStyleBlueprint } from './cache'
import { ShowStyleBases, ShowStyleBaseId } from '../../../lib/collections/ShowStyleBases'
import { BlueprintId, Blueprints } from '../../../lib/collections/Blueprints'
import { CommonContext } from './context'
import { ReadonlyDeep } from 'type-fest'
import { getShowStyleCompound } from '../showStyles'
/**
* This whole ConfigRef logic will need revisiting for a multi-studio context, to ensure that there are strict boundaries across who can give to access to what.
* Especially relevant for multi-user.
*/
export namespace ConfigRef {
	/** Build a `${studio.<id>.<key>}` reference string for a studio config value. */
	export function getStudioConfigRef(studioId: StudioId, configKey: string): string {
		return '${studio.' + studioId + '.' + configKey + '}'
	}
	/** Build a `${showStyle.<variantId>.<key>}` reference string for a show-style config value. */
	export function getShowStyleConfigRef(showStyleVariantId: ShowStyleVariantId, configKey: string): string {
		return '${showStyle.' + showStyleVariantId + '.' + configKey + '}'
	}
	/**
	 * Replace every `${...}` reference in a string with its resolved config value.
	 * @param stringWithReferences String possibly containing `${studio...}` / `${showStyle...}` refs
	 * @param modifier Optional transform applied to each resolved value before substitution
	 * @param bailOnError When true, throw a Meteor.Error(404) if a referenced document is missing
	 */
	export function retrieveRefs(
		stringWithReferences: string,
		modifier?: (str: string) => string,
		bailOnError?: boolean
	): string {
		if (!stringWithReferences) return stringWithReferences
		const refs = stringWithReferences.match(/\$\{[^}]+\}/g) || []
		for (const ref of refs) {
			if (ref) {
				// NOTE(review): an unresolved ref stringifies to 'undefined' (truthy) and is
				// still substituted — behavior preserved; confirm whether such refs should
				// instead be left intact.
				let value = retrieveRef(ref, bailOnError) + ''
				if (value) {
					if (modifier) value = modifier(value)
					// Use a replacer function so '$'-sequences inside the resolved value
					// (e.g. "$&", "$'") are inserted literally instead of being treated
					// as String.replace replacement patterns.
					stringWithReferences = stringWithReferences.replace(ref, () => value)
				}
			}
		}
		return stringWithReferences
	}
	/** Resolve a single `${scope.id.key}` reference against the relevant collection. */
	function retrieveRef(reference: string, bailOnError?: boolean): ConfigItemValue | string | undefined {
		if (!reference) return undefined
		const m = reference.match(/\$\{([^.}]+)\.([^.}]+)\.([^.}]+)\}/)
		if (m) {
			if (m[1] === 'studio' && _.isString(m[2]) && _.isString(m[3])) {
				const studioId: StudioId = protectString(m[2])
				const configId = m[3]
				const studio = Studios.findOne(studioId)
				if (studio) {
					return objectPathGet(studio.blueprintConfig, configId)
				} else if (bailOnError)
					throw new Meteor.Error(404, `Ref "${reference}": Studio "${studioId}" not found`)
			} else if (m[1] === 'showStyle' && _.isString(m[2]) && _.isString(m[3])) {
				const showStyleVariantId = protectString<ShowStyleVariantId>(m[2])
				const configId = m[3]
				const showStyleCompound = getShowStyleCompound(showStyleVariantId)
				if (showStyleCompound) {
					return objectPathGet(showStyleCompound.blueprintConfig, configId)
				} else if (bailOnError)
					throw new Meteor.Error(
						404,
						`Ref "${reference}": Showstyle variant "${showStyleVariantId}" not found`
					)
			}
		}
		return undefined
	}
}
/**
 * Compile the raw studio blueprint config into the object handed to blueprints.
 * Applies the blueprint's config manifest (defaults + required-value warnings)
 * when one exists, exposes studio-level special values, and finally runs the
 * blueprint's own preprocess hook.
 */
export function preprocessStudioConfig(studio: ReadonlyDeep<Studio>, blueprint?: StudioBlueprintManifest) {
	let config: any
	if (blueprint && blueprint.studioConfigManifest !== undefined) {
		config = {}
		applyToConfig(config, blueprint.studioConfigManifest, studio.blueprintConfig, `Studio ${studio._id}`)
	} else {
		// NOTE(review): without a manifest this aliases (and below mutates) the
		// stored blueprintConfig object despite the ReadonlyDeep typing — verify.
		config = studio.blueprintConfig
	}
	// Expose special values as defined in the studio
	config['SofieHostURL'] = studio.settings.sofieUrl
	if (blueprint && blueprint.preprocessConfig) {
		const context = new CommonContext({
			name: `preprocessStudioConfig`,
			identifier: `studioId=${studio._id}`,
		})
		config = blueprint.preprocessConfig(context, config)
	}
	return config
}
/**
 * Compile the raw show-style blueprint config into the object handed to blueprints.
 * Applies the blueprint's config manifest (defaults + required-value warnings)
 * when one exists, then runs the blueprint's own preprocess hook.
 */
export function preprocessShowStyleConfig(
	showStyle: ReadonlyDeep<ShowStyleCompound>,
	blueprint?: ShowStyleBlueprintManifest
) {
	let config: any
	if (blueprint && blueprint.showStyleConfigManifest !== undefined) {
		config = {}
		applyToConfig(config, blueprint.showStyleConfigManifest, showStyle.blueprintConfig, `ShowStyle ${showStyle._id}`)
	} else {
		// No manifest: hand the stored config through unchanged
		config = showStyle.blueprintConfig
	}
	if (blueprint && blueprint.preprocessConfig) {
		const context = new CommonContext({
			name: `preprocessShowStyleConfig`,
			identifier: `showStyleBaseId=${showStyle._id},showStyleVariantId=${showStyle.showStyleVariantId}`,
		})
		config = blueprint.preprocessConfig(context, config)
	}
	return config
}
/**
 * List the display names of all required manifest entries that have no value
 * set in the given blueprint config. Returns an empty list when there is no
 * manifest at all.
 */
export function findMissingConfigs(
	manifest: ConfigManifestEntry[] | undefined,
	config: ReadonlyDeep<IBlueprintConfig>
) {
	if (manifest === undefined) {
		return []
	}
	return manifest
		.filter((entry) => entry.required && config && objectPathGet(config, entry.id) === undefined)
		.map((entry) => entry.name)
}
/**
 * Populate `res` with one value per manifest entry: the override from
 * `blueprintConfig` when present, otherwise the manifest's default.
 * Warns when a required entry has neither.
 *
 * @param res Target object, written to via objectPathSet (mutated in place).
 * @param configManifest The blueprint's config manifest entries.
 * @param blueprintConfig The stored config overrides.
 * @param source Human-readable origin (e.g. `Studio xyz`) used in warnings.
 */
export function applyToConfig(
	res: any,
	configManifest: ConfigManifestEntry[],
	blueprintConfig: ReadonlyDeep<IBlueprintConfig>,
	source: string
) {
	for (const val of configManifest) {
		let newVal = val.defaultVal
		const overrideVal = objectPathGet(blueprintConfig, val.id) as
			| BasicConfigItemValue
			| TableConfigItemValue
			| undefined
		if (overrideVal !== undefined) {
			newVal = overrideVal
		} else if (val.required) {
			// logger.warn for consistency with the rest of this module
			// (every other call site uses .warn; .warning was used nowhere else)
			logger.warn(`Required config not defined in ${source}: "${val.name}"`)
		}
		objectPathSet(res, val.id, newVal)
	}
}
// Two-level cache of compiled studio configs: blueprintId -> studioId -> Cache.
// Keyed by blueprint first so a blueprint change can invalidate all its entries at once.
const studioBlueprintConfigCache = new Map<BlueprintId, Map<StudioId, Cache>>()
// Three-level cache of compiled show-style configs: blueprintId -> showStyleBaseId -> showStyleVariantId -> Cache.
const showStyleBlueprintConfigCache = new Map<BlueprintId, Map<ShowStyleBaseId, Map<ShowStyleVariantId, Cache>>>()
// Wrapper around a compiled config; the cache lookups truthy-check this wrapper,
// so even an `undefined` compiled config counts as a cache hit.
interface Cache {
	config: unknown
}
/** Drop every cached studio and show-style blueprint config. */
export function forceClearAllBlueprintConfigCaches() {
	studioBlueprintConfigCache.clear()
	showStyleBlueprintConfigCache.clear()
}
/**
 * Invalidate the cached config for one studio (across every blueprint)
 * and immediately recompile it.
 */
export function resetStudioBlueprintConfig(studio: ReadonlyDeep<Studio>): void {
	studioBlueprintConfigCache.forEach((perStudio) => {
		perStudio.delete(studio._id)
	})
	// Eagerly rebuild the cache entry
	getStudioBlueprintConfig(studio)
}
/**
 * Get the compiled blueprint config for a studio, using the two-level cache
 * (blueprintId -> studioId). Recompiles and caches on a miss. When the studio
 * has no blueprintId, the result is computed but not cached.
 */
export function getStudioBlueprintConfig(studio: ReadonlyDeep<Studio>): unknown {
	const blueprintId = studio.blueprintId
	let perStudioCache = blueprintId ? studioBlueprintConfigCache.get(blueprintId) : undefined
	if (blueprintId && !perStudioCache) {
		perStudioCache = new Map()
		studioBlueprintConfigCache.set(blueprintId, perStudioCache)
	}
	const cached = perStudioCache?.get(studio._id)
	if (cached) {
		return cached.config
	}
	logger.debug('Building Studio config')
	const studioBlueprint = loadStudioBlueprint(studio)
	if (!studioBlueprint) {
		logger.warn(`Studio blueprint "${studio.blueprintId}" not found!`)
	} else {
		const missing = findMissingConfigs(studioBlueprint.blueprint.studioConfigManifest, studio.blueprintConfig)
		if (missing.length) {
			logger.warn(`Studio "${studio._id}" missing required config: ${missing.join(', ')}`)
		}
	}
	const compiledConfig = preprocessStudioConfig(studio, studioBlueprint?.blueprint)
	perStudioCache?.set(studio._id, {
		config: compiledConfig,
	})
	return compiledConfig
}
/**
 * Invalidate the cached config for one show-style variant (across every
 * blueprint) and immediately recompile it.
 */
export function resetShowStyleBlueprintConfig(showStyleCompound: ReadonlyDeep<ShowStyleCompound>): void {
	showStyleBlueprintConfigCache.forEach((perBase) => {
		perBase.get(showStyleCompound._id)?.delete(showStyleCompound.showStyleVariantId)
	})
	// Eagerly rebuild the cache entry
	getShowStyleBlueprintConfig(showStyleCompound)
}
/**
 * Get the compiled blueprint config for a show-style compound, using the
 * three-level cache (blueprintId -> showStyleBaseId -> showStyleVariantId).
 * Recompiles and caches on a miss.
 */
export function getShowStyleBlueprintConfig(showStyleCompound: ReadonlyDeep<ShowStyleCompound>): unknown {
	// NOTE(review): when blueprintId is falsy this creates a throwaway Map that is
	// never stored in showStyleBlueprintConfigCache, i.e. the result is effectively
	// not cached. The studio equivalent uses `undefined` for this branch instead —
	// confirm this asymmetry is intended.
	let blueprintConfigMap:
		| Map<ShowStyleBaseId, Map<ShowStyleVariantId, Cache>>
		| undefined = showStyleCompound.blueprintId
		? showStyleBlueprintConfigCache.get(showStyleCompound.blueprintId)
		: new Map()
	if (!blueprintConfigMap) {
		// Only reachable when blueprintId is truthy but not yet in the cache
		blueprintConfigMap = new Map()
		showStyleBlueprintConfigCache.set(showStyleCompound.blueprintId, blueprintConfigMap)
	}
	let showStyleBaseMap = blueprintConfigMap.get(showStyleCompound._id)
	if (!showStyleBaseMap) {
		showStyleBaseMap = new Map()
		blueprintConfigMap.set(showStyleCompound._id, showStyleBaseMap)
	}
	const cachedConfig = showStyleBaseMap.get(showStyleCompound.showStyleVariantId)
	if (cachedConfig) {
		return cachedConfig.config
	}
	// Cache miss: load the blueprint, warn about missing required config, recompile
	const showStyleBlueprint = loadShowStyleBlueprint(showStyleCompound)
	if (showStyleBlueprint) {
		const diffs = findMissingConfigs(
			showStyleBlueprint.blueprint.showStyleConfigManifest,
			showStyleCompound.blueprintConfig
		)
		if (diffs && diffs.length) {
			logger.warn(
				`ShowStyle "${showStyleCompound._id}-${
					showStyleCompound.showStyleVariantId
				}" missing required config: ${diffs.join(', ')}`
			)
		}
	} else {
		logger.warn(`ShowStyle blueprint "${showStyleCompound.blueprintId}" not found!`)
	}
	const compiledConfig = preprocessShowStyleConfig(showStyleCompound, showStyleBlueprint?.blueprint)
	showStyleBaseMap.set(showStyleCompound.showStyleVariantId, { config: compiledConfig })
	return compiledConfig
}
// Server-side cache invalidation: watch the source collections and drop the
// corresponding cached compiled configs whenever a document changes or is removed.
Meteor.startup(() => {
	if (Meteor.isServer) {
		// A studio changed/removed: drop its entry from every per-blueprint map
		Studios.find(
			{},
			{
				fields: {
					_rundownVersionHash: 1,
					blueprintId: 1,
				},
			}
		).observeChanges({
			changed: (id: StudioId) => {
				for (const map of studioBlueprintConfigCache.values()) {
					map.delete(id)
				}
			},
			removed: (id: StudioId) => {
				for (const map of studioBlueprintConfigCache.values()) {
					map.delete(id)
				}
			},
		})
		// A show-style base changed/removed: drop all variants cached under it
		ShowStyleBases.find(
			{},
			{
				fields: {
					_rundownVersionHash: 1,
					blueprintId: 1,
				},
			}
		).observeChanges({
			changed: (id: ShowStyleBaseId) => {
				for (const map of showStyleBlueprintConfigCache.values()) {
					map.delete(id)
				}
			},
			removed: (id: ShowStyleBaseId) => {
				for (const map of showStyleBlueprintConfigCache.values()) {
					map.delete(id)
				}
			},
		})
		// A variant changed/removed: drop just that variant. Uses observe() (full
		// documents) because we need showStyleBaseId to address the nested map.
		ShowStyleVariants.find(
			{},
			{
				fields: {
					_rundownVersionHash: 1,
					showStyleBaseId: 1,
					_id: 1,
				},
			}
		).observe({
			changed: (doc: ShowStyleVariant) => {
				for (const map of showStyleBlueprintConfigCache.values()) {
					map.get(doc.showStyleBaseId)?.delete(doc._id)
				}
			},
			removed: (doc: ShowStyleVariant) => {
				for (const map of showStyleBlueprintConfigCache.values()) {
					map.get(doc.showStyleBaseId)?.delete(doc._id)
				}
			},
		})
		// A blueprint changed/removed: drop everything compiled from it
		Blueprints.find(
			{},
			{
				fields: {
					modified: 1,
				},
			}
		).observeChanges({
			changed: (id: BlueprintId) => {
				studioBlueprintConfigCache.delete(id)
				showStyleBlueprintConfigCache.delete(id)
			},
			removed: (id: BlueprintId) => {
				studioBlueprintConfigCache.delete(id)
				showStyleBlueprintConfigCache.delete(id)
			},
		})
	}
})
/// <reference types="node" />
import { Serializable, Browser, Page } from "puppeteer-core";
/** Browser release channel name, e.g. 'stable' or 'canary'. */
export type Channel = string;
/**
 * Evaluates a function in the context of the page
 * @param pageFunction Function to be evaluated in the page context.
 * @param args Arguments to pass to pageFunction.
 * @returns Promise resolving to the Serializable result of pageFunction.
 *
 * If the function passed to the Window.evaluate returns a Promise,
 * then Window.evaluate would wait for the promise to resolve and return its value.
 *
 * If the function passed to the Window.evaluate returns a non-Serializable value,
 * then Window.evaluate resolves to undefined.
 */
export type EvaluateFunction = (pageFunction: ((...args: any[]) => any) | string, ...args: Serializable[]) => Promise<Serializable>;
/** Geometry and appearance options for a single app window. */
export interface WindowOptions {
    /**
     * App window width in pixels.
     */
    width?: number;
    /**
     * App window height in pixels.
     */
    height?: number;
    /**
     * App window top offset in pixels.
     */
    top?: number;
    /**
     * App window left offset in pixels.
     */
    left?: number;
    /**
     * Background color using hex notation, defaults to '#ffffff'.
     */
    bgcolor?: string;
}
/**
 * Set of configurable options to set on the app.
 */
export interface LaunchOptions extends WindowOptions {
    /**
     * Browser to be used, defaults to ['stable']
     */
    channel?: Channel[];
    /**
     * Application icon to be used in the system dock.
     * Either buffer containing PNG or a path to the PNG file on the file system.
     * This feature is only available in Chrome M72+.
     * One can use 'canary' channel to see it in action before M72 hits stable.
     */
    icon?: Buffer | string;
    /**
     * Optional parameters to share between Carlo instances.
     */
    paramsForReuse?: any;
    /**
     * Application title
     */
    title?: string;
    /**
     * Path to a User Data Directory. This folder is created upon the first app launch and contains user settings and Web storage data. Defaults to '.profile'.
     */
    userDataDir?: string;
    /**
     * Path to a Chromium or Chrome executable to run instead of the automatically located Chrome.
     * If executablePath is a relative path, then it is resolved relative to current working directory.
     * Carlo is only guaranteed to work with the latest Chrome stable version.
     */
    executablePath?: string;
    /**
     * Additional arguments to pass to the browser instance.
     */
    args?: string[];
}
/** Events emitted by an App: 'exit' (last window closed) and 'window' (new window opened). */
export type AppEvent = 'exit' | 'window';
/** Events emitted by a Window: 'close' (the window closed). */
export type WindowEvent = 'close';
/** Position and size of a window, in pixels. */
export interface Bounds {
    /**
     * Top offset in pixels.
     */
    top: number;
    /**
     * Left offset in pixels.
     */
    left: number;
    /**
     * Width in pixels.
     */
    width: number;
    /**
     * Height in pixels.
     */
    height: number;
}
export interface Window {
/**
* 'close' - Emitted when the window closes.
* @param name 'close'
*/
on(name: AppEvent, callback: (...args: any[]) => void): void;
/**
* Returns window bounds
*/
bounds(): Promise<Bounds>;
/**
* Brings this window to front.
*/
bringToFront(): Promise<void>;
/**
* Closes this window.
*/
close(): Promise<void>;
evaluate: EvaluateFunction;
/**
* @param name Name of the function on the window object.
* @param carloFunction Callback function which will be called in Carlo's context.
*/
exposeFunction(name: string, carloFunction: (...args: any[]) => any): Promise<void>;
/**
* Turns the window into the full screen mode. Behavior is platform specific.
*/
fullscreen(): Promise<void>;
/**
* Navigates the corresponding web page to the given uri,
* makes given params available in web page via carlo.loadParams()
* Resolves upon DOMContentLoaded event in the web page.
* @param uri Path to the resource relative to the folder passed into serveFolder()
* @param params Optional parameters to pass to the web application.
*/
load(uri: string, params?: any): Promise<void>;
/**
* Maximizes the window. Behavior is platform-specific.
*/
maximize(): Promise<void>;
/**
* Minimizes the window. Behavior is platform-specific.
*/
minimize(): Promise<void>;
/**
* Returns Puppeteer page object for testing.
*/
pageForTest(): Page;
/**
* Returns the options.paramsForReuse value passed into the carlo.launch.
*/
paramsForReuse(): any;
/**
* Same as App.serveFolder(folder[, prefix]), but only applies to current window.
* @param folder Folder with web content to make available to Chrome.
* @param prefix Prefix of the URL path to serve from the given folder.
*/
serveFolder(folder: string, prefix?: string): void;
/**
* Same as App.serveHandler(handler), but only applies to the current window requests.
* Only single window-level handler can be installed in window.
* @param handle Network handler callback accepting the HttpRequest parameter.
*/
serveHandler(handle: (request: HttpRequest) => void): void;
/**
* Same as App.serveOrigin(base[, prefix]), but only applies to current window.
* @param base Base to serve web content from.
* @param prefix Prefix of the URL path to serve from the given folder.
*/
serveOrigin(base: string, prefix?: string): Promise<void>;
/**
* Sets window bounds. Parameters top, left, width and height are all optional.
* Dimension or the offset is only applied when specified.
* @param bounds Window bounds
*/
setBounds(bounds: {
/**
* Top offset in pixels.
*/
top?: number;
/**
* Left offset in pixels.
*/
left?: number;
/**
* Width in pixels.
*/
width?: number;
/**
* Height in pixels.
*/
height?: number;
}): Promise<void>;
}
/** An intercepted network request that a serve handler can abort, continue or fulfill. */
export interface HttpRequest {
    /**
     * Aborts request.
     * If request is a navigation request, navigation is aborted as well.
     */
    abort(): Promise<void>;
    /**
     * Proceeds with the default behavior for this request.
     * Either serves it from the filesystem or defers to the browser.
     */
    continue(): Promise<void>;
    /**
     * Marks the request as failed.
     * If request is a navigation request, navigation is still committed, but to a location that fails to be fetched.
     */
    fail(): Promise<void>;
    /**
     * Fulfills the network request with the given data. 'Content-Length' header is generated in case it is not listed in the headers.
     */
    fulfill(options: {
        /**
         * Optional HTTP status code (200, 304, etc), defaults to 200.
         */
        status?: number;
        /**
         * Optional HTTP response headers.
         */
        headers?: object;
        /**
         * Optional response body.
         */
        body?: Buffer;
    }): Promise<void>;
    /**
     * Network request headers
     */
    headers(): object;
    /**
     * HTTP method of this network request (GET, POST, etc.)
     */
    method(): string;
    /**
     * Network request URL
     */
    url(): string;
}
/** A running Carlo application. */
export interface App {
    /**
     * 'exit' - Emitted when the last window closes.
     * 'window' - Emitted when the new window opens.
     * @param name 'exit' or 'window'
     * @param callback
     */
    on(name: AppEvent, callback: (...args: any[]) => void): void;
    /**
     * Puppeteer browser object for testing.
     */
    browserForTest(): Browser;
    createWindow(options?: WindowOptions): Promise<Window>;
    evaluate: EvaluateFunction;
    /**
     * Closes the browser window
     */
    exit(): Promise<void>;
    /**
     * The method adds a function called name on the pages' window object.
     * When called, the function executes carloFunction in Node.js and returns a Promise which resolves to the return value of carloFunction.
     * @param name
     * @param carloFunction
     */
    exposeFunction(name: string, carloFunction: (...args: any[]) => any): Promise<void>;
    /**
     * Shortcut to the main window's Window.load
     * @param uri
     * @param params
     */
    load(uri: string, ...params: any[]): Promise<void>;
    /**
     * Running app guarantees to have main window.
     * If current main window closes, a next open window becomes the main one.
     */
    mainWindow(): Window;
    /**
     * Makes the content of the given folder available to the Chrome web app
     * @param folder Folder with web content to make available to Chrome
     * @param prefix Prefix of the URL path to serve from the given folder
     */
    serveFolder(folder: string, prefix?: string): void;
    /**
     * Handler function is called with every network request in this app.
     * It can abort, continue or fulfill each request.
     * Only single app-wide handler can be registered.
     * @param handler Network handler callback accepting the HttpRequest parameter.
     */
    serveHandler(handler: (request: HttpRequest) => void): void;
    /**
     * Fetches Carlo content from the specified origin instead of reading it from the file system, eg http://localhost:8080.
     * This mode can be used for the fast development mode available in web frameworks.
     * NOTE(review): Window.serveOrigin is declared to return Promise<void>; confirm
     * whether this app-level variant should as well.
     * @param base Base to serve web content from.
     * @param prefix Prefix of the URL path to serve from the given folder.
     */
    serveOrigin(base: string, prefix?: string): void;
    /**
     * Specifies image to be used as an app icon in the system dock.
     * This feature is only available in Chrome M72+. One can use 'canary' channel to see it in action before M72 hits stable.
     * @param image Either buffer containing PNG or a path to the PNG file on the file system.
     */
    setIcon(image: Buffer | string): void;
    /**
     * Returns all currently opened windows.
     * Running app guarantees to have at least one open window.
     */
    windows(): Window[];
}
/**
 * Launches the browser
 * @param options Set of configurable options to set on the app.
 * @returns Promise resolving to the running App.
 */
export function launch(options?: LaunchOptions): Promise<App>;
/**
 * Enters headless test mode.
 */
export function enterTestMode(): void;
/**
 * Returns additional information about the file, otherwise not available to the web.
 * Available in Chrome M73+
 *
 * @param file File to get additional information for
 */
export function fileInfo(file: any): Promise<{
    /**
     * absolute path to the given file
     */
    path: string;
}>;
/**
 * This method is available in the Web world and returns parameters passed into the window.load().
 * This is how Carlo passes initial set of <rpc> handles to Node objects into the web world.
 */
export function loadParams(): Promise<ReadonlyArray<any>>;
import { GoslingSpec, SingleView, Track } from '../gosling.schema';
import { getBoundingBox, getRelativeTrackInfo } from './bounding-box';
import { traverseToFixSpecDownstream, overrideDataTemplates, convertToFlatTracks } from './spec-preprocess';
import { getTheme } from './theme';
// Tests for traverseToFixSpecDownstream: properties set on a parent view/spec
// must be propagated (or deliberately not propagated) to child views and tracks.
// NOTE: traverseToFixSpecDownstream mutates `spec` in place.
describe('Fix Spec Downstream', () => {
    it('Empty Views', () => {
        const info = getRelativeTrackInfo(
            {
                arrangement: 'parallel',
                views: [
                    {
                        tracks: []
                    }
                ]
            },
            getTheme()
        );
        const size = getBoundingBox(info);
        // Bounding box of an empty view must still be a finite number, not NaN/Infinity
        expect(!isNaN(+size.width) && isFinite(size.width)).toEqual(true);
        expect(!isNaN(+size.height) && isFinite(size.height)).toEqual(true);
    });
    it('style', () => {
        {
            const spec: GoslingSpec = {
                style: { outline: 'red' },
                views: [{ tracks: [{ overlay: [], width: 0, height: 0 }] }]
            };
            traverseToFixSpecDownstream(spec);
            // Root style propagates down to views and tracks
            expect((spec.views[0] as SingleView).style?.outline).toEqual('red');
            expect((spec.views[0] as SingleView).tracks[0].style?.outline).toEqual('red');
        }
        {
            const spec: GoslingSpec = {
                style: { outline: 'red' },
                views: [{ tracks: [{ overlay: [], width: 0, height: 0, style: { outline: 'green' } }] }]
            };
            traverseToFixSpecDownstream(spec);
            // A track-level style must win over the inherited root style
            expect((spec.views[0] as SingleView).style?.outline).toEqual('red');
            expect((spec.views[0] as SingleView).tracks[0].style?.outline).toEqual('green');
        }
    });
    it('static', () => {
        {
            const spec: GoslingSpec = {
                static: true,
                arrangement: 'parallel',
                views: [{ tracks: [{ overlay: [], width: 0, height: 0 }] }]
            };
            traverseToFixSpecDownstream(spec);
            expect(spec.views[0].static).toEqual(true);
            expect((spec.views[0] as any).tracks[0].static).toEqual(true);
        }
        {
            const spec: GoslingSpec = {
                layout: 'circular',
                arrangement: 'parallel',
                views: [{ layout: 'linear', tracks: [{ overlay: [], width: 0, height: 0 }] }]
            };
            traverseToFixSpecDownstream(spec);
            // `static` defaults to false when not set by the user
            expect(spec.views[0].static).toEqual(false);
            expect((spec.views[0] as any).tracks[0].static).toEqual(false);
        }
    });
    it('flipY if the last track (i !== 0) is using a `withinLink` mark', () => {
        {
            const spec: GoslingSpec = {
                static: true,
                arrangement: 'parallel',
                views: [{ tracks: [{ overlay: [], mark: 'withinLink', width: 0, height: 0 }] }]
            };
            traverseToFixSpecDownstream(spec);
            expect((spec.views[0] as any).tracks[0].flipY).toBeUndefined(); // must not flip if there is only one track
        }
        {
            const spec: GoslingSpec = {
                static: true,
                arrangement: 'parallel',
                views: [
                    {
                        tracks: [
                            { overlay: [], width: 0, height: 0 },
                            { overlay: [], mark: 'withinLink', width: 0, height: 0 }
                        ]
                    }
                ]
            };
            traverseToFixSpecDownstream(spec);
            // withinLink on a non-first track gets flipped
            expect((spec.views[0] as any).tracks[0].flipY).toBeUndefined();
            expect((spec.views[0] as any).tracks[1].flipY).toEqual(true);
        }
        {
            const spec: GoslingSpec = {
                static: true,
                arrangement: 'parallel',
                views: [
                    {
                        tracks: [
                            { overlay: [], width: 0, height: 0 },
                            { overlay: [{ mark: 'withinLink' }], width: 0, height: 0 }
                        ]
                    }
                ]
            };
            traverseToFixSpecDownstream(spec);
            // The flip is applied on the overlay entry, not on the track itself
            expect((spec.views[0] as any).tracks[0].flipY).toBeUndefined();
            expect((spec.views[0] as any).tracks[1].flipY).toBeUndefined();
            expect((spec.views[0] as any).tracks[1].overlay[0].flipY).toEqual(true);
        }
        {
            const spec: GoslingSpec = {
                static: true,
                arrangement: 'parallel',
                views: [
                    {
                        tracks: [
                            { overlay: [{ mark: 'withinLink' }], width: 0, height: 0 },
                            { overlay: [{ mark: 'withinLink' }], width: 0, height: 0, overlayOnPreviousTrack: true }
                        ]
                    }
                ]
            };
            traverseToFixSpecDownstream(spec);
            expect((spec.views[0] as any).tracks[0].flipY).toBeUndefined();
            expect((spec.views[0] as any).tracks[1].flipY).toBeUndefined();
            // the two specs overlay into a single composite track, so nothing is flipped
            expect((spec.views[0] as any).tracks[0].overlay[0].flipY).toBeUndefined();
            expect((spec.views[0] as any).tracks[1].overlay[0].flipY).toBeUndefined();
        }
        {
            const spec: GoslingSpec = {
                static: true,
                arrangement: 'parallel',
                views: [
                    {
                        tracks: [
                            { overlay: [], width: 0, height: 0 },
                            { overlay: [{ mark: 'withinLink' }], width: 0, height: 0 },
                            { overlay: [{ mark: 'withinLink' }], width: 0, height: 0, overlayOnPreviousTrack: true }
                        ]
                    }
                ]
            };
            traverseToFixSpecDownstream(spec);
            expect((spec.views[0] as any).tracks[0].flipY).toBeUndefined();
            expect((spec.views[0] as any).tracks[1].flipY).toBeUndefined();
            // tracks[1] and tracks[2] overlay into one composite track placed after
            // tracks[0], so their withinLink overlay entries are flipped
            expect((spec.views[0] as any).tracks[0].flipY).toBeUndefined();
            expect((spec.views[0] as any).tracks[1].overlay[0].flipY).toBe(true);
            expect((spec.views[0] as any).tracks[2].overlay[0].flipY).toBe(true);
        }
        {
            const spec: GoslingSpec = {
                static: true,
                arrangement: 'parallel',
                views: [
                    {
                        tracks: [
                            { overlay: [], width: 0, height: 0 },
                            { overlay: [{ mark: 'withinLink' }], width: 0, height: 0, overlayOnPreviousTrack: true },
                            { overlay: [{ mark: 'withinLink' }], width: 0, height: 0, overlayOnPreviousTrack: true }
                        ]
                    }
                ]
            };
            traverseToFixSpecDownstream(spec);
            expect((spec.views[0] as any).tracks[0].flipY).toBeUndefined();
            expect((spec.views[0] as any).tracks[1].flipY).toBeUndefined();
            // all three specs overlay into a single composite track, so nothing is flipped
            expect((spec.views[0] as any).tracks[0].flipY).toBeUndefined();
            expect((spec.views[0] as any).tracks[1].overlay[0].flipY).toBeUndefined();
            expect((spec.views[0] as any).tracks[2].overlay[0].flipY).toBeUndefined();
        }
    });
    it('arrangement should be overriden', () => {
        {
            const spec: GoslingSpec = {
                static: true,
                arrangement: 'serial',
                views: [{ views: [{ tracks: [{ overlay: [], width: 0, height: 0 }] }] }]
            };
            traverseToFixSpecDownstream(spec);
            expect((spec.views[0] as any).arrangement).toEqual('serial');
        }
        {
            const spec: GoslingSpec = {
                static: true,
                views: [{ views: [{ tracks: [{ overlay: [], width: 0, height: 0 }] }] }]
            };
            traverseToFixSpecDownstream(spec);
            expect(spec.arrangement).toEqual('vertical'); // default one
            expect((spec.views[0] as any).arrangement).toEqual('vertical'); // default one is overriden
        }
    });
    it('spacing should be overriden to views but not tracks', () => {
        {
            const spec: GoslingSpec = {
                arrangement: 'parallel',
                spacing: 24,
                views: [
                    {
                        arrangement: 'serial',
                        views: [{ tracks: [{ overlay: [], width: 0, height: 0 }] }]
                    }
                ]
            };
            traverseToFixSpecDownstream(spec);
            // spacing propagates one level down to views, but no further
            expect(spec.views[0].spacing).toEqual(24);
            expect((spec.views[0] as any).views[0].spacing).toBeUndefined();
        }
    });
    it('Layout in Tracks Should Be Removed', () => {
        const spec: GoslingSpec = {
            arrangement: 'parallel',
            views: [
                {
                    tracks: [{ layout: 'circular', overlay: [], width: 0, height: 0 }]
                }
            ]
        };
        traverseToFixSpecDownstream(spec);
        // A track may not declare its own layout; it is reset to the default 'linear'
        expect((spec.views[0] as any).tracks[0].layout).toEqual('linear');
    });
});
// Tests for the remaining spec-preprocess helpers: overlayOnPreviousTrack
// normalization, data-template overriding, and flattening nested track specs.
describe('Spec Preprocess', () => {
    it('overlayOnPreviousTrack', () => {
        const spec: GoslingSpec = {
            layout: 'circular',
            tracks: [
                { data: { type: 'csv', url: '' }, mark: 'bar', overlayOnPreviousTrack: true, width: 100, height: 100 }
            ]
        };
        traverseToFixSpecDownstream(spec);
        // Should be fixed to `false` since the first track does not have a previous track
        expect(spec.tracks[0].overlayOnPreviousTrack).toEqual(false);
    });
    it('circular layout', () => {
        const spec: GoslingSpec = {
            layout: 'circular',
            tracks: [{} as Track, { static: false } as Track]
        };
        traverseToFixSpecDownstream(spec);
        expect(spec.tracks[0].layout).toEqual('circular');
        // By default, `static` option is `false`.
        expect(spec.tracks[0].static).toEqual(false);
        // The `static` option shouldn't be changed if it is defined by a user.
        expect(spec.tracks[1].static).toEqual(false);
    });
    it('override template (higlass-vector)', () => {
        const spec: GoslingSpec = {
            tracks: [{ data: { type: 'vector', url: '', column: 'c', value: 'v' }, overrideTemplate: true } as Track]
        };
        overrideDataTemplates(spec);
        expect(spec.tracks[0]).toHaveProperty('mark');
    });
    it('override template (higlass-multivec)', () => {
        {
            const spec: GoslingSpec = {
                tracks: [
                    {
                        data: { type: 'multivec', url: '', row: 'r', column: 'c', value: 'v' },
                        overrideTemplate: true
                    } as Track
                ]
            };
            overrideDataTemplates(spec);
            expect(spec.tracks[0]).toHaveProperty('mark');
        }
        {
            const spec: GoslingSpec = {
                tracks: [{ data: { type: 'multivec', url: '', row: 'r', column: 'c', value: 'v' } } as Track]
            };
            overrideDataTemplates(spec);
            expect(spec.tracks[0]).not.toHaveProperty('mark'); // overrideTemplate is not set, so do not override templates
        }
    });
    it('Convert To FlatTracks', () => {
        const dummySpec: Track = { data: { type: 'csv', url: '' }, mark: 'bar', width: 10, height: 10 };
        {
            const flat = convertToFlatTracks({
                tracks: []
            });
            expect(flat).toHaveLength(0);
        }
        {
            const flat = convertToFlatTracks({
                tracks: [{ ...dummySpec, title: 'A' }]
            });
            expect(flat).toHaveLength(1);
            expect(flat[0].title).toEqual('A');
        }
        {
            // An 'overlay'-aligned nested track group becomes one flat track with an `overlay` array
            const flat = convertToFlatTracks({
                tracks: [
                    { ...dummySpec, title: 'A' },
                    { title: 'B', alignment: 'overlay', tracks: [{ ...dummySpec }], width: 10, height: 10 }
                ]
            });
            expect(flat).toHaveLength(2);
            expect(flat[0].title).toEqual('A');
            expect(flat[1].title).toEqual('B');
            expect('overlay' in flat[1]).toEqual(true);
            expect('overlay' in flat[1] && flat[1].overlay.length === 1).toEqual(true);
        }
        {
            const flat2 = convertToFlatTracks({
                alignment: 'stack',
                color: { value: 'red' }, // should be passed down onto the flattened tracks
                tracks: [
                    { ...dummySpec, title: 'A' },
                    { title: 'B', alignment: 'overlay', tracks: [{ ...dummySpec }], width: 10, height: 10 }
                ]
            });
            expect((flat2[0] as any).color).toEqual({ value: 'red' });
        }
        {
            const flat = convertToFlatTracks({
                tracks: [
                    { ...dummySpec, title: 'A' },
                    { title: 'B', alignment: 'overlay', tracks: [{ ...dummySpec }], width: 10, height: 10 }
                ]
            });
            // Should be consistent when called multiple times (idempotent).
            expect(flat).toEqual(convertToFlatTracks({ tracks: flat }));
            expect(flat).toEqual(convertToFlatTracks({ tracks: convertToFlatTracks({ tracks: flat }) }));
        }
    });
});
import {Analyser} from 'parser/core/Analyser'
import {TimestampHookArguments, TimestampHookCallback} from 'parser/core/Dispatcher'
import {GAUGE_HANDLE} from '../ResourceGraphs/ResourceGraphs'
import {AbstractGauge, AbstractGaugeOptions, GaugeGraphOptions} from './AbstractGauge'
/**
 * Narrowing helper: returns `value`, throwing if it is null or undefined.
 * @param value Possibly-missing value.
 * @returns The value, guaranteed non-nullish.
 * @throws Error when value is null or undefined.
 */
function expectExist<T>(value?: T): NonNullable<T> {
	// Nullish check (not truthiness) so legitimate falsy values such as
	// 0, '' or false are not rejected as "missing".
	if (value == null) {
		throw new Error('Missing something required. Check the stack trace.')
	}
	return value
}
/** Snapshot of the timer's state at a given moment. */
interface State {
	// Epoch timestamp (ms) the snapshot was taken at
	timestamp: number
	// Milliseconds left on the timer at `timestamp`
	remaining: number
	// Whether the timer was paused at `timestamp`
	paused: boolean
}
/** A span of time, in epoch milliseconds. */
interface Window {
	start: number
	end: number
}
export interface TimerGaugeOptions extends AbstractGaugeOptions {
	/** Maximum duration of the gauge, in milliseconds. */
	maximum: number
	/** Callback executed when the timer expires. */
	onExpiration?: TimestampHookCallback
	/** Graph options. Omit to disable graphing for this gauge. */
	graph?: GaugeGraphOptions,
}
export class TimerGauge extends AbstractGauge {
// Just in case I ever have to change it lmao
private readonly minimum = 0
private readonly maximum: number
private readonly expirationCallback?: TimestampHookCallback
private readonly graphOptions?: GaugeGraphOptions
private hook?: ReturnType<Analyser['addTimestampHook']>
private history: State[] = []
// TODO: Work out how to remove this reliance on having it passed down
private _addTimestampHook?: Analyser['addTimestampHook']
private get addTimestampHook() { return expectExist(this._addTimestampHook) }
private _removeTimestampHook?: Analyser['removeTimestampHook']
private get removeTimestampHook() { return expectExist(this._removeTimestampHook) }
/** The most recent state */
private get lastKnownState() {
const {length} = this.history
if (length === 0) {
return
}
return this.history[length - 1]
}
/** Time currently remaining on the timer. */
get remaining() {
// If there's no known state, we have to assume there's no time left
if (!this.lastKnownState) {
return this.minimum
}
// If we're paused, the time remaining always === specified state remaining
if (this.lastKnownState.paused) {
return this.lastKnownState.remaining
}
const delta = this.parser.currentEpochTimestamp - this.lastKnownState.timestamp
return Math.max(this.minimum, this.lastKnownState.remaining - delta)
}
/** Whether the gauge has expired. */
get expired() {
return this.remaining <= this.minimum
}
/** Whether the gauge is currently paused. */
get paused() {
// If there's no state, we're neither paused nor running - but safer to assume running.
if (!this.lastKnownState) {
return false
}
return this.lastKnownState.paused
}
/** Whether the gauge is currently running */
get active(): boolean {
return !(this.expired || this.paused)
}
constructor(opts: TimerGaugeOptions) {
super(opts)
this.maximum = opts.maximum
this.expirationCallback = opts.onExpiration
this.graphOptions = opts.graph
}
/** @inheritdoc */
reset() {
this.set(this.minimum)
}
/**
* Start the timer from its maximum value
*/
start() {
this.set(this.maximum)
}
/**
* Refresh the gauge to its maximum value.
* If the gauge has expired, this will have no effect.
*/
refresh(onlyIfRunning: boolean = true) {
if (this.expired && onlyIfRunning) {
return
}
this.start()
}
/**
* Add time to the gauge. Time over the maxium will be lost.
* If the gauge has expired, this will have no effect.
*/
extend(duration: number, onlyIfRunning: boolean = true) {
if (this.expired && onlyIfRunning) {
return
}
this.set(this.remaining + duration)
}
/** Pause the timer at its current state. */
pause() {
this.set(this.remaining, true)
}
/** Resume the timer from its paused state. */
resume() {
this.set(this.remaining, false)
}
/** Set the time remaining on the timer to the given duration. Value will be bounded by provided maximum. */
set(duration: number, paused: boolean = false) {
const timestamp = this.parser.currentEpochTimestamp
// Push the timer state prior to the event into the history
this.history.push({
timestamp,
remaining: this.remaining,
paused: this.paused,
})
const remaining = Math.max(this.minimum, Math.min(duration, this.maximum))
// Push a new state onto the history
this.history.push({
timestamp,
remaining,
paused,
})
// Remove any existing hook
if (this.hook) {
this.removeTimestampHook(this.hook)
}
// If we've not yet expired, and we're not paused, set up a hook to wait for that
if (!paused && remaining > 0) {
this.hook = this.addTimestampHook(timestamp + remaining, this.onExpiration)
}
}
raise() { /** noop */ }
init() {
if (this.history.length === 0) {
this.reset()
}
}
private onExpiration = (args: TimestampHookArguments) => {
if (this.expirationCallback) {
this.expirationCallback(args)
}
this.history.push({
timestamp: this.parser.currentEpochTimestamp,
remaining: this.remaining,
paused: false,
})
}
/** @inheritdoc */
override generateResourceGraph() {
// Skip charting if they've not enabled it
if (!this.graphOptions) {
return
}
// Insert a data point at the end of the timeline
this.pause()
const {handle, label, color} = this.graphOptions
const graphData = {
label,
colour: color,
data: this.history.map(entry => {
return {time: entry.timestamp, current: entry.remaining / 1000, maximum: this.maximum / 1000}
}),
linear: true,
}
if (handle != null) {
this.resourceGraphs.addDataGroup({...this.graphOptions, handle})
this.resourceGraphs.addData(handle, graphData)
} else {
this.resourceGraphs.addGauge(graphData, {...this.graphOptions, handle: GAUGE_HANDLE})
}
}
// Junk I wish I didn't need
setAddTimestampHook(value: Analyser['addTimestampHook']) {
this._addTimestampHook = value
}
setRemoveTimestampHook(value: Analyser['removeTimestampHook']) {
this._removeTimestampHook = value
}
private internalExpirationTime(start: number = this.parser.pull.timestamp, end: number = this.parser.currentEpochTimestamp, downtimeWindows: Window[] = [], reapplyAfterDowntime: number = 0) {
let currentStart: number | undefined = undefined
const expirationWindows: Window[] = []
this.history.forEach(entry => {
if (entry.remaining <= this.minimum && currentStart == null) {
currentStart = entry.timestamp
}
if (entry.remaining > this.minimum && currentStart != null) {
// Don't clutter the windows if the expiration of the timer may also restart it (Polyglot, Lilies, etc.)
if (entry.timestamp > currentStart) {
expirationWindows.push({start: currentStart, end: entry.timestamp})
}
currentStart = undefined
}
})
if (expirationWindows.length === 0) { return [] }
const expirations: Window[] = []
expirationWindows.forEach(expiration => {
// If the expiration had some duration within the time range we're asking about, we'll add it
if ((expiration.end > start || expiration.start < end)) {
/**
* If we were given any downtime windows, check if this expiration started within one, and change the effective start of the expiration
* to the end of the downtime window, plus any additional leniency if specified
*/
if (downtimeWindows.length > 0) {
downtimeWindows.filter(uta => expiration.start >= uta.start && expiration.start <= uta.end)
.forEach(uta => expiration.start = Math.min(expiration.end, uta.end + reapplyAfterDowntime))
}
// If the window still has any effective duration, we'll return it
if (expiration.start < expiration.end) {
expirations.push(expiration)
}
}
})
return expirations
}
/**
* Gets whether the timer was expired at a particular time
* @param when The timestamp in question
* @returns True if the timer was expired at this timestamp, false if it was active or paused
*/
public isExpired(when: number = this.parser.currentEpochTimestamp) {
return this.internalExpirationTime(when, when).length > 0
}
/**
 * Gets the total amount of time that the timer was expired during a given time range.
 * Each expiration window is clamped to [start, end] before its duration is summed.
 * @param start The start of the time range. To forgive time at the start of the fight, set this to this.parser.pull.timestamp + forgivenness amount
 * @param end The end of the time range.
 * @param downtimeWindows Pass to forgive any expirations that began within one of these windows of time. The start of any affected expiration will be reset to the end of the affecting window
 *     When using a timer that should only forgive expirations when you are completely unable to act, use the windows from UnableToAct.getWindows()
 *     To also forgive expirations due to death or due to no enemy being targetable, use the windows from Downtime.getWindows()
 * @param reapplyAfterDowntime Pass to grant additional leniency when an expiration occurred during a downtime window. This is added to the end of the window when recalculating the expiration start time
 * @returns The total effective time that the timer was expired for
 */
public getExpirationTime(start: number = this.parser.pull.timestamp, end: number = this.parser.currentEpochTimestamp, downtimeWindows: Window[] = [], reapplyAfterDowntime: number = 0) {
    let totalExpiration = 0
    for (const expiration of this.internalExpirationTime(start, end, downtimeWindows, reapplyAfterDowntime)) {
        // Clamp the window to the requested range so partial overlaps only count the overlapping portion
        totalExpiration += Math.min(expiration.end, end) - Math.max(expiration.start, start)
    }
    return totalExpiration
}
/**
 * Gets the array of windows that the timer was expired for during a given time range.
 * Each returned window is clamped to the [start, end] range.
 * @param start The start of the time range. To forgive time at the start of the fight, set this to this.parser.pull.timestamp + forgivenness amount
 * @param end The end of the time range.
 * @param downtimeWindows Pass to forgive any expirations that began within one of these windows of time. The start of any affected expiration will be reset to the end of the affecting window
 *     When using a timer that should only forgive expirations when you are completely unable to act, use the windows from UnableToAct.getWindows()
 *     To also forgive expirations due to death or due to no enemy being targetable, use the windows from Downtime.getWindows()
 * @param reapplyAfterDowntime Pass to grant additional leniency when an expiration occurred during a downtime window. This is added to the end of the window when recalculating the expiration start time
 * @returns The array of expiration windows
 */
public getExpirationWindows(start: number = this.parser.pull.timestamp, end: number = this.parser.currentEpochTimestamp, downtimeWindows: Window[] = [], reapplyAfterDowntime: number = 0) {
    // Clamp each raw expiration window to the requested range
    return this.internalExpirationTime(start, end, downtimeWindows, reapplyAfterDowntime).map((window): Window => ({
        start: Math.max(window.start, start),
        end: Math.min(window.end, end),
    }))
}
} | the_stack |
import * as assert from 'assert'
import { Prompter, PromptResult } from '../../../shared/ui/prompter'
import {
isWizardControl,
StateWithCache,
StepEstimator,
Wizard,
WizardState,
WIZARD_BACK,
WIZARD_EXIT,
} from '../../../shared/wizards/wizard'
/** Shape of the wizard state used by the tests in this file. */
interface TestWizardForm {
    prop1: string
    prop2?: number
    prop3: string
    prop4: string
    nestedProp: {
        prop1: string
        prop2?: boolean
    }
}
/** Fluent helpers for asserting prompter interactions at specific call indices. */
interface AssertStepMethods {
    /** Order is 1-indexed, e.g. onCall(1) === onFirstCall() */
    onCall(...order: number[]): void
    onFirstCall(): void
    onSecondCall(): void
    onThirdCall(): void
    onFourthCall(): void
    onEveryCall(): void
}
// [current step, total steps] as reported to a prompter by the wizard
type StepTuple = [current: number, total: number]
// A canned response computed from the wizard's state at prompt time
type StateResponse<T, S> = (state: StateWithCache<S, T>) => PromptResult<T>
// A canned response: either raw data/flow-control, or a function of wizard state
type TestResponse<T, S> = PromptResult<T> | StateResponse<T, S>
/** Wraps a string in ANSI escape codes so terminals render it green. */
function makeGreen(s: string): string {
    const green = '\u001b[32m'
    const reset = '\u001b[0m'
    return green + s + reset
}
/** Formats an assertion failure message showing actual vs. expected (expected rendered in green). */
function makeExpectedError(message: string, actual: any, expected: any): string {
    const expectedLine = makeGreen(`Expected: ${expected}`)
    return `${message}\n\tActual: ${actual}\n\t${expectedLine}`
}
// TODO: rewrite this prompter to be a 'provider' type
/**
 * Note: This class should be used within this test file. Use 'FormTester' instead for testing wizard implementations since
 * it can test behavior irrespective of prompter ordering.
 *
 * Test prompters are instantiated with a finite set of responses. Responses can be raw data or functions applied to the
 * current state of the wizard. A developer-friendly name can be added using 'setName'.
 */
class TestPrompter<T, S = any> extends Prompter<T> {
    // Canned responses, consumed one per prompt() call
    private readonly responses: TestResponse<T, S>[]
    // Wizard states bound via acceptState(), sparse-indexed by prompt count
    private readonly acceptedStates: StateWithCache<S, T>[] = []
    // Step tuples received from the wizard, one per setSteps() call
    private readonly acceptedSteps: [current: number, total: number][] = []
    // Estimators received from the wizard, one per setStepEstimator() call
    private readonly acceptedEstimators: StepEstimator<T>[] = []
    private _totalSteps: number = 1
    private _lastResponse: PromptResult<T>
    private promptCount: number = 0
    private name: string = 'Test Prompter'
    public get recentItem(): PromptResult<T> {
        return this._lastResponse
    }
    /**
     * The wizard hands the cached response back when revisiting a step; fail loudly
     * if it does not match what this prompter last returned.
     */
    public set recentItem(response: PromptResult<T>) {
        if (response !== this._lastResponse) {
            this.fail(
                makeExpectedError('Received unexpected cached response from wizard', response, this._lastResponse)
            )
        }
    }
    public get totalSteps(): number {
        return this._totalSteps
    }
    constructor(...responses: TestResponse<T, S>[]) {
        super()
        this.responses = responses
    }
    /** Returns the next canned response, resolving function responses against the bound state. */
    public async prompt(): Promise<PromptResult<T>> {
        if (this.responses.length === this.promptCount) {
            this.fail('Ran out of responses')
        }
        const resp = this.convertFunctionResponse(this.promptCount++)
        // Flow-control signals (back/exit) are not remembered as the last real response
        this._lastResponse = !isWizardControl(resp) ? resp : this._lastResponse
        return resp
    }
    protected promptUser(): Promise<PromptResult<T>> {
        throw new Error('Do not call this')
    }
    public setSteps(current: number, total: number): void {
        this.acceptedSteps.push([current, total])
    }
    public setStepEstimator(estimator: StepEstimator<T>): void {
        this.acceptedEstimators.push(estimator)
    }
    //----------------------------Test helper methods go below this line----------------------------//
    /** Binds the wizard state for the upcoming prompt; required for function-type responses. */
    public acceptState(state: StateWithCache<S, T>): this {
        this.acceptedStates[this.promptCount] = state
        return this
    }
    private fail(message: string): void {
        assert.fail(this.makeErrorMessage(message))
    }
    private makeErrorMessage(message: string): string {
        return `[${this.name}]: ${message}`
    }
    /** Sets a developer-friendly name used in failure messages. */
    public setName(name: string): this {
        this.name = name
        return this
    }
    public setTotalSteps(total: number): void {
        this._totalSteps = total
    }
    /** Resolves the response at `count`, invoking it with the bound state if it is a function. */
    private convertFunctionResponse(count: number = this.promptCount): PromptResult<T> {
        let response = this.responses[count]
        if (typeof response === 'function') {
            if (this.acceptedStates[count] === undefined) {
                this.fail(`Undefined state, did you forget to bind the prompter with "acceptState"?`)
            }
            response = (response as StateResponse<T, S>)(this.acceptedStates[count])
        }
        return response
    }
    /** Asserts that the estimator received on the `when`-th call (1-indexed) yields `expected` for `input`. */
    private checkEstimate(input: T, expected: number, when: number): void {
        if (when > this.acceptedEstimators.length) {
            this.fail('Cannot check estimate for a step that did not occur')
        }
        const estimator = this.acceptedEstimators[when - 1]
        const actual = estimator(input)
        if (actual !== expected) {
            this.fail(makeExpectedError(`Estimator did not provide the expected steps: `, actual, expected))
        }
    }
    public assertStepEstimate(input: T, expected: number): AssertStepMethods {
        return {
            onCall: (...order: number[]) => order.forEach(i => this.checkEstimate(input, expected, i)),
            onFirstCall: () => this.checkEstimate(input, expected, 1),
            onSecondCall: () => this.checkEstimate(input, expected, 2),
            onThirdCall: () => this.checkEstimate(input, expected, 3),
            onFourthCall: () => this.checkEstimate(input, expected, 4),
            // Fix: iterate the estimators actually received (mirrors assertSteps). Iterating
            // `acceptedStates` silently checked nothing when acceptState() was never called.
            onEveryCall: () => this.acceptedEstimators.forEach((_, i) => this.checkEstimate(input, expected, i + 1)),
        } as AssertStepMethods
    }
    /** Asserts the step tuple received on the `when`-th call (1-indexed) matches `expected`. */
    private checkSteps(expected: StepTuple, when: number): void {
        if (when > this.acceptedSteps.length) {
            this.fail(`Cannot check step counts for a step that did not occur`)
        }
        const actual = this.acceptedSteps[when - 1]
        if (expected !== undefined) {
            assert.strictEqual(actual[0], expected[0], this.makeErrorMessage('Incorrect current step'))
            assert.strictEqual(actual[1], expected[1], this.makeErrorMessage('Incorrect total steps'))
        }
    }
    /** Check if the prompter was given the expected steps */
    public assertSteps(current: number, total: number): AssertStepMethods {
        return {
            onCall: (...order: number[]) => order.forEach(i => this.checkSteps([current, total], i)),
            onFirstCall: () => this.checkSteps([current, total], 1),
            onSecondCall: () => this.checkSteps([current, total], 2),
            onThirdCall: () => this.checkSteps([current, total], 3),
            onFourthCall: () => this.checkSteps([current, total], 4),
            onEveryCall: () => this.acceptedSteps.forEach((_, i) => this.checkSteps([current, total], i + 1)),
        } as AssertStepMethods
    }
    public assertCallCount(count: number): void {
        assert.strictEqual(this.promptCount, count, this.makeErrorMessage('Called an unexpected number of times'))
    }
}
// We only need to test execution of prompters provided by the wizard form
// We only need to test execution of prompters provided by the wizard form
describe('Wizard', function () {
    let wizard: Wizard<TestWizardForm>
    let helloPrompter: TestPrompter<string>
    // Fresh wizard plus a prompter that always answers 'hello', rebuilt for every test
    beforeEach(function () {
        wizard = new Wizard()
        helloPrompter = new TestPrompter(...Array(100).fill('hello')).setName('Hello')
    })
    it('binds prompter to property', async function () {
        wizard.form.prop1.bindPrompter(() => helloPrompter)
        assert.strictEqual((await wizard.run())?.prop1, 'hello')
    })
    it('initializes state to empty object if not provided', async function () {
        wizard.form.prop1.bindPrompter(() => helloPrompter, { showWhen: state => state !== undefined })
        assert.strictEqual((await wizard.run())?.prop1, 'hello')
    })
    it('processes exit signal', async function () {
        wizard.form.prop1.bindPrompter(() => helloPrompter)
        wizard.form.prop3.bindPrompter(() => new TestPrompter<string>(WIZARD_EXIT).setName('Exit'))
        assert.strictEqual(await wizard.run(), undefined)
        helloPrompter.assertCallCount(1)
    })
    it('users exit prompter if provided', async function () {
        // The exit prompter is consulted on each exit signal; returning true confirms the exit
        const checkForHello = (state: TestWizardForm) => state.prop1 !== 'hello'
        const exitPrompter = new TestPrompter(checkForHello, true).setName('Exit Dialog')
        const exitSignalPrompter = new TestPrompter<string>(WIZARD_EXIT, WIZARD_EXIT).setName('Exit Signal')
        wizard = new Wizard({ exitPrompterProvider: state => exitPrompter.acceptState(state as any) })
        wizard.form.prop1.bindPrompter(() => helloPrompter)
        wizard.form.prop3.bindPrompter(() => exitSignalPrompter)
        assert.strictEqual(await wizard.run(), undefined)
        helloPrompter.assertCallCount(1)
        exitPrompter.assertCallCount(2)
        exitSignalPrompter.assertCallCount(2)
    })
    // test is mostly redundant (state controller handles this logic) but good to have
    it('regenerates prompters when going back', async function () {
        const testPrompter = new TestPrompter(WIZARD_BACK, 'goodbye').setName('Goodbye')
        wizard.form.prop1.bindPrompter(() => helloPrompter)
        wizard.form.prop3.bindPrompter(() => testPrompter)
        assert.deepStrictEqual(await wizard.run(), { prop1: 'hello', prop3: 'goodbye' })
        helloPrompter.assertCallCount(2)
        testPrompter.assertCallCount(2)
    })
    it('applies step offset', async function () {
        const testPrompter = new TestPrompter('1')
        wizard.stepOffset = [4, 5]
        wizard.form.prop1.bindPrompter(() => testPrompter)
        assert.deepStrictEqual(await wizard.run(), { prop1: '1' })
        testPrompter.assertSteps(5, 6).onFirstCall()
    })
    it('provides a step estimator', async function () {
        const firstStep = new TestPrompter('0')
        const secondStep = new TestPrompter('1')
        wizard.form.prop1.bindPrompter(state => firstStep.acceptState(state))
        // prop3's prompter is only shown when prop1 === '1', so the estimate differs per input
        wizard.form.prop3.bindPrompter(() => secondStep, { showWhen: state => state.prop1 === '1' })
        assert.deepStrictEqual(await wizard.run(), { prop1: '0' })
        firstStep.assertStepEstimate('0', 0).onFirstCall()
        firstStep.assertStepEstimate('1', 1).onFirstCall()
    })
    it('uses a parent estimator if provided', async function () {
        const parentEstimator = (state: TestWizardForm) => (state.prop1 === '1' ? (state.prop3 === '1' ? 2 : 1) : 0)
        const firstStep = new TestPrompter('1', '0', '1')
        const secondStep = new TestPrompter<string>(WIZARD_BACK, '1')
        const thirdStep = new TestPrompter(WIZARD_BACK, 1)
        wizard.parentEstimator = parentEstimator
        wizard.form.prop1.bindPrompter(state => firstStep.acceptState(state))
        wizard.form.prop3.bindPrompter(state => secondStep.acceptState(state), {
            showWhen: state => state.prop1 === '1',
        })
        wizard.form.prop2.bindPrompter(state => thirdStep.acceptState(state))
        assert.deepStrictEqual(await wizard.run(), { prop1: '1', prop2: 1, prop3: '1' })
        firstStep.assertStepEstimate('0', 0).onEveryCall()
        secondStep.assertStepEstimate('1', 2).onSecondCall()
    })
    it('does not apply control values to state when going back', async function () {
        const noWizardControl = (state: StateWithCache<WizardState<TestWizardForm>, string>) => {
            assert.strictEqual(
                isWizardControl(state.prop2),
                false,
                'Wizard flow control should not appear in wizard state'
            )
            return 'good'
        }
        const testPrompter1 = new TestPrompter('first', noWizardControl).setName('Test 1')
        const testPrompter2 = new TestPrompter(WIZARD_BACK, 22).setName('Test 2')
        wizard.form.prop1.bindPrompter(state => testPrompter1.acceptState(state))
        wizard.form.prop2.bindPrompter(() => testPrompter2)
        assert.deepStrictEqual(await wizard.run(), { prop1: 'good', prop2: 22 })
    })
    describe('prompter state', function () {
        // Execution order: 1 -> 2 -> 1 -> 2
        it('accurately assigns current/total steps', async function () {
            const testPrompter1 = new TestPrompter('1', '2', '3').setName('Test 1')
            const testPrompter2 = new TestPrompter(WIZARD_BACK, 4).setName('Test 2')
            // Prompter 1 counts as two steps, shifting prompter 2's reported position
            testPrompter1.setTotalSteps(2)
            wizard.form.prop1.bindPrompter(() => testPrompter1)
            wizard.form.prop2.bindPrompter(() => testPrompter2)
            assert.deepStrictEqual(await wizard.run(), { prop1: '2', prop2: 4 })
            testPrompter1.assertCallCount(2)
            testPrompter2.assertCallCount(2)
            testPrompter1.assertSteps(1, 2).onFirstCall()
            testPrompter2.assertSteps(3, 3).onSecondCall()
        })
        //        A --> Path 1
        //       /            \
        //  Start              End
        //       \            /
        //        B --> Path 2
        //
        // Execution order:
        // Start -> Path 1 -> End -> Path 1 -> Start -> Path 2 -> Start -> Path 1 -> Start -> Path 2 -> End -> Path 2 -> End
        it('sets total steps correctly when branching', async function () {
            const helloFunction = (state: WizardState<TestWizardForm>) =>
                state.prop1 === 'B' ? `hello ${state.prop3}` : `extra step`
            const testPrompterStart = new TestPrompter('A', 'B', 'A', 'B').setName('Start')
            const testPrompterPath1 = new TestPrompter(99, WIZARD_BACK, WIZARD_BACK, 10).setName('Path 1')
            const testPrompterPath2 = new TestPrompter(WIZARD_BACK, 'alice', 'bob').setName('Path 2')
            const testPrompterEnd = new TestPrompter(WIZARD_BACK, WIZARD_BACK, helloFunction).setName('End')
            // The two branches report different step weights, so totals differ per path
            testPrompterPath1.setTotalSteps(2)
            testPrompterPath2.setTotalSteps(3)
            wizard.form.prop1.bindPrompter(() => testPrompterStart)
            wizard.form.prop2.bindPrompter(() => testPrompterPath1, { showWhen: state => state.prop1 === 'A' })
            wizard.form.prop3.bindPrompter(() => testPrompterPath2, { showWhen: state => state.prop1 === 'B' })
            wizard.form.prop4.bindPrompter(state => testPrompterEnd.acceptState(state))
            assert.deepStrictEqual(await wizard.run(), { prop1: 'B', prop3: 'bob', prop4: 'hello bob' })
            testPrompterStart.assertCallCount(4)
            testPrompterPath1.assertCallCount(3)
            testPrompterPath2.assertCallCount(3)
            testPrompterEnd.assertCallCount(3)
            testPrompterStart.assertSteps(1, 2).onEveryCall()
            testPrompterPath1.assertSteps(2, 3).onEveryCall()
            testPrompterPath2.assertSteps(2, 3).onEveryCall()
            testPrompterEnd.assertSteps(4, 4).onFirstCall()
            testPrompterEnd.assertSteps(5, 5).onCall(2, 3)
        })
    })
}) | the_stack |
import { Injectable } from '@angular/core';
import { Plugin, Cordova, AwesomeCordovaNativePlugin } from '@awesome-cordova-plugins/core';
/** Connection configuration for an IMAP account. */
export interface Config {
  /**
   * Hostname or IP address of the IMAP service, for example: imap.gmail.com, imap-mail.outlook.com....
   */
  host: string;
  /**
   * Optional parameter. Port of the IMAP server to connect.
   * Default set to: 993
   */
  port?: number;
  /**
   * iOS ONLY
   * Optional parameter. Encryption type to use.
   * Default set to: TLS/SSL
   */
  connectionType?: ConnectionType;
  /**
   * Username or email address for authentication.
   */
  user: string;
  /**
   * Password for authentication.
   */
  password: string;
}
/** Result of a connection attempt. */
export interface Connection {
  /**
   * Connection Status: true or false, "true" means connected successfully; "false" means failed to connect.
   */
  status: boolean;
  /**
   * Optional parameter. A connection string, returned as a confirmation for successful connection.
   */
  connection?: string;
  /**
   * Optional parameter. Error, returned if the connecting process has failed.
   */
  exception?: string;
}
/** Header-level data for a single message (no body content). */
export interface MessageHeaders {
  /**
   * Message consecutive number.
   */
  messageNumber: number;
  /**
   * The name of the folder where the message is contained.
   */
  folder: string;
  /**
   * Sender's data.
   */
  from: Address[];
  /**
   * TO recipients data.
   */
  toRecipients: Address[];
  /**
   * CC recipients data.
   */
  ccRecipients: Address[];
  /**
   * BCC recipients data.
   */
  bccRecipients: Address[];
  /**
   * The date when the message was received.
   */
  receivedDate: string;
  /**
   * Message's subject header.
   */
  subject: string;
  /**
   * Message's active flags.
   */
  flags: string[];
}
/** Full message data, including body content and attachments. */
export interface Message {
  /**
   * Message consecutive number.
   */
  messageNumber: number;
  /**
   * The name of the folder where the message is contained.
   */
  folder: string;
  /**
   * Sender's data.
   */
  from: Address[];
  /**
   * All recipients data.
   */
  allRecipients: Address[];
  /**
   * TO recipients data.
   */
  toRecipients: Address[];
  /**
   * CC recipients data.
   */
  ccRecipients: Address[];
  /**
   * BCC recipients data.
   */
  bccRecipients: Address[];
  /**
   * Reply data.
   */
  replyTo: Address[];
  /**
   * Date when the message was sent.
   */
  sentDate: string;
  /**
   * The date when the message was received.
   */
  receivedDate: string;
  /**
   * Message's subject header.
   */
  subject: string;
  /**
   * Android ONLY
   * Optional. Short description for the message.
   */
  description?: string;
  /**
   * Name of the file associated with the message, if any (not documented upstream).
   */
  fileName: string;
  /**
   * Android ONLY
   * Optional.
   */
  disposition?: string;
  /**
   * Message's active flags.
   */
  flags: string[];
  /**
   * Android ONLY
   * Optional.
   */
  lineCount?: number;
  /**
   * Android ONLY
   * Optional. All Headers available on a message.
   */
  allMessageHeaders?: object;
  /**
   * Android ONLY
   * Optional. Type of message's content.
   */
  contentType?: string;
  /**
   * Message's body with its content and attachments.
   */
  bodyContent: Content[];
  /**
   * Message's memory size.
   */
  size: number;
}
/** An email address with optional display name. */
export interface Address {
  /**
   * Email address.
   */
  address: string;
  /**
   * Optional. Name of the email address's owner.
   */
  personal?: string;
  /**
   * Android ONLY
   * Optional. Data type.
   */
  type?: string;
}
/** A single part of a message body (text part or attachment). */
export interface Content {
  /**
   * Content data type.
   */
  type: string;
  /**
   * Optional. The name of the file.
   */
  fileName?: string;
  /**
   * Message's content.
   */
  content: string;
}
/** Result of a flag-modification operation. */
export interface ModificationResult {
  /**
   * Status of the applied changes. "True" success; "False" failure
   */
  status: boolean;
  /**
   * Array with consecutive numbers of modified messages.
   */
  modifiedMessages: number[];
}
/**
 * Transport encryption modes supported for the IMAP connection.
 */
export enum ConnectionType {
  /**
   * Clear-text connection for the protocol.
   */
  Clear = 'Clear',
  /**
   * Starts with clear-text connection at the beginning, then switch to encrypted connection using TLS/SSL.
   */
  StartTLS = 'StartTLS',
  /**
   * Encrypted connection using TLS/SSL.
   */
  TLSSSL = 'TLS/SSL',
}
/**
 * Comparison operators used when searching or listing messages by date.
 */
export enum Comparison {
  /**
   * Android ONLY
   * The less than or equal to operator.
   */
  LE = 'LE',
  /**
   * The less than operator.
   */
  LT = 'LT',
  /**
   * The equality operator.
   */
  EQ = 'EQ',
  /**
   * The not equal to operator.
   */
  NE = 'NE',
  /**
   * Android ONLY
   * The greater than operator.
   */
  GT = 'GT',
  /**
   * The greater than or equal to operator.
   */
  GE = 'GE',
}
/**
 * All available message flags. Set or remove flag using "setFlag()".
 */
export enum FlagEnum {
  /**
   * "ANSWERED" message flag
   */
  ANSWERED = 'ANSWERED',
  /**
   * "DRAFT" message flag
   */
  DRAFT = 'DRAFT',
  /**
   * "FLAGGED" message flag
   */
  FLAGGED = 'FLAGGED',
  /**
   * Android ONLY
   * "RECENT" message flag
   */
  RECENT = 'RECENT',
  /**
   * "SEEN" message flag
   */
  SEEN = 'SEEN',
  /**
   * Android ONLY
   * "USER" message flag
   */
  USER = 'USER',
  /**
   * "DELETED" message flag. Note: Add this flag to delete the message from the mailbox
   */
  DELETED = 'DELETED',
  /**
   * iOS ONLY
   * "SENT" message flag.
   */
  SENT = 'Sent',
  /**
   * iOS ONLY
   * "FORWARDED" message flag.
   */
  FORWARDED = 'Forwarded',
  /**
   * iOS ONLY
   * "SubmitPending" message flag.
   */
  SubmitPending = 'SubmitPending',
  /**
   * iOS ONLY
   * "SUBMITTED" message flag.
   */
  SUBMITTED = 'Submitted',
}
/**
* @name Imap
* @description
* This plugin will enable a Cordova application to use the IMAP (Internet Message Access Protocol) features
* The plugin offers support for Android and iOS.
* To enable the IMAP features on Android, this plugin uses the framework [Java Mail API](https://javaee.github.io/javamail/) and for iOS, it uses the [MailCore 2](http://libmailcore.com/) library.
*
*
* @usage
* ```typescript
* import { Imap } from '@awesome-cordova-plugins/imap/ngx';
*
*
* constructor(private imap: Imap) { }
*
* ...
*
*
* this.imap.connect({
* host: 'imap.gmail.com',
* port: 993,
* connectionType: ConnectionType.TLSSSL, // (iOS ONLY) Encryption type to use. Default set to: TLS/SSL
* user: 'my_email@gmail.com',
* password: 'my-pass'
* })
* .then((res: Connection) => console.log(res))
* .catch((error) => console.error(error));
*
*
*
* this.imap.disconnect()
* .then((res: boolean) => console.log(res))
* .catch((error: any) => console.error(error));
*
*
*
* this.imap.isConnected()
* .then((res: boolean) => console.log(res))
* .catch((error: any) => console.error(error));
*
* // Note: Connected to an IMAP service is REQUIRED to be able to get data from the below functions.
*
*
* // listMailFolders('*') using a '*' pattern will return all folder names
* // listMailFolders('INBOX*') using a pattern with a folder name will list all the subfolder names of that folder that match the pattern
*
* this.imap.listMailFolders('*')
* .then((res: string[]) => console.log(res))
* .catch((error: any) => console.error(error));
*
*
* this.imap.getMessageCountByFolderName('INBOX')
* .then((res: number) => {
* // Returns the count of the messages in the folder as a result
* console.log(res)
* })
* .catch((error: any) => {
* console.error(error)
* });
*
*
*
* this.imap.searchMessagesByDatePeriod('INBOX', 1601503200000, Comparison.GE)
* .then((res: number[]) => {
* // Returns array with messages' consecutive numbers
* // ex. [1207, 1208, 1209]
* console.log(res)
* })
* .catch((error: any) => {
* console.error(error)
* });
*
*
* this.imap.listMessagesHeadersByConsecutiveNumber('INBOX', 1200, 1280)
* .then((res: MessageHeaders[]) => {
* // Returns array with messages' headers data
* console.log(res)
* })
* .catch((error: any) => {
* console.error(error)
* });
*
*
* this.imap.listMessagesHeadersByDate('INBOX', 1601503200000, Comparison.GE)
* .then((res: MessageHeaders[]) => {
* // Returns array with messages' headers data
* console.log(res)
* })
* .catch((error: any) => {
* console.error(error)
* });
*
*
* this.imap.getFullMessageData('INBOX', 1205)
* .then((res: Message) => {
* // Returns "Message" object with the full message data including attachments.
* console.log(res)
* })
* .catch((error: any) => {
* console.error(error)
* });
*
*
* this.imap.getFullMessageDataOnNewSession({
* host: 'imap.gmail.com',
* port: 993,
* connectionType: ConnectionType.TLSSSL, // (iOS ONLY) Encryption type to use. Default set to: TLS/SSL
* user: 'my_email@gmail.com',
* password: 'my-pass'
* }, 'INBOX', 1205)
* .then((res: Message) => {
* // Returns "Message" object with the full message data including attachments.
* console.log(res)
* })
* .catch((error: any) => {
* console.error(error)
* });
*
*
* this.imap.copyToFolder('INBOX', 'Spam', [1204, 1205, 1206, 1207])
* .then((res: boolean) => {
* // Returns "true" if the process is successful, else returns "false".
* console.log(res)
* })
* .catch((error: any) => {
* console.error(error)
* });
*
*
* * Sets a flag on a message
* * "setFlag()" can be used for deleting messages setting the Delete flag to "FlagEnum.DELETED"
* this.imap.setFlag('INBOX', [1206, 1205, 1204], FlagEnum.SEEN, true)
* .then((res: ModificationResult) => {
*
* // res.status - return true or false based on the deletion success
*
* //res.modifiedMessages - for ex.[1206, 1205, 1204];
*
* })
* .catch((error: any) => {
* console.error(error)
* });
*
* ```
*/
@Plugin({
  pluginName: 'Imap',
  plugin: 'cordova-plugin-imap',
  pluginRef: 'imap',
  repo: 'https://github.com/aleksandar888/cordova-plugin-imap.git',
  platforms: ['Android', 'iOS'],
})
@Injectable()
export class Imap extends AwesomeCordovaNativePlugin {
  /**
   * This function "connect(clientData: Config)" tries to connect and authenticate with the IMAP server.
   * @param clientData {Config} Connection configuration
   * @return {Promise<Connection>} Returns a promise with the connection data
   */
  @Cordova()
  connect(clientData: Config): Promise<Connection> {
    return;
  }
  /**
   * "disconnect()" Closes the connection with the server.
   * @return {Promise<boolean>} Returns a promise status.
   */
  @Cordova()
  disconnect(): Promise<boolean> {
    return;
  }
  /**
   * "isConnected()" Checks the current state of the connection.
   * @return {Promise<boolean>} Returns a promise with connection status
   */
  @Cordova()
  isConnected(): Promise<boolean> {
    return;
  }
  /** Note: Connected to an IMAP service is REQUIRED to be able to get data from the below functions. */
  /**
   * Returns an array of Folder names based on a regular expression pattern.
   *
   * Example:
   *
   * listMailFolders('*') using a '*' pattern will return all folder names
   * listMailFolders('INBOX*') using a pattern with a folder name will list all the subfolder names that match the pattern
   *
   *
   * "listMailFolders(pattern: string)" Lists the name of mail folders in the mailbox.
   * @param pattern {string} Regular expression pattern.
   * @return {Promise<string[]>} Returns array of folder names matching the pattern.
   */
  @Cordova()
  listMailFolders(pattern: string): Promise<string[]> {
    return;
  }
  /**
   * "getMessageCountByFolderName(folderName: string)" Gets the count of the messages in the folder.
   * @param folderName {string} The name of the desired folder.
   * @return {Promise<number>} Returns the number of messages in the folder (the consecutive number of the last message).
   */
  @Cordova()
  getMessageCountByFolderName(folderName: string): Promise<number> {
    return;
  }
  /**
   * "searchMessagesByDatePeriod(folderName: string, dateInMilliseconds: number, comparison: Comparison)" Returns the messages' consecutive number.
   * @param folderName {string} The name of the desired folder
   * @param dateInMilliseconds {number} Date in milliseconds
   * @param comparison {Comparison} A comparison operator
   * @return {Promise<number[]>} Returns array with the messages' consecutive numbers.
   */
  @Cordova()
  searchMessagesByDatePeriod(
    folderName: string,
    dateInMilliseconds: number,
    comparison: Comparison
  ): Promise<number[]> {
    return;
  }
  /**
   * "listMessagesHeadersByConsecutiveNumber(folderName: string, start: number, end: number)" Returns messages' headers data based on a "start" and "end" message consecutive number.
   * @param folderName {string} The name of the desired folder
   * @param start {number} Consecutive number of the first message.
   * @param end {number} Consecutive number of the last message
   * @return {Promise<MessageHeaders[]>} Returns array with the messages' headers data.
   */
  @Cordova()
  listMessagesHeadersByConsecutiveNumber(folderName: string, start: number, end: number): Promise<MessageHeaders[]> {
    return;
  }
  /**
   * "listMessagesHeadersByDate(folderName: string, dateInMilliseconds: number, comparison: Comparison)" Returns messages' headers data based on a date.
   * @param folderName {string} The name of the desired folder
   * @param dateInMilliseconds {number} Date in milliseconds.
   * @param comparison {Comparison} A comparison operator
   * @return {Promise<MessageHeaders[]>} Returns array messages' headers data.
   */
  @Cordova()
  listMessagesHeadersByDate(
    folderName: string,
    dateInMilliseconds: number,
    comparison: Comparison
  ): Promise<MessageHeaders[]> {
    return;
  }
  /**
   * "getFullMessageData(folderName: string, messageNumber: number)" Returns the full message data including its attachments.
   * @param folderName {string} The name the message's folder.
   * @param messageNumber {number} Message's consecutive number.
   * @return {Promise<Message>} Returns "Message" object with full message data.
   */
  @Cordova()
  getFullMessageData(folderName: string, messageNumber: number): Promise<Message> {
    return;
  }
  /**
   *
   * This function "getFullMessageDataOnNewSession(clientData: Config, folderName: string, messageNumber: number)" downloads the full message data using a
   * separate session with the server. It is suitable for downloading message data while the app is already connected to a third server.
   * "getFullMessageDataOnNewSession(clientData: Config, folderName: string, messageNumber: number)" Returns the full message data including its attachments.
   * @param clientData {Config} Connection configuration.
   * @param folderName {string} The name the message's folder.
   * @param messageNumber {number} Message's consecutive number.
   * @return {Promise<Message>} Returns "Message" object with full message data.
   */
  @Cordova()
  getFullMessageDataOnNewSession(clientData: Config, folderName: string, messageNumber: number): Promise<Message> {
    return;
  }
  /**
   * "copyToFolder(sourceFolder: string, destinationFolder: string, messageNums: number[])" Copy messages to a desired folder.
   * @param sourceFolder {string} The name of the source folder.
   * @param destinationFolder {string} The name of the destination folder.
   * @param messageNums {number[]} Array with messages' consecutive numbers or array with single message consecutive number.
   * @return {Promise<boolean>} Returns boolean status of the process.
   */
  @Cordova()
  copyToFolder(sourceFolder: string, destinationFolder: string, messageNums: number[]): Promise<boolean> {
    return;
  }
  /**
   * "setFlag(folderName: string, messageNums: number[], flag: FlagEnum, status: boolean)" Set or remove flag from a message
   * @param folderName {string} The name of the source folder where the messages are contained.
   * @param messageNums {number[]} Array with messages' consecutive numbers or array with single message consecutive number
   * @param flag {FlagEnum} Desired message flag.
   * @param status {boolean} Set status to "true" to set the flag on a message; or to "false" to remove the flag from the message
   * @return {Promise<ModificationResult>} Returns object with status and array with messages' consecutive numbers of the modified messages
   */
  @Cordova()
  setFlag(folderName: string, messageNums: number[], flag: FlagEnum, status: boolean): Promise<ModificationResult> {
    return;
  }
} | the_stack |
import * as SerialPort from 'serialport';
import * as childProcess from 'child_process';
import * as grpc from 'grpc';
import * as path from 'path';
import * as os from 'os';
import * as fs from 'fs';
import * as yaml from 'js-yaml';
import Serial from './serial';
import { ArduinoCoreClient } from '../grpc/commands_grpc_pb';
import { CompileReq, CompileResp } from '../grpc/compile_pb';
import { Configuration, InitReq, VersionReq } from '../grpc/commands_pb';
import { Instance } from '../grpc/common_pb';
import { UploadReq, UploadResp } from '../grpc/upload_pb';
import { rejects } from 'assert';
import * as util from "./util";
import logger from "../files/logger";
/** Metadata describing a serial port, as reported by the serialport library. */
export interface PortDescriptor {
  manufacturer: string;
  serialNumber: string;
  pnpId: string;
  locationId: string;
  vendorId: string;
  productId: string;
  // Port path/name, e.g. 'COM3' or '/dev/ttyUSB0'
  comName: string;
}
/** Locations of detected Arduino-related installations; null when not found. */
export interface InstallInfo {
  // Arduino IDE install directory, or null if not detected
  arduino: string | null;
  // arduino-cli location, or null if not detected
  cli: string | null;
  // Sketchbook directory (user sketches/libraries), or null if unknown
  sketchbook: string | null;
  // Local Arduino data directory (e.g. Arduino15), or null if unknown
  local: string | null;
  // Detected Arduino IDE version string, or null/empty if unknown
  arduinoVersion: string | null;
}
export default class ArduinoCompiler {
// gRPC client for the arduino-cli daemon on its default port (50051)
private static client = new ArduinoCoreClient(
  'localhost:50051',
  grpc.credentials.createInsecure()
);
// Handle to the spawned arduino-cli daemon child process
private static process: childProcess.ChildProcess;
// gRPC core instance handle; undefined until the daemon connection is initialized
private static instance: Instance;
// Temp project directory; suffixed with the username on non-Windows systems
private static readonly CB_TMP: string = path.join(os.tmpdir(), 'circuitblocks' + (os.type() != "Windows_NT" ? "-proj-" + os.userInfo().username : ""));
// Arduino directories, populated via setup()
private static ARDUINO_INSTALL: string = '';
private static ARDUINO_HOME: string = '';
private static ARDUINO_LOCAL: string = '';
// Lazily created shared Serial instance (see getSerial())
private static serial: Serial;
// Cached result of the install detection (see checkInstall())
private static installInfo: InstallInfo;
// True while a daemon connection attempt is in progress
private static daemonConnecting: boolean = false;
/** Returns the cached install locations detected by checkInstall(). */
public static getInstallInfo(){
  return this.installInfo;
}
/**
 * Reports the arduino-cli daemon connection state.
 * @returns connecting — an attempt is in progress; connected — a daemon instance handle exists
 */
public static getDaemon(): { connected: boolean, connecting: boolean }{
  const connecting = this.daemonConnecting;
  const connected = this.instance != undefined;
  return { connecting, connected };
}
/** Returns the configured Arduino directories (install, home, local data). */
public static getDirectories() {
  const { ARDUINO_INSTALL: install, ARDUINO_HOME: home, ARDUINO_LOCAL: local } = this;
  return { install, home, local };
}
/** Lazily creates (on first use) and returns the shared Serial instance. */
public static getSerial() {
  if (this.serial === undefined) {
    const created = new Serial();
    this.serial = created;
  }
  return this.serial;
}
/**
 * Records the relevant Arduino directories for later use.
 * @param install Arduino install directory. Contains directories "hardware", "tools", "tools-builder", etc.
 * @param home Arduino home directory. Usually in the user's My Documents. Usually contains directories "libraries"
 * and "sketches".
 * @param local Arduino local directory
 */
public static setup(install: string, home: string, local: string) {
  ArduinoCompiler.ARDUINO_INSTALL = install;
  ArduinoCompiler.ARDUINO_HOME = home;
  ArduinoCompiler.ARDUINO_LOCAL = local;
}
public static checkInstall(): InstallInfo | null {
let home: string;
if (os.type() === 'Windows_NT') {
home = path.join(os.homedir(), 'AppData', 'Local');
} else if (os.type() === 'Linux') {
home = os.homedir();
} else if (os.type() === 'Darwin') {
home = os.homedir();
} else {
return null;
}
const installPath = path.join(home, os.type() == 'Windows_NT' ? 'ArduinoCLI' : '.arduino');
let info: InstallInfo = { arduino: null, cli: null, sketchbook: null, local: null, arduinoVersion: "" };
if(os.type() == "Darwin"){
let install = path.join("/Applications", "Arduino.app");
if(fs.existsSync(install)){
info.arduino = install;
}
} else if(os.type() == "Windows_NT"){
let install = path.join("C:", "Program Files (x86)", "Arduino");
if(fs.existsSync(path.join(install, "arduino.exe"))){
info.arduino = install;
}else{
install = path.join("C:", "Program Files", "Arduino");
if(fs.existsSync(path.join(install, "arduino.exe"))) {
info.arduino = install;
}
}
}
let local: string;
if (os.type() === 'Windows_NT') {
local = path.join(os.homedir(), 'AppData', 'Local', 'Arduino15');
if (!fs.existsSync(local)) {
local = path.join(os.homedir(), 'AppData', 'Roaming', 'Arduino15');
}
} else if (os.type() === 'Linux') {
local = path.join(os.homedir(), '.arduino15');
} else if (os.type() === 'Darwin') {
local = path.join(os.homedir(), 'Library', 'Arduino15');
} else {
return info;
}
if (!fs.existsSync(local)) {
return info;
}
const prefPath = path.join(local, 'preferences.txt');
const configPath = path.join(local, 'arduino-cli.yaml');
if (fs.existsSync(prefPath)) {
const preferences = this.parsePreferences(prefPath);
if (preferences != null) {
if(preferences.arduino && fs.existsSync(preferences.arduino)){
info.arduino = preferences.arduino;
}
info.sketchbook = preferences.sketchbook;
info.arduinoVersion = preferences.version;
}
}
if (fs.existsSync(configPath)) {
const config = yaml.safeLoad(fs.readFileSync(configPath));
info.local = config.arduino_data;
info.sketchbook = config.sketchbook_path;
if(config.directories && config.directories.user){
info.local = config.directories.data;
info.sketchbook = config.directories.user;
}
}
if (!fs.existsSync(installPath)) {
return info;
}
const cliPath = path.join(
installPath,
'arduino-cli' + (os.type() == 'Windows_NT' ? '.exe' : '')
);
if (fs.existsSync(cliPath)) {
info.cli = installPath;
}
if (info.arduino == null) {
let install: { version: string; path: string } | null = null;
fs.readdirSync(installPath).forEach((file) => {
const arduinoPath = path.join(installPath, file);
if (!fs.statSync(arduinoPath).isDirectory()) return;
if (!file.startsWith('arduino-')) return;
const version = file.substring(8);
if (install == null || util.isNewer(version, install.version)) {
install = { version, path: arduinoPath };
}
});
if(install){
info.arduino = install.path;
}
}
this.installInfo = info;
return info;
}
private static parsePreferences(prefPath): { arduino: string; sketchbook: string; version: string } | null {
const preferences = fs
.readFileSync(prefPath)
.toString()
.split(os.EOL);
let home: string = '';
const installs: any = {};
preferences.forEach((line) => {
const parts = line.split('=');
const prop = parts[0];
const val = parts[1];
if (prop === 'sketchbook.path') {
home = val;
} else if (prop.startsWith('last.ide') && prop.endsWith('.hardwarepath')) {
let version = prop.substring(9, prop.length - 13);
const versionPath = val.substring(0, val.length - 9);
if(fs.existsSync(versionPath)){
installs[version] = versionPath;
}
}
});
if (installs === {}) return null;
const versions = Object.keys(installs);
let newest = versions[0];
for (let i = 1; i < versions.length; i++) {
if (util.isNewer(versions[i], newest)) newest = versions[i];
}
return { arduino: installs[newest], sketchbook: home, version: newest };
}
/**
* Starts the builder daemon. Rejects if the builder couldn't be found or paths haven't been set up.
*/
public static startDaemon(): Promise<null> {
return new Promise<null>((resolve, reject) => {
if (this.installInfo == null || this.installInfo.cli == null) {
reject(new Error('Paths not set up'));
return;
}
this.daemonConnecting = true;
let cliPath = path.join(
this.installInfo.cli,
'arduino-cli' + (os.type() == 'Windows_NT' ? '.exe' : '')
);
if (!fs.existsSync(cliPath)) {
reject(new Error('CLI not found'));
return;
}
const cliCommand = os.type() == "Windows_NT"
? "arduino-cli.exe"
: "./arduino-cli";
this.process = childProcess.execFile(cliCommand, ['daemon'], { cwd: this.installInfo.cli });
const req = new InitReq();
req.setLibraryManagerOnly(false);
const conf = new Configuration();
conf.setBoardmanageradditionalurlsList([
'https://raw.githubusercontent.com/CircuitMess/MAKERphone/boardArduino/package_CircuitMess_Ringo_index.json'
]);
conf.setDatadir(this.installInfo.local);
conf.setSketchbookdir(this.installInfo.sketchbook);
req.setConfiguration(conf);
const context = this;
let tries = 0;
let restarted = false;
function connect(){
if (tries > 2 && restarted) {
context.daemonConnecting = false;
reject(new Error('Unable to connect to CLI. Please restart the program.'));
return;
}
tries++;
logger.log("Connecting to daemon, try " + tries);
console.log("Connecting to daemon, try " + tries);
function onErr(){
if(tries > 2 && !restarted){
restarted = true;
tries = 0;
logger.log("Reached 6 tries, restarting daemon");
context.stopDaemon();
context.process = childProcess.execFile(cliPath, ['daemon']);
}
setTimeout(connect, 2000);
}
try {
context.client
.init(req)
.on('data', (data) => {
context.instance = new Instance();
context.instance.setId(data.array[0][0]);
context.daemonConnecting = false;
})
.on('error', error => {
logger.log("Daemon race condition", error);
console.log("race condition");
onErr();
})
.on('end', (data) => resolve());
}catch(e){
onErr();
}
}
connect();
});
}
/**
* Stops the builder daemon.
*/
public static stopDaemon(toBeReconnected?: boolean) {
if(this.process) this.process.kill();
this.instance = undefined;
if(toBeReconnected){
this.daemonConnecting = true;
}else{
this.daemonConnecting = false;
}
}
/**
* Retrieves the possible MAKERphone ports.
* @param thirdParty accept any usb to serial
*/
public static identifyPort(thirdParty: boolean = false): Promise<PortDescriptor[]> {
return new Promise<any>((resolve, _reject) => {
SerialPort.list().then(ports => {
resolve(
ports.filter((port) =>
thirdParty
? port.vendorId && port.productId
: port.vendorId && port.productId && (
(port.vendorId.toLowerCase() === '10c4' && port.productId.toLowerCase() === 'ea60') ||
(port.vendorId.toLowerCase() === '1a86' && port.productId.toLowerCase() === '7523')
)
)
);
}).catch(err => {
_reject(err);
return;
});
});
}
/**
* Compiles the specified Arduino C code. See {@link compileSketch} for details on returned promise
* @see compileSketch
* @param code Arduino C code
* @param device fqbn
* @param progressCallback callback for progress reporting. Takes a single argument which represents percentage (0-100)
*/
public static compile(
code: string,
device: string,
progressCallback?: (number) => void,
minimal?: boolean
): Promise<{ binary: string; stdout: string[]; stderr: string[] }> {
const sketchDir = path.join(this.CB_TMP, 'sketch');
const sketchPath = path.join(sketchDir, 'sketch.ino');
if (!fs.existsSync(sketchDir)) fs.mkdirSync(sketchDir, { recursive: true });
fs.writeFileSync(sketchPath, code);
return this.compileSketch(sketchPath, device, progressCallback, minimal);
}
/**
* Compiles the specified Arduino sketch.
*
* Returns a promise. On success, resolves with the following object:
* { binary: path to the compiled binary, stdout: array of status strings returned by the compiler,
* stderr: array of error/warning messages returned by the compiler}
*
* On error rejects with an error object with an additional stderr array containing error/warning messages.
*
* @param sketchPath Absolute path to the sketch to be compiled.
* @param device fqbn
* @param progressCallback callback for progress reporting. Takes a single argument which represents percentage (0-100)
*/
public static compileSketch(
sketchPath: string,
device: string,
progressCallback?: (number) => void,
minimal?: boolean
): Promise<{ binary: string; stdout: string[]; stderr: string[] }> {
const pathParsed = path.parse(sketchPath);
const sketchName = pathParsed.name;
const sketchDir = pathParsed.dir;
let buildPath;
let cachePath;
if(minimal){
cachePath = path.join(this.CB_TMP, 'cache-minimal');
buildPath = path.join(this.CB_TMP, 'build-minimal', sketchName);
}else{
cachePath = path.join(this.CB_TMP, 'cache');
buildPath = path.join(this.CB_TMP, 'build', sketchName);
}
const compiledPath: string = path.join(buildPath, sketchName + '.ino.bin');
if (!fs.existsSync(buildPath)) fs.mkdirSync(buildPath, { recursive: true });
if (!fs.existsSync(cachePath)) fs.mkdirSync(cachePath, { recursive: true });
const time = 20; // approx. time to compile
const promise: Promise<{ binary: string; stdout: string[]; stderr: string[] }> = new Promise((resolve, reject) => {
if (this.instance == undefined) {
reject(new Error('Daemon not started'));
return;
}
let currentProgress = 0;
let progPerTenthSec = 100 / (time * 10);
let finished = false;
let resolveObject = undefined;
let stage = 1;
function popProgress() {
if (stage == 1 && currentProgress >= 80 && !finished) {
progPerTenthSec /= 10;
stage++;
} else if (stage == 2 && currentProgress >= 90 && !finished) {
progPerTenthSec /= 10;
stage++;
} else if (stage == 3 && currentProgress >= 95 && !finished) {
progPerTenthSec /= 10;
stage++;
}
if (currentProgress < 100) {
currentProgress += progPerTenthSec;
currentProgress = Math.min(currentProgress, 100);
}
progressCallback(currentProgress);
if (currentProgress >= 100 && finished) {
clearInterval(progInterval);
resolve(resolveObject);
}
}
let progInterval;
if (progressCallback) {
progInterval = setInterval(popProgress, 100);
}
console.log("Compiling for", device);
logger.log("Compiling for " + device);
const req = new CompileReq();
req.setInstance(this.instance);
req.setSketchpath(sketchDir);
req.setBuildcachepath(cachePath);
req.setBuildpath(buildPath);
req.setFqbn(device);
req.setExportfile(path.join(buildPath, 'export'));
req.setVerbose(true);
if(minimal && device == "cm:esp32:ringo"){
logger.log("compiling minimal");
req.setBuildpropertiesList([ "compiler.c.extra_flags=-DMPMINIMAL", "compiler.cpp.extra_flags=-DMPMINIMAL", "compiler.c.elf.extra_flags=-DMPMINIMAL" ]);
}
const stream = this.client.compile(req);
const stdout: string[] = [];
const stderr: string[] = [];
let fulfilled = false;
let error: any;
const decoder = new TextDecoder('utf-8');
stream.on('error', (data) => {
error = data;
});
stream.on('data', (data: CompileResp) => {
function write(what: Uint8Array | string, where: string[]) {
if (what instanceof Uint8Array) {
what = decoder.decode(what);
}
where.push(what);
}
if (data.getOutStream().length != 0) {
write(data.getOutStream(), stdout);
} else {
write(data.getErrStream(), stderr);
}
});
stream.on('end', () => {
if (fulfilled) return;
fulfilled = true;
error.stderr = stderr;
error.stdout = stdout;
if (progInterval) clearInterval(progInterval);
reject(error);
});
stream.on('status', (data) => {
fulfilled = true;
if (data.code === 0) {
resolveObject = { binary: compiledPath, stdout, stderr };
if (progInterval) {
finished = true;
progPerTenthSec = 100 / (time * 10);
clearInterval(progInterval);
progInterval = setInterval(popProgress, 10);
} else {
resolve(resolveObject);
}
} else {
error.stderr = stderr;
error.stdout = stdout;
if (progInterval) clearInterval(progInterval);
reject(error);
}
});
});
promise.catch(err => {
logger.log("CLI compile error", err);
throw err;
});
return promise;
}
/**
* Uploads the specified binary to the MAKERphone
* @param binary Path to the binary
* @param port MAKERphone port
* @param device fqbn
* @param progressCallback callback for progress reporting. Takes a single argument which represents percentage (0-100)
*/
public static uploadBinary(
binary: string,
port: string,
device: string,
progressCallback?: (number) => void
): Promise<null> {
const promise = new Promise<null>((resolve, reject) => {
if (!fs.existsSync(binary)) {
reject(new Error("Binary doesn't exist"));
return;
}
const serial = this.getSerial();
serial.stop();
serial.setUploading(true);
console.log("Uploading to", device);
if(device == "cm:esp8266:nibble"){
device += ":baud=921600";
}
const req = new UploadReq();
req.setInstance(this.instance);
req.setFqbn(device);
req.setImportFile(binary);
req.setSketchPath(binary);
req.setPort(port);
req.setVerify(true);
const stream = this.client.upload(req);
const stdout: string[] = [];
const stderr: string[] = [];
let fulfilled = false;
let error: any;
const decoder = new TextDecoder('utf-8');
const progRegex = new RegExp('Writing at 0x([0-9a-f]{8})... \\((\\d+) %\\)');
let progressStarted = false;
stream.on('error', (data) => {
error = data;
});
stream.on('data', (data: UploadResp) => {
function write(what: Uint8Array | string, where: string[]) {
if (what instanceof Uint8Array) {
what = decoder.decode(what);
}
where.push(what);
if (!progressCallback) return;
const matches = progRegex.exec(what);
if (matches) {
if (matches[1] == '00010000') {
progressStarted = true;
} else if (matches[1] == '00008000') {
progressStarted = false;
}
if (progressStarted) {
const val = parseInt(matches[2]);
progressCallback(val);
if (val == 100) {
progressStarted = false;
}
}
}
}
if (data.getOutStream().length != 0) {
write(data.getOutStream(), stdout);
} else {
write(data.getErrStream(), stderr);
}
});
stream.on('end', () => {
if (fulfilled) return;
fulfilled = true;
error.stdout = stdout;
error.stderr = stderr;
reject(error);
});
stream.on('status', (data) => {
fulfilled = true;
if (data.code === 0) {
resolve();
} else {
error.stdout = stdout;
error.stderr = stderr;
reject(error);
}
});
});
promise.catch(err => {
logger.log("CLI upload error", err);
throw err;
});
promise.finally(() => { this.getSerial().setUploading(false); this.getSerial().start() });
return promise;
}
} | the_stack |
/**
 * AutoDiscover user settings that can be requested from the Exchange
 * AutoDiscover service. Numeric values are part of the wire/serialization
 * contract and must not be changed.
 */
export enum UserSettingName {
    /**
     * The display name of the user.
     */
    UserDisplayName = 0,
    /**
     * The legacy distinguished name of the user.
     */
    UserDN = 1,
    /**
     * The deployment Id of the user.
     */
    UserDeploymentId = 2,
    /**
     * The fully qualified domain name of the mailbox server.
     */
    InternalMailboxServer = 3,
    /**
     * The fully qualified domain name of the RPC client server.
     */
    InternalRpcClientServer = 4,
    /**
     * The legacy distinguished name of the mailbox server.
     */
    InternalMailboxServerDN = 5,
    /**
     * The internal URL of the Exchange Control Panel.
     */
    InternalEcpUrl = 6,
    /**
     * The internal URL of the Exchange Control Panel for VoiceMail Customization.
     */
    InternalEcpVoicemailUrl = 7,
    /**
     * The internal URL of the Exchange Control Panel for Email Subscriptions.
     */
    InternalEcpEmailSubscriptionsUrl = 8,
    /**
     * The internal URL of the Exchange Control Panel for Text Messaging.
     */
    InternalEcpTextMessagingUrl = 9,
    /**
     * The internal URL of the Exchange Control Panel for Delivery Reports.
     */
    InternalEcpDeliveryReportUrl = 10,
    /**
     * The internal URL of the Exchange Control Panel for RetentionPolicy Tags.
     */
    InternalEcpRetentionPolicyTagsUrl = 11,
    /**
     * The internal URL of the Exchange Control Panel for Publishing.
     */
    InternalEcpPublishingUrl = 12,
    /**
     * The internal URL of the Exchange Control Panel for photos.
     */
    InternalEcpPhotoUrl = 13,
    /**
     * The internal URL of the Exchange Control Panel for People Connect subscriptions.
     */
    InternalEcpConnectUrl = 14,
    /**
     * The internal URL of the Exchange Control Panel for Team Mailbox.
     */
    InternalEcpTeamMailboxUrl = 15,
    /**
     * The internal URL of the Exchange Control Panel for creating Team Mailbox.
     */
    InternalEcpTeamMailboxCreatingUrl = 16,
    /**
     * The internal URL of the Exchange Control Panel for editing Team Mailbox.
     */
    InternalEcpTeamMailboxEditingUrl = 17,
    /**
     * The internal URL of the Exchange Control Panel for hiding Team Mailbox.
     */
    InternalEcpTeamMailboxHidingUrl = 18,
    /**
     * The internal URL of the Exchange Control Panel for the extension installation.
     */
    InternalEcpExtensionInstallationUrl = 19,
    /**
     * The internal URL of the Exchange Web Services.
     */
    InternalEwsUrl = 20,
    /**
     * The internal URL of the Exchange Management Web Services.
     */
    InternalEmwsUrl = 21,
    /**
     * The internal URL of the Offline Address Book.
     */
    InternalOABUrl = 22,
    /**
     * The internal URL of the Photos service.
     */
    InternalPhotosUrl = 23,
    /**
     * The internal URL of the Unified Messaging services.
     */
    InternalUMUrl = 24,
    /**
     * The internal URLs of the Exchange web client.
     */
    InternalWebClientUrls = 25,
    /**
     * The distinguished name of the mailbox database of the user's mailbox.
     */
    MailboxDN = 26,
    /**
     * The name of the Public Folders server.
     */
    PublicFolderServer = 27,
    /**
     * The name of the Active Directory server.
     */
    ActiveDirectoryServer = 28,
    /**
     * The name of the RPC over HTTP server.
     */
    ExternalMailboxServer = 29,
    /**
     * Indicates whether the RPC over HTTP server requires SSL.
     */
    ExternalMailboxServerRequiresSSL = 30,
    /**
     * The authentication methods supported by the RPC over HTTP server.
     */
    ExternalMailboxServerAuthenticationMethods = 31,
    /**
     * The URL fragment of the Exchange Control Panel for VoiceMail Customization.
     */
    EcpVoicemailUrlFragment = 32,
    /**
     * The URL fragment of the Exchange Control Panel for Email Subscriptions.
     */
    EcpEmailSubscriptionsUrlFragment = 33,
    /**
     * The URL fragment of the Exchange Control Panel for Text Messaging.
     */
    EcpTextMessagingUrlFragment = 34,
    /**
     * The URL fragment of the Exchange Control Panel for Delivery Reports.
     */
    EcpDeliveryReportUrlFragment = 35,
    /**
     * The URL fragment of the Exchange Control Panel for RetentionPolicy Tags.
     */
    EcpRetentionPolicyTagsUrlFragment = 36,
    /**
     * The URL fragment of the Exchange Control Panel for Publishing.
     */
    EcpPublishingUrlFragment = 37,
    /**
     * The URL fragment of the Exchange Control Panel for photos.
     */
    EcpPhotoUrlFragment = 38,
    /**
     * The URL fragment of the Exchange Control Panel for People Connect.
     */
    EcpConnectUrlFragment = 39,
    /**
     * The URL fragment of the Exchange Control Panel for Team Mailbox.
     */
    EcpTeamMailboxUrlFragment = 40,
    /**
     * The URL fragment of the Exchange Control Panel for creating Team Mailbox.
     */
    EcpTeamMailboxCreatingUrlFragment = 41,
    /**
     * The URL fragment of the Exchange Control Panel for editing Team Mailbox.
     */
    EcpTeamMailboxEditingUrlFragment = 42,
    /**
     * The URL fragment of the Exchange Control Panel for installing extension.
     */
    EcpExtensionInstallationUrlFragment = 43,
    /**
     * The external URL of the Exchange Control Panel.
     */
    ExternalEcpUrl = 44,
    /**
     * The external URL of the Exchange Control Panel for VoiceMail Customization.
     */
    ExternalEcpVoicemailUrl = 45,
    /**
     * The external URL of the Exchange Control Panel for Email Subscriptions.
     */
    ExternalEcpEmailSubscriptionsUrl = 46,
    /**
     * The external URL of the Exchange Control Panel for Text Messaging.
     */
    ExternalEcpTextMessagingUrl = 47,
    /**
     * The external URL of the Exchange Control Panel for Delivery Reports.
     */
    ExternalEcpDeliveryReportUrl = 48,
    /**
     * The external URL of the Exchange Control Panel for RetentionPolicy Tags.
     */
    ExternalEcpRetentionPolicyTagsUrl = 49,
    /**
     * The external URL of the Exchange Control Panel for Publishing.
     */
    ExternalEcpPublishingUrl = 50,
    /**
     * The external URL of the Exchange Control Panel for photos.
     */
    ExternalEcpPhotoUrl = 51,
    /**
     * The external URL of the Exchange Control Panel for People Connect subscriptions.
     */
    ExternalEcpConnectUrl = 52,
    /**
     * The external URL of the Exchange Control Panel for Team Mailbox.
     */
    ExternalEcpTeamMailboxUrl = 53,
    /**
     * The external URL of the Exchange Control Panel for creating Team Mailbox.
     */
    ExternalEcpTeamMailboxCreatingUrl = 54,
    /**
     * The external URL of the Exchange Control Panel for editing Team Mailbox.
     */
    ExternalEcpTeamMailboxEditingUrl = 55,
    /**
     * The external URL of the Exchange Control Panel for hiding Team Mailbox.
     */
    ExternalEcpTeamMailboxHidingUrl = 56,
    /**
     * The external URL of the Exchange Control Panel for the extension installation.
     */
    ExternalEcpExtensionInstallationUrl = 57,
    /**
     * The external URL of the Exchange Web Services.
     */
    ExternalEwsUrl = 58,
    /**
     * The external URL of the Exchange Management Web Services.
     */
    ExternalEmwsUrl = 59,
    /**
     * The external URL of the Offline Address Book.
     */
    ExternalOABUrl = 60,
    /**
     * The external URL of the Photos service.
     */
    ExternalPhotosUrl = 61,
    /**
     * The external URL of the Unified Messaging services.
     */
    ExternalUMUrl = 62,
    /**
     * The external URLs of the Exchange web client.
     */
    ExternalWebClientUrls = 63,
    /**
     * Indicates that cross-organization sharing is enabled.
     */
    CrossOrganizationSharingEnabled = 64,
    /**
     * Collection of alternate mailboxes.
     */
    AlternateMailboxes = 65,
    /**
     * The version of the Client Access Server serving the request (e.g. 14.XX.YYY.ZZZ)
     */
    CasVersion = 66,
    /**
     * Comma-separated list of schema versions supported by Exchange Web Services. The schema version values
     * will be the same as the values of the ExchangeServerVersion enumeration.
     */
    EwsSupportedSchemas = 67,
    /**
     * The internal connection settings list for pop protocol
     */
    InternalPop3Connections = 68,
    /**
     * The external connection settings list for pop protocol
     */
    ExternalPop3Connections = 69,
    /**
     * The internal connection settings list for imap4 protocol
     */
    InternalImap4Connections = 70,
    /**
     * The external connection settings list for imap4 protocol
     */
    ExternalImap4Connections = 71,
    /**
     * The internal connection settings list for smtp protocol
     */
    InternalSmtpConnections = 72,
    /**
     * The external connection settings list for smtp protocol
     */
    ExternalSmtpConnections = 73,
    /**
     * If set to "Off" then clients should not connect via this protocol.
     * The protocol contents are for informational purposes only.
     */
    InternalServerExclusiveConnect = 74,
    /**
     * The version of the Exchange Web Services server ExternalEwsUrl is pointing to.
     */
    ExternalEwsVersion = 75,
    /**
     * Mobile Mailbox policy settings.
     */
    MobileMailboxPolicy = 76,
    /**
     * Document sharing locations and their settings.
     */
    DocumentSharingLocations = 77,
    /**
     * Whether the user account is an MSOnline account.
     */
    UserMSOnline = 78,
    /**
     * The authentication methods supported by the RPC client server.
     */
    InternalMailboxServerAuthenticationMethods = 79,
    /**
     * Version of the server hosting the user's mailbox.
     */
    MailboxVersion = 80,
    /**
     * Sharepoint MySite Host URL.
     */
    SPMySiteHostURL = 81,
    /**
     * Site mailbox creation URL in SharePoint.
     */
    SiteMailboxCreationURL = 82,
    /**
     * The FQDN of the server used for internal RPC/HTTP connectivity.
     */
    InternalRpcHttpServer = 83,
    /**
     * Indicates whether SSL is required for internal RPC/HTTP connectivity.
     */
    InternalRpcHttpConnectivityRequiresSsl = 84,
    /**
     * The authentication method used for internal RPC/HTTP connectivity.
     */
    InternalRpcHttpAuthenticationMethod = 85,
    /**
     * If set to "On" then clients should only connect via this protocol.
     */
    ExternalServerExclusiveConnect = 86,
    /**
     * If set, then clients can call the server via XTC
     */
    ExchangeRpcUrl = 87,
    /**
     * If set to false then clients should not show the GAL by default, but show the contact list.
     */
    ShowGalAsDefaultView = 88,
    /**
     * AutoDiscover Primary SMTP Address for the user.
     */
    AutoDiscoverSMTPAddress = 89,
    /**
     * The 'interop' external URL of the Exchange Web Services.
     * By interop it means a URL to E14 (or later) server that can serve mailboxes
     * that are hosted in downlevel server (E2K3 and earlier).
     */
    InteropExternalEwsUrl = 90,
    /**
     * Version of server InteropExternalEwsUrl is pointing to.
     */
    InteropExternalEwsVersion = 91,
    /**
     * Public Folder (Hierarchy) information
     */
    PublicFolderInformation = 92,
    /**
     * The version appropriate URL of the AutoDiscover service that should answer this query.
     */
    RedirectUrl = 93,
    /**
     * The URL of the Exchange Web Services for Office365 partners.
     */
    EwsPartnerUrl = 94,
    /**
     * SSL certificate name
     */
    CertPrincipalName = 95,
    /**
     * The grouping hint for certain clients.
     */
    GroupingInformation = 96,
    /**
     * MapiHttpEnabled. TODO: not available any more in official repo
     */
    MapiHttpEnabled = 97,
    /**
     * Internal OutlookService URL
     */
    InternalOutlookServiceUrl = 98,
    /**
     * External OutlookService URL
     */
    ExternalOutlookServiceUrl = 99
}
import { ether, expectEvent, expectRevert } from '@openzeppelin/test-helpers'
import { toBN } from 'web3-utils'
import { expect } from 'chai'
import { evmMineMany, revert, snapshot } from './TestUtils'
import BN from 'bn.js'
import { StakeManagerInstance, TestTokenInstance } from '@opengsn/contracts/types/truffle-contracts'
import { defaultEnvironment } from '@opengsn/common/dist/Environments'
import { constants, toNumber } from '@opengsn/common'
import { balanceTrackerErc20 } from './utils/ERC20BalanceTracker'
const StakeManager = artifacts.require('StakeManager')
const TestToken = artifacts.require('TestToken')
contract('StakeManager', function ([burnAddress, relayManager, anyRelayHub, owner, nonOwner, secondRelayManager]) {
const initialUnstakeDelay = new BN(4)
const initialStake = ether('1')
let stakeManager: StakeManagerInstance
let testToken: TestTokenInstance
function testNotOwnerCannotStake (): void {
it('should not allow not owner to stake for owned relayManager address', async function () {
await expectRevert(
stakeManager.stakeForRelayManager(testToken.address, relayManager, initialUnstakeDelay, 0, { from: nonOwner }),
'not owner'
)
})
}
function testOwnerCanStake (): void {
it('should allow owner to stake for unowned addresses', async function () {
const ownerStakeBalanceBefore = await testToken.balanceOf(owner)
const stakeManagerStakeBalanceBefore = await testToken.balanceOf(stakeManager.address)
const { tx } = await stakeManager.stakeForRelayManager(testToken.address, relayManager, initialUnstakeDelay, initialStake, {
from: owner
})
expectEvent.inTransaction(tx, StakeManager, 'StakeAdded', {
relayManager,
owner,
stake: initialStake,
unstakeDelay: initialUnstakeDelay
})
await expectEvent.inTransaction(tx, TestToken, 'Transfer', {
from: owner,
to: stakeManager.address,
value: initialStake
})
const ownerStakeBalanceAfter = await testToken.balanceOf(owner)
const stakeManagerStakeBalanceAfter = await testToken.balanceOf(stakeManager.address)
assert.equal(stakeManagerStakeBalanceAfter.sub(stakeManagerStakeBalanceBefore).toString(), initialStake.toString())
assert.equal(ownerStakeBalanceBefore.sub(ownerStakeBalanceAfter).toString(), initialStake.toString())
})
}
function testCanPenalize (): void {
it('should allow to penalize hub', async function () {
const beneficiaryBalanceTracker = await balanceTrackerErc20(testToken.address, nonOwner)
const stakeManagerBalanceTracker = await balanceTrackerErc20(testToken.address, stakeManager.address)
const remainingStake = new BN(100)
const penaltyAmount = initialStake.sub(remainingStake)
const { logs } = await stakeManager.penalizeRelayManager(relayManager, nonOwner, penaltyAmount, {
from: anyRelayHub
})
const expectedReward = penaltyAmount.divn(2)
expectEvent.inLogs(logs, 'StakePenalized', {
relayManager,
beneficiary: nonOwner,
reward: expectedReward
})
const relayOwnerGain = await beneficiaryBalanceTracker.delta()
const stakeManagerLoss = await stakeManagerBalanceTracker.delta()
expect(relayOwnerGain).to.be.bignumber.equal(expectedReward)
expect(stakeManagerLoss).to.be.bignumber.equal(penaltyAmount.neg())
// @ts-ignore
const { stake: actualStake, unstakeDelay: actualUnstakeDelay, owner: actualOwner } =
await stakeManager.stakes(relayManager)
expect(actualOwner).to.equal(owner)
expect(actualStake).to.be.bignumber.equal(remainingStake)
expect(actualUnstakeDelay).to.be.bignumber.equal(initialUnstakeDelay)
})
}
  describe('with no stake for relay server', function () {
    beforeEach(async function () {
      // Fresh token and StakeManager per test; mint test tokens and approve the
      // StakeManager to transfer them on the owner's behalf.
      testToken = await TestToken.new()
      stakeManager = await StakeManager.new(defaultEnvironment.maxUnstakeDelay, 0, 0, constants.BURN_ADDRESS, constants.BURN_ADDRESS)
      await testToken.mint(constants.MAX_INT256, { from: owner })
      await testToken.approve(stakeManager.address, constants.MAX_INT256, { from: owner })
    })
    it('should not allow anyone to stake before owner is set', async function () {
      await expectRevert(
        stakeManager.stakeForRelayManager(testToken.address, relayManager, initialUnstakeDelay, initialStake, {
          from: owner
        }),
        'not owner'
      )
    })
    it('should allow manager to set its owner', async function () {
      const { logs } = await stakeManager.setRelayManagerOwner(owner, { from: relayManager })
      expectEvent.inLogs(logs, 'OwnerSet', {
        relayManager,
        owner
      })
    })
    it('should not allow manager to change its owner', async function () {
      await stakeManager.setRelayManagerOwner(owner, { from: relayManager })
      await expectRevert(
        stakeManager.setRelayManagerOwner(owner, {
          from: relayManager
        }),
        'already owned'
      )
    })
    it('should not allow not owner to schedule unlock', async function () {
      await expectRevert(
        stakeManager.unlockStake(relayManager, { from: nonOwner }),
        'not owner'
      )
    })
    context('with owner set', function () {
      beforeEach(async function () {
        await stakeManager.setRelayManagerOwner(owner, { from: relayManager })
      })
      it('should not allow owner to stake with an unstake delay exceeding maximum', async function () {
        await expectRevert(
          stakeManager.stakeForRelayManager(testToken.address, relayManager, defaultEnvironment.maxUnstakeDelay + 1, initialStake, {
            from: owner
          }),
          'unstakeDelay too big'
        )
      })
      it('should not allow owner to stake without token specified', async function () {
        await expectRevert(
          stakeManager.stakeForRelayManager(constants.ZERO_ADDRESS, relayManager, defaultEnvironment.maxUnstakeDelay, initialStake, {
            from: owner
          }),
          'must specify stake token address'
        )
      })
      it('should not allow owner to stake with token that does not match the previously used', async function () {
        // Snapshot/revert keeps the "token already set" state from leaking into other tests.
        const id = (await snapshot()).result
        // set the stake token for the first time
        await stakeManager.stakeForRelayManager(testToken.address, relayManager, defaultEnvironment.maxUnstakeDelay, initialStake, {
          from: owner
        })
        await expectRevert(
          stakeManager.stakeForRelayManager(constants.BURN_ADDRESS, relayManager, defaultEnvironment.maxUnstakeDelay, initialStake, {
            from: owner
          }),
          'stake token address is incorrect'
        )
        await revert(id)
      })
      it('should not allow owner to stake with msg.value passed', async function () {
        await expectRevert(
          stakeManager.stakeForRelayManager(testToken.address, relayManager, defaultEnvironment.maxUnstakeDelay, initialStake, {
            from: owner,
            value: '1'
          }),
          // the method is no longer 'payable'; test is here to prevent regression on merge, etc.
          'Transaction reverted without a reason string'
        )
      })
      testOwnerCanStake()
    })
  })
describe('with stake deposited for relay server', function () {
beforeEach(async function () {
stakeManager = await StakeManager.new(defaultEnvironment.maxUnstakeDelay, 0, 0, constants.BURN_ADDRESS, constants.BURN_ADDRESS)
await testToken.approve(stakeManager.address, constants.MAX_INT256, { from: owner })
await stakeManager.setRelayManagerOwner(owner, { from: relayManager })
await stakeManager.stakeForRelayManager(testToken.address, relayManager, initialUnstakeDelay, initialStake, {
from: owner
})
})
it('should not allow to penalize hub', async function () {
await expectRevert(
stakeManager.penalizeRelayManager(relayManager, nonOwner, initialStake, { from: anyRelayHub }),
'hub not authorized'
)
})
it('should allow querying relayManager\'s stake', async function () {
// @ts-ignore (typechain does not declare names or iterator for return types)
const { stake: actualStake, unstakeDelay: actualUnstakeDelay, owner: actualOwner } =
await stakeManager.stakes(relayManager)
expect(actualOwner).to.equal(owner)
expect(actualStake).to.be.bignumber.equal(initialStake)
expect(actualUnstakeDelay).to.be.bignumber.equal(initialUnstakeDelay)
})
it('should allow owner to stake for a second manager with a different token', async function () {
await stakeManager.setRelayManagerOwner(owner, { from: secondRelayManager })
const { logs } = await stakeManager.stakeForRelayManager(testToken.address, secondRelayManager, initialUnstakeDelay, initialStake, {
from: owner
})
expectEvent.inLogs(logs, 'StakeAdded', {
relayManager: secondRelayManager,
owner,
stake: initialStake,
unstakeDelay: initialUnstakeDelay
})
})
it('owner can increase the relay stake', async function () {
const addedStake = ether('2')
const stake = initialStake.add(addedStake)
const { logs } = await stakeManager.stakeForRelayManager(testToken.address, relayManager, initialUnstakeDelay, addedStake, {
from: owner
})
expectEvent.inLogs(logs, 'StakeAdded', {
relayManager,
stake,
unstakeDelay: initialUnstakeDelay
})
// @ts-ignore (typechain does not declare names or iterator for return types)
const { stake: actualStake } = await stakeManager.stakes(relayManager)
expect(actualStake).to.be.bignumber.equal(initialStake.add(addedStake))
})
it('should allow owner to increase the unstake delay', async function () {
const newUnstakeDelay = new BN(5)
const { logs } = await stakeManager.stakeForRelayManager(testToken.address, relayManager, newUnstakeDelay, 0, { from: owner })
expectEvent.inLogs(logs, 'StakeAdded', {
relayManager,
stake: initialStake,
unstakeDelay: newUnstakeDelay
})
// @ts-ignore (typechain does not declare names or iterator for return types)
const { unstakeDelay: actualUnstakeDelay } = await stakeManager.stakes(relayManager)
expect(actualUnstakeDelay).to.be.bignumber.equal(newUnstakeDelay)
})
it('should not allow owner to decrease the unstake delay', async function () {
await expectRevert(
stakeManager.stakeForRelayManager(testToken.address, relayManager, initialUnstakeDelay.subn(1), 0, { from: owner }),
'unstakeDelay cannot be decreased'
)
})
testNotOwnerCannotStake()
it('should not allow owner to withdraw stakes when not scheduled', async function () {
await expectRevert(stakeManager.withdrawStake(relayManager, { from: owner }), 'Withdrawal is not scheduled')
})
it('should allow relayOwner to authorize new relay hub', async function () {
const { logs } = await stakeManager.authorizeHubByOwner(relayManager, anyRelayHub, { from: owner })
expectEvent.inLogs(logs, 'HubAuthorized', {
relayManager,
relayHub: anyRelayHub
})
})
it('should allow relayManager to authorize new relay hub', async function () {
const { logs } = await stakeManager.authorizeHubByManager(anyRelayHub, { from: relayManager })
expectEvent.inLogs(logs, 'HubAuthorized', {
relayManager,
relayHub: anyRelayHub
})
})
describe('should not allow not owner to call to', function () {
it('unlock stake', async function () {
await expectRevert(stakeManager.unlockStake(relayManager, { from: nonOwner }), 'not owner')
})
it('withdraw stake', async function () {
await expectRevert(stakeManager.withdrawStake(relayManager, { from: nonOwner }), 'not owner')
})
})
describe('should not allow not owner to call to', function () {
it('authorize hub by owner', async function () {
await expectRevert(stakeManager.authorizeHubByOwner(relayManager, anyRelayHub, { from: nonOwner }), 'not owner')
})
it('unauthorize hub by owner', async function () {
await expectRevert(stakeManager.unauthorizeHubByOwner(relayManager, anyRelayHub, { from: nonOwner }), 'not owner')
})
})
describe('should not allow not manager to call to', function () {
it('authorize hub by manager', async function () {
await expectRevert(stakeManager.authorizeHubByManager(anyRelayHub, { from: nonOwner }), 'not manager')
})
it('unauthorize hub by manager', async function () {
await expectRevert(stakeManager.unauthorizeHubByManager(anyRelayHub, { from: nonOwner }), 'not manager')
})
})
})
// Scenario: staked relay manager with one authorized hub (anyRelayHub).
describe('with authorized hub', function () {
  beforeEach(async function () {
    stakeManager = await StakeManager.new(defaultEnvironment.maxUnstakeDelay, 0, 0, constants.BURN_ADDRESS, constants.BURN_ADDRESS)
    await testToken.approve(stakeManager.address, constants.MAX_INT256, { from: owner })
    await stakeManager.setRelayManagerOwner(owner, { from: relayManager })
    await stakeManager.stakeForRelayManager(testToken.address, relayManager, initialUnstakeDelay, initialStake, {
      from: owner
    })
    await stakeManager.authorizeHubByOwner(relayManager, anyRelayHub, { from: owner })
  })
  it('should allow querying relayManager\'s authorized hubs', async function () {
    const authorizedHubWithdrawal = await stakeManager.authorizedHubs(relayManager, anyRelayHub)
    const notAuthorizedHubWithdrawal = await stakeManager.authorizedHubs(relayManager, nonOwner)
    // An authorized hub is marked with a removal time of max uint256
    // (64 'f' hex digits). Renamed from the misleading 'uint32FF' — this is
    // a 256-bit value, not a 32-bit one.
    const maxUint256 = new BN('f'.repeat(64), 'hex')
    expect(authorizedHubWithdrawal).to.be.bignumber.equal(maxUint256)
    expect(notAuthorizedHubWithdrawal).to.be.bignumber.equal(new BN(0))
  })
  it('should not allow to penalize for more than the relayManager stake', async function () {
    await expectRevert(
      stakeManager.penalizeRelayManager(relayManager, nonOwner, initialStake.muln(2), { from: anyRelayHub }),
      'penalty exceeds stake'
    )
  })
  testCanPenalize()
  it('should allow relayOwner to unauthorize an authorized hub', async function () {
    const { logs, receipt } = await stakeManager.unauthorizeHubByOwner(relayManager, anyRelayHub, { from: owner })
    // removalTime is computed from the mined block's timestamp + unstake delay.
    const minedInBlock = await web3.eth.getBlock(receipt.blockNumber)
    const minedBlockTimestamp = toNumber(minedInBlock.timestamp)
    const removalTime = initialUnstakeDelay.add(toBN(minedBlockTimestamp))
    expectEvent.inLogs(logs, 'HubUnauthorized', {
      relayManager,
      relayHub: anyRelayHub,
      removalTime
    })
  })
  it('should allow relayManager to unauthorize an authorized hub', async function () {
    const { logs, receipt } = await stakeManager.unauthorizeHubByManager(anyRelayHub, { from: relayManager })
    const minedInBlock = await web3.eth.getBlock(receipt.blockNumber)
    const minedBlockTimestamp = toNumber(minedInBlock.timestamp)
    const removalTime = initialUnstakeDelay.add(toBN(minedBlockTimestamp))
    expectEvent.inLogs(logs, 'HubUnauthorized', {
      relayManager,
      relayHub: anyRelayHub,
      removalTime
    })
  })
  it('should not allow owner to unauthorize non-authorized hub', async function () {
    await expectRevert(stakeManager.unauthorizeHubByOwner(relayManager, nonOwner, { from: owner }), 'hub not authorized')
  })
  it('should allow owner to schedule stake unlock', async function () {
    const { logs, receipt } = await stakeManager.unlockStake(relayManager, { from: owner })
    const minedInBlock = await web3.eth.getBlock(receipt.blockNumber)
    const minedBlockTimestamp = toNumber(minedInBlock.timestamp)
    const withdrawTime = initialUnstakeDelay.add(toBN(minedBlockTimestamp))
    expectEvent.inLogs(logs, 'StakeUnlocked', {
      relayManager,
      owner,
      withdrawTime
    })
  })
})
// Scenario: a hub was authorized and then immediately unauthorized, which
// starts its removal grace period.
describe('with scheduled deauthorization of an authorized hub', function () {
  beforeEach(async function () {
    stakeManager = await StakeManager.new(defaultEnvironment.maxUnstakeDelay, 0, 0, constants.BURN_ADDRESS, constants.BURN_ADDRESS)
    await testToken.approve(stakeManager.address, constants.MAX_INT256, { from: owner })
    await stakeManager.setRelayManagerOwner(owner, { from: relayManager })
    await stakeManager.stakeForRelayManager(testToken.address, relayManager, initialUnstakeDelay, initialStake, {
      from: owner
    })
    await stakeManager.authorizeHubByOwner(relayManager, anyRelayHub, { from: owner })
    await stakeManager.unauthorizeHubByOwner(relayManager, anyRelayHub, { from: owner })
  })
  // Penalization must still work while the grace period is running.
  testCanPenalize()
  it('should not allow owner to unauthorize hub again', async function () {
    await expectRevert(stakeManager.unauthorizeHubByOwner(relayManager, anyRelayHub, { from: owner }), 'hub not authorized')
  })
  describe('after grace period elapses', function () {
    beforeEach(async function () {
      // NOTE(review): evmMineMany advances blocks, while removalTime above is
      // timestamp-based — presumably each mined block also advances the chain
      // clock enough for the delay to pass; confirm against the test helper.
      await evmMineMany(initialUnstakeDelay.toNumber())
    })
    it('should not allow to penalize hub', async function () {
      await expectRevert(
        stakeManager.penalizeRelayManager(relayManager, nonOwner, initialStake, { from: anyRelayHub }),
        'hub authorization expired'
      )
    })
  })
})
// Scenario: stake unlock has been scheduled while a hub is still authorized.
describe('with scheduled unlock while hub still authorized', function () {
  beforeEach(async function () {
    stakeManager = await StakeManager.new(defaultEnvironment.maxUnstakeDelay, 0, 0, constants.BURN_ADDRESS, constants.BURN_ADDRESS)
    await testToken.approve(stakeManager.address, constants.MAX_INT256, { from: owner })
    await stakeManager.setRelayManagerOwner(owner, { from: relayManager })
    await stakeManager.stakeForRelayManager(testToken.address, relayManager, initialUnstakeDelay, initialStake, {
      from: owner
    })
    await stakeManager.authorizeHubByOwner(relayManager, anyRelayHub, { from: owner })
    await stakeManager.unlockStake(relayManager, { from: owner })
  })
  it('should not allow owner to schedule unlock again', async function () {
    await expectRevert(
      stakeManager.unlockStake(relayManager, { from: owner }),
      'already pending'
    )
  })
  it('should not allow owner to withdraw stakes before it is due', async function () {
    await expectRevert(stakeManager.withdrawStake(relayManager, { from: owner }), 'Withdrawal is not due')
  })
  // The hub is still authorized, so penalization must remain possible.
  testCanPenalize()
  it('should allow to withdraw stake after unstakeDelay', async function () {
    await evmMineMany(initialUnstakeDelay.toNumber())
    const relayOwnerBalanceTracker = await balanceTrackerErc20(testToken.address, owner)
    const stakeManagerBalanceTracker = await balanceTrackerErc20(testToken.address, stakeManager.address)
    const res = await stakeManager.withdrawStake(relayManager, {
      from: owner
    })
    expectEvent.inLogs(res.logs, 'StakeWithdrawn', {
      relayManager,
      amount: initialStake
    })
    // The full stake moves from the StakeManager contract back to the owner.
    const relayOwnerGain = await relayOwnerBalanceTracker.delta()
    const stakeManagerLoss = await stakeManagerBalanceTracker.delta()
    expect(relayOwnerGain).to.be.bignumber.equal(initialStake)
    expect(stakeManagerLoss).to.be.bignumber.equal(initialStake.neg())
  })
  describe('with stake withdrawn', function () {
    beforeEach(async function () {
      await evmMineMany(initialUnstakeDelay.toNumber())
      await stakeManager.withdrawStake(relayManager, { from: owner })
    })
    it('should remove stake, unstake delay of removed relayManager, but remember the owner and token', async function () {
      // @ts-ignore (typechain does not declare names or iterator for return types)
      const { stake: actualStake, unstakeDelay: actualUnstakeDelay, withdrawTime: actualWithdrawTime, owner: actualOwner, token: actualToken } =
        await stakeManager.stakes(relayManager)
      // stake token, relay owner and unstake delay are kept
      expect(actualToken).to.equal(testToken.address)
      expect(actualOwner).to.equal(owner)
      expect(actualUnstakeDelay).to.be.bignumber.equal(initialUnstakeDelay)
      // staked amount and withdrawal block are reset
      expect(actualStake).to.be.bignumber.equal(new BN(0))
      expect(actualWithdrawTime).to.be.bignumber.equal(new BN(0))
    })
    testNotOwnerCannotStake()
    testOwnerCanStake()
  })
})
// Tests for the owner-only setBurnAddress method.
context('#setBurnAddress', function () {
  before(async function () {
    stakeManager = await StakeManager.new(defaultEnvironment.maxUnstakeDelay, 0, 0, constants.BURN_ADDRESS, constants.BURN_ADDRESS)
  })
  it('should allow owner to change burn address', async function () {
    const res = await stakeManager.setBurnAddress(burnAddress)
    // The BurnAddressSet event must carry the new address.
    await expectEvent.inTransaction(res.tx, StakeManager, 'BurnAddressSet', { burnAddress })
  })
  it('should not allow non-owner to change burn address', async function () {
    const attempt = stakeManager.setBurnAddress(burnAddress, { from: nonOwner })
    await expectRevert(attempt, 'caller is not the owner')
  })
})
}) | the_stack |
import Fuse from 'fuse.js';
import { EditorView } from 'prosemirror-view';
import { Fragment, Node, Schema } from 'prosemirror-model';
import {
addColumnAfter,
addColumnBefore,
deleteColumn,
addRowAfter,
addRowBefore,
deleteRow,
mergeCells,
splitCell,
toggleHeaderRow,
toggleHeaderColumn,
toggleHeaderCell,
deleteTable,
isInTable,
} from 'prosemirror-tables';
import { Transaction } from 'prosemirror-state';
import { openAutocomplete } from 'prosemirror-autocomplete';
import { nodeNames, createId } from '@curvenote/schema';
import { AppThunk } from '../../types';
import { LanguageNames } from '../../../views/types';
import { selectSuggestionState } from '../selectors';
import * as actions from '../../actions/editor';
import { ALL_COMMANDS, CommandResult, CommandNames } from '../commands';
import { createFigure, getLinkBoundsIfTheyExist } from '../../actions/utils';
import { getEditorView } from '../../state/selectors';
import { getYouTubeId, getMiroId, getLoomId, getVimeoId } from './utils';
import { opts } from '../../../connect';
import { addImagePrompt } from '../../../prosemirror/plugins/ImagePlaceholder';
// Fuse.js fuzzy-search configuration for command-palette lookups.
// Title matches are weighted highest; shortcut and description contribute
// equally but less. `threshold` 0.4 allows fairly loose matches.
const options = {
  shouldSort: true,
  threshold: 0.4,
  location: 0,
  distance: 100,
  minMatchCharLength: 1,
  keys: [
    {
      name: 'title',
      weight: 0.6,
    },
    {
      name: 'shortcut',
      weight: 0.2,
    },
    {
      name: 'description',
      weight: 0.2,
    },
  ],
};
// Maps command-palette names to the matching prosemirror-tables command.
// All of these commands share the (state, dispatch?) => boolean signature,
// so the map can be typed precisely instead of with `any`.
const TABLE_ACTIONS: { [key: string]: typeof addColumnBefore } = {
  [CommandNames.add_column_before]: addColumnBefore,
  [CommandNames.add_column_after]: addColumnAfter,
  [CommandNames.delete_column]: deleteColumn,
  [CommandNames.add_row_before]: addRowBefore,
  [CommandNames.add_row_after]: addRowAfter,
  [CommandNames.delete_row]: deleteRow,
  [CommandNames.delete_table]: deleteTable,
  [CommandNames.merge_cells]: mergeCells,
  [CommandNames.split_cell]: splitCell,
  [CommandNames.toggle_header_row]: toggleHeaderRow,
  [CommandNames.toggle_header_column]: toggleHeaderColumn,
  [CommandNames.toggle_header_cell]: toggleHeaderCell,
};
// Keep only the commands whose `available` predicate passes for this view;
// commands that define no predicate are always kept.
const filterCommands = (view: EditorView, results: CommandResult[]) => {
  const isAvailable = (r: CommandResult) => r.available?.(view) ?? true;
  return results.filter(isAvailable);
};
// Initial suggestion list (before the user types a query): every command
// that is available in the given view.
export const startingSuggestions = (view: EditorView) =>
  filterCommands(view, ALL_COMMANDS);
/**
 * Returns a thunk that executes a single editor command, usually one chosen
 * from the command palette / autocomplete suggestions.
 *
 * @param command the command to run
 * @param viewOrId an EditorView, a store id used to look one up, or null
 * @param removeText callback that removes the trigger text the user typed;
 *   invoked before the command mutates the document (defaults to a no-op)
 * @param replace when true, block commands replace the current selection
 *   instead of inserting a new node
 * @returns resolves to true when the command was handled
 */
export function executeCommand(
  command: CommandNames,
  viewOrId: EditorView | string | null,
  removeText = () => true,
  replace = false,
): AppThunk<Promise<boolean>> {
  return async (dispatch, getState) => {
    let view: EditorView;
    if (viewOrId == null) return false;
    if (typeof viewOrId === 'string') {
      // Resolve the EditorView from the redux store by editor id.
      const ev = getEditorView(getState(), viewOrId);
      if (ev.view == null) return false;
      view = ev.view;
    } else {
      view = viewOrId;
    }
    const schema = view.state.schema as Schema;
    const replaceOrInsert = replace ? actions.replaceSelection : actions.insertNode;
    // Table commands are delegated to prosemirror-tables, but only when the
    // cursor is actually inside a table.
    if (TABLE_ACTIONS[command] && isInTable(view.state)) {
      removeText();
      TABLE_ACTIONS[command](view.state, (tr: Transaction) => {
        view.dispatch(tr.scrollIntoView());
        view.focus();
      });
      return true;
    }
    switch (command) {
      case CommandNames.footnote: {
        removeText();
        const { empty, $from, $to } = view.state.selection;
        let content = Fragment.empty;
        // If the selection is inline text inside a single parent node, move
        // that text into the new footnote.
        if (!empty && $from.sameParent($to) && $from.parent.inlineContent)
          content = $from.parent.content.cut($from.parentOffset, $to.parentOffset);
        const tr = view.state.tr.replaceSelectionWith(schema.nodes.footnote.create(null, content));
        view.dispatch(tr);
        view.dispatch(actions.selectNode(view.state.tr, empty));
        view.focus();
        return true;
      }
      case CommandNames.image: {
        removeText();
        addImagePrompt(view);
        return true;
      }
      case CommandNames.insert_table: {
        removeText();
        // Build a 2x2 table (header row + one body row) wrapped in a figure.
        const Table = schema.nodes[nodeNames.table];
        const Row = schema.nodes[nodeNames.table_row];
        const header = schema.nodes[nodeNames.table_header].createAndFill() as Node;
        const cell = schema.nodes[nodeNames.table_cell].createAndFill() as Node;
        const figure = createFigure(
          schema,
          Table.create(
            {},
            Fragment.fromArray([
              Row.create({}, Fragment.fromArray([header, header])),
              Row.create({}, Fragment.fromArray([cell, cell])),
            ]),
          ),
        );
        const tr = view.state.tr.replaceSelectionWith(figure).scrollIntoView();
        view.dispatch(tr);
        // TODO: change selection to the first cell!!
        view.focus();
        return true;
      }
      case CommandNames.link: {
        removeText();
        // If the selection is already inside a link, this command toggles the
        // link off instead of prompting for a URL.
        const linkBounds = getLinkBoundsIfTheyExist(view.state);
        if (linkBounds) {
          const { from, to } = linkBounds;
          view.dispatch(view.state.tr.removeMark(from, to, schema.marks.link));
          return true;
        }
        // eslint-disable-next-line no-alert
        const href = prompt('Link Url?');
        if (!href) return false;
        const { from, to } = view.state.selection;
        view.dispatch(view.state.tr.addMark(from, to, schema.marks.link.create({ href })));
        return true;
      }
      case CommandNames.callout:
        removeText();
        dispatch(actions.wrapIn(schema.nodes.callout));
        return true;
      case CommandNames.aside:
        removeText();
        dispatch(actions.wrapIn(schema.nodes.aside));
        return true;
      case CommandNames.horizontal_rule:
        removeText();
        dispatch(replaceOrInsert(schema.nodes.horizontal_rule));
        return true;
      case CommandNames.paragraph:
        // Heading level 0 converts the block back to a paragraph.
        removeText();
        dispatch(actions.wrapInHeading(schema, 0));
        return true;
      case CommandNames.heading1:
      case CommandNames.heading2:
      case CommandNames.heading3:
      case CommandNames.heading4:
      case CommandNames.heading5:
      case CommandNames.heading6:
        removeText();
        // Command names are 'heading1'..'heading6'; slice(7) extracts the level.
        dispatch(actions.wrapInHeading(schema, Number.parseInt(command.slice(7), 10)));
        return true;
      case CommandNames.quote:
        removeText();
        dispatch(actions.wrapIn(schema.nodes.blockquote));
        return true;
      case CommandNames.time:
        removeText();
        dispatch(
          actions.insertInlineNode(schema.nodes.time, { datetime: new Date() }, undefined, false),
        );
        // Trailing space so the cursor ends up after the inline node.
        dispatch(actions.insertText(' '));
        return true;
      case CommandNames.bullet_list:
        removeText();
        dispatch(actions.wrapIn(schema.nodes.bullet_list));
        return true;
      case CommandNames.ordered_list:
        removeText();
        dispatch(actions.wrapIn(schema.nodes.ordered_list));
        return true;
      case CommandNames.emoji:
        // Hand off to the ':' autocomplete trigger.
        removeText();
        openAutocomplete(view, ':');
        return true;
      case CommandNames.math:
        removeText();
        dispatch(actions.insertNode(schema.nodes.math));
        return true;
      case CommandNames.equation:
        removeText();
        dispatch(replaceOrInsert(schema.nodes.equation, { id: createId() }));
        return true;
      case CommandNames.code:
        removeText();
        dispatch(
          replaceOrInsert(schema.nodes.code_block, {
            id: createId(),
            language: LanguageNames.Python,
          }),
        );
        return true;
      case CommandNames.variable:
        removeText();
        dispatch(actions.insertVariable(schema, { name: 'myVar', value: '0', valueFunction: '' }));
        return true;
      case CommandNames.display:
        // Hand off to the '{{' autocomplete trigger.
        removeText();
        openAutocomplete(view, '{{');
        return true;
      case CommandNames.range: {
        removeText();
        // eslint-disable-next-line no-alert
        const name = prompt('Name of the variable:') ?? 'myVar';
        dispatch(
          actions.insertInlineNode(schema.nodes.range, {
            valueFunction: name,
            changeFunction: `{${name}: value}`,
          }),
        );
        return true;
      }
      case CommandNames.dynamic: {
        removeText();
        // eslint-disable-next-line no-alert
        const name = prompt('Name of the variable:') ?? 'myVar';
        dispatch(
          actions.insertInlineNode(schema.nodes.dynamic, {
            valueFunction: name,
            changeFunction: `{${name}: value}`,
          }),
        );
        return true;
      }
      case CommandNames.switch: {
        removeText();
        // eslint-disable-next-line no-alert
        const name = prompt('Name of the variable:') ?? 'myVar';
        dispatch(
          actions.insertInlineNode(schema.nodes.switch, {
            valueFunction: name,
            changeFunction: `{${name}: value}`,
          }),
        );
        return true;
      }
      case CommandNames.button: {
        removeText();
        dispatch(actions.insertInlineNode(schema.nodes.button, { clickFunction: '' }));
        return true;
      }
      // The video/board commands prompt for a share URL, extract the media id,
      // and embed the provider's player via an iframe node.
      case CommandNames.youtube: {
        removeText();
        // eslint-disable-next-line no-alert
        const url = prompt('Link to the YouTube video:');
        if (!url) return true;
        const id = getYouTubeId(url);
        const src = `https://www.youtube-nocookie.com/embed/${id}`;
        dispatch(actions.insertNode(schema.nodes.iframe, { src }));
        return true;
      }
      case CommandNames.loom: {
        removeText();
        // eslint-disable-next-line no-alert
        const url = prompt('Link to the Loom Video:');
        if (!url) return true;
        const id = getLoomId(url);
        const src = `https://www.loom.com/embed/${id}`;
        dispatch(actions.insertNode(schema.nodes.iframe, { src }));
        return true;
      }
      case CommandNames.vimeo: {
        removeText();
        // eslint-disable-next-line no-alert
        const url = prompt('Link to the Vimeo Video:');
        if (!url) return true;
        const id = getVimeoId(url);
        const src = `https://player.vimeo.com/video/${id}`;
        dispatch(actions.insertNode(schema.nodes.iframe, { src }));
        return true;
      }
      case CommandNames.miro: {
        removeText();
        // eslint-disable-next-line no-alert
        const url = prompt('Link to the Miro Board:');
        if (!url) return true;
        const id = getMiroId(url);
        const src = `https://miro.com/app/live-embed/${id}`;
        dispatch(actions.insertNode(schema.nodes.iframe, { src }));
        return true;
      }
      case CommandNames.iframe: {
        removeText();
        // eslint-disable-next-line no-alert
        const src = prompt('Link to the IFrame:');
        if (!src) return true;
        dispatch(actions.insertNode(schema.nodes.iframe, { src }));
        return true;
      }
      // Cross-reference commands hand off to the '[[' autocomplete with a
      // pre-filled kind filter.
      case CommandNames.link_section:
        removeText();
        openAutocomplete(view, '[[', 'sec: ');
        return true;
      case CommandNames.link_figure:
        removeText();
        openAutocomplete(view, '[[', 'fig: ');
        return true;
      case CommandNames.link_equation:
        removeText();
        openAutocomplete(view, '[[', 'eq: ');
        return true;
      case CommandNames.link_code:
        removeText();
        openAutocomplete(view, '[[', 'code: ');
        return true;
      case CommandNames.link_article:
        removeText();
        openAutocomplete(view, '[[', 'article: ');
        return true;
      case CommandNames.link_table:
        removeText();
        openAutocomplete(view, '[[', 'table: ');
        return true;
      case CommandNames.link_notebook:
        removeText();
        openAutocomplete(view, '[[', 'notebook: ');
        return true;
      case CommandNames.citation:
        removeText();
        openAutocomplete(view, '[[', 'cite: ');
        return true;
      case CommandNames.add_citation: {
        removeText();
        // Open the host app's citation picker; may return multiple citations.
        const cites = await opts.citationPrompt();
        if (!cites || cites.length === 0) return true;
        const nodes = cites.map((attrs) => schema.nodes.cite.create({ ...attrs }));
        const wrapped = schema.nodes.cite_group.createAndFill({}, Fragment.from(nodes));
        if (!wrapped) return false;
        const tr = view.state.tr.replaceSelectionWith(wrapped).scrollIntoView();
        view.dispatch(tr);
        return true;
      }
      default:
        // Unknown command: still remove the trigger text.
        return removeText();
    }
  };
}
/**
 * Thunk invoked when the user picks a suggestion from the command palette.
 * Removes the typed trigger text (the suggestion range) and runs the chosen
 * command, replacing the current selection.
 */
export function chooseSelection(result: CommandResult): AppThunk<Promise<boolean>> {
  return async (dispatch, getState) => {
    const suggestion = selectSuggestionState(getState());
    const { view } = suggestion;
    if (view == null) return false;
    const { from, to } = suggestion.range;
    // Deletes the trigger text the user typed to open the palette.
    const removeText = () => {
      const { tr } = view.state;
      tr.insertText('', from, to);
      view.dispatch(tr);
      return true;
    };
    return dispatch(executeCommand(result.name, view, removeText, true));
  };
}
// This is created once, is slightly expensive, and then search method is called many times.
// The index is built over ALL_COMMANDS using the weighted keys configured in `options`.
const fuse = new Fuse(ALL_COMMANDS, options);
export function filterResults(
view: EditorView,
search: string,
callback: (results: CommandResult[]) => void,
): void {
// This lets the keystroke go through:
setTimeout(() => {
const results: CommandResult[] = fuse.search(search as string).map((result) => result.item);
const filtered = filterCommands(view, results);
callback(filtered);
}, 1);
} | the_stack |
import { expect } from 'chai';
import * as imageManager from '../../../src/compose/images';
import * as dbHelper from '../../lib/db-helper';
import { createImage, withMockerode } from '../../lib/mockerode';
import * as sinon from 'sinon';
import log from '../../../src/lib/supervisor-console';
// TODO: this code is duplicated in multiple tests
// create a test module with all helper functions like this
// Build an image row for the test database, filling in sensible defaults for
// any field the caller does not override. Unknown extra fields are passed
// through untouched.
function createDBImage(
  image = {} as Partial<imageManager.Image>,
): imageManager.Image {
  const {
    appId = 1,
    name = 'test-image',
    serviceName = 'test',
    dependent = 0,
    ...rest
  } = image;
  return { appId, dependent, name, serviceName, ...rest } as imageManager.Image;
}
describe('compose/images', () => {
let testDb: dbHelper.TestDatabase;
// One-time setup: create the in-memory test database and silence supervisor
// log output so test results stay readable.
before(async () => {
  testDb = await dbHelper.createDB();
  // disable log output during testing
  sinon.stub(log, 'debug');
  sinon.stub(log, 'warn');
  sinon.stub(log, 'info');
  sinon.stub(log, 'event');
  sinon.stub(log, 'success');
});
// Teardown: destroy the test database (best-effort) and restore stubs.
after(async () => {
  try {
    await testDb.destroy();
  } catch (e) {
    /* noop */
  }
  // Restore stubbed methods
  sinon.restore();
});
// Reset DB contents between tests so state does not leak across cases.
afterEach(async () => {
  await testDb.reset();
});
it('finds image by matching digest on the database', async () => {
  // The DB row references the image by a registry digest the engine does not
  // know; lookup must fall back to the dockerImageId stored in the database.
  const dbImage = createDBImage({
    name:
      'registry2.balena-cloud.com/v2/aaaaa@sha256:2c969a1ba1c6bc10df53481f48c6a74dbd562cfb41ba58f81beabd03facf5582',
    dockerImageId:
      'sha256:f1154d76c731f04711e5856b6e6858730e3023d9113124900ac65c2ccc90e8e7',
  });
  await testDb.models('image').insert([dbImage]);
  const images = [
    createImage(
      {
        Id:
          'sha256:f1154d76c731f04711e5856b6e6858730e3023d9113124900ac65c2ccc90e8e7',
      },
      {
        References: [
          // Different image repo
          'registry2.balena-cloud.com/v2/bbbb@sha256:2c969a1ba1c6bc10df53481f48c6a74dbd562cfb41ba58f81beabd03facf5582',
        ],
      },
    ),
  ];
  await withMockerode(
    async (mockerode) => {
      // Looking by name should fail, if not, this is a mockerode issue
      await expect(mockerode.getImage(dbImage.name).inspect()).to.be.rejected;
      // Looking up the image by id should succeed
      await expect(mockerode.getImage(dbImage.dockerImageId!).inspect()).to
        .not.be.rejected;
      // The image is found
      expect(await imageManager.inspectByName(dbImage.name))
        .to.have.property('Id')
        .that.equals(
          'sha256:f1154d76c731f04711e5856b6e6858730e3023d9113124900ac65c2ccc90e8e7',
        );
      // It couldn't find the image by name so it finds it by matching digest
      expect(mockerode.getImage).to.have.been.calledWith(
        'sha256:f1154d76c731f04711e5856b6e6858730e3023d9113124900ac65c2ccc90e8e7',
      );
    },
    { images },
  );
});
it('finds image by tag on the engine', async () => {
  // No DB entry at all: the tag is resolved directly against the engine.
  const images = [
    createImage(
      {
        Id:
          'sha256:f1154d76c731f04711e5856b6e6858730e3023d9113124900ac65c2ccc90e8e7',
      },
      {
        References: ['some-image:some-tag'],
      },
    ),
  ];
  await withMockerode(
    async (mockerode) => {
      expect(await imageManager.inspectByName('some-image:some-tag'))
        .to.have.property('Id')
        .that.equals(
          'sha256:f1154d76c731f04711e5856b6e6858730e3023d9113124900ac65c2ccc90e8e7',
        );
      expect(mockerode.getImage).to.have.been.calledWith(
        'some-image:some-tag',
      );
      // Check that non existing tags are not found
      await expect(
        imageManager.inspectByName('non-existing-image:non-existing-tag'),
      ).to.be.rejected;
    },
    { images },
  );
});
it('finds image by tag on the database', async () => {
  // The tag is not known to the engine, but a DB row maps it to a
  // dockerImageId that is — lookup must succeed via that id.
  const dbImage = createDBImage({
    name: 'some-image:some-tag',
    dockerImageId:
      'sha256:f1154d76c731f04711e5856b6e6858730e3023d9113124900ac65c2ccc90e8e7',
  });
  await testDb.models('image').insert([dbImage]);
  const images = [
    createImage(
      {
        Id:
          'sha256:f1154d76c731f04711e5856b6e6858730e3023d9113124900ac65c2ccc90e8e7',
      },
      {
        References: [
          // Reference is different but there is a matching name on the database
          'registry2.balena-cloud.com/v2/bbbb@sha256:2c969a1ba1c6bc10df53481f48c6a74dbd562cfb41ba58f81beabd03facf5582',
        ],
      },
    ),
  ];
  await withMockerode(
    async (mockerode) => {
      expect(await imageManager.inspectByName(dbImage.name))
        .to.have.property('Id')
        .that.equals(
          'sha256:f1154d76c731f04711e5856b6e6858730e3023d9113124900ac65c2ccc90e8e7',
        );
      expect(mockerode.getImage).to.have.been.calledWith(
        'sha256:f1154d76c731f04711e5856b6e6858730e3023d9113124900ac65c2ccc90e8e7',
      );
      // Check that non existing tags are not found
      await expect(
        imageManager.inspectByName('non-existing-image:non-existing-tag'),
      ).to.be.rejected;
    },
    { images },
  );
});
it('finds image by reference on the engine', async () => {
  // Digest differs from what the engine knows, but the repo (and optionally
  // the tag) matches an engine reference — lookup should match by reference.
  const images = [
    createImage(
      {
        Id:
          'sha256:f1154d76c731f04711e5856b6e6858730e3023d9113124900ac65c2ccc90e8e7',
      },
      {
        References: [
          // the reference we expect to look for is registry2.balena-cloud.com/v2/one
          'registry2.balena-cloud.com/v2/one:delta-one@sha256:12345a1ba1c6bc10df53481f48c6a74dbd562cfb41ba58f81beabd03facf5582',
          'registry2.balena-cloud.com/v2/one:latest@sha256:12345a1ba1c6bc10df53481f48c6a74dbd562cfb41ba58f81beabd03facf5582',
        ],
      },
    ),
  ];
  await withMockerode(
    async (mockerode) => {
      // This is really testing mockerode functionality
      expect(
        await mockerode.listImages({
          filters: {
            reference: ['registry2.balena-cloud.com/v2/one'],
          },
        }),
      ).to.have.lengthOf(1);
      expect(
        await imageManager.inspectByName(
          // different target digest but same imageName
          'registry2.balena-cloud.com/v2/one@sha256:2c969a1ba1c6bc10df53481f48c6a74dbd562cfb41ba58f81beabd03facf5582',
        ),
      )
        .to.have.property('Id')
        .that.equals(
          'sha256:f1154d76c731f04711e5856b6e6858730e3023d9113124900ac65c2ccc90e8e7',
        );
      expect(mockerode.getImage).to.have.been.calledWith(
        'sha256:f1154d76c731f04711e5856b6e6858730e3023d9113124900ac65c2ccc90e8e7',
      );
      // Looking for the reference with correct name tag should not throw
      await expect(
        imageManager.inspectByName(
          // different target digest but same tag
          'registry2.balena-cloud.com/v2/one:delta-one@sha256:2c969a1ba1c6bc10df53481f48c6a74dbd562cfb41ba58f81beabd03facf5582',
        ),
      ).to.not.be.rejected;
      // Looking for a non existing reference should throw
      await expect(
        imageManager.inspectByName(
          'registry2.balena-cloud.com/v2/two@sha256:2c969a1ba1c6bc10df53481f48c6a74dbd562cfb41ba58f81beabd03facf5582',
        ),
      ).to.be.rejected;
      // A known repo with an unknown tag must also be rejected.
      await expect(
        imageManager.inspectByName(
          'registry2.balena-cloud.com/v2/one:some-tag@sha256:2c969a1ba1c6bc10df53481f48c6a74dbd562cfb41ba58f81beabd03facf5582',
        ),
      ).to.be.rejected;
    },
    { images },
  );
});
it('returns all images in both the database and the engine', async () => {
  // Four DB rows map onto two engine images (shared ids, mismatched names);
  // getAvailable() must still report all four database entries.
  await testDb.models('image').insert([
    createDBImage({
      name: 'first-image-name:first-image-tag',
      serviceName: 'app_1',
      dockerImageId: 'sha256:first-image-id',
    }),
    createDBImage({
      name: 'second-image-name:second-image-tag',
      serviceName: 'app_2',
      dockerImageId: 'sha256:second-image-id',
    }),
    createDBImage({
      name:
        'registry2.balena-cloud.com/v2/three@sha256:2c969a1ba1c6bc10df53481f48c6a74dbd562cfb41ba58f81beabd03facf558',
      serviceName: 'app_3',
      // Third image has different name but same docker id
      dockerImageId: 'sha256:second-image-id',
    }),
    createDBImage({
      name: 'fourth-image-name:fourth-image-tag',
      serviceName: 'app_4',
      // The fourth image exists on the engine but with the wrong id
      dockerImageId: 'sha256:fourth-image-id',
    }),
  ]);
  const images = [
    createImage(
      {
        Id: 'sha256:first-image-id',
      },
      {
        References: ['first-image-name:first-image-tag'],
      },
    ),
    createImage(
      {
        Id: 'sha256:second-image-id',
      },
      {
        References: [
          // The tag for the second image does not exist on the engine but it should be found
          'not-second-image-name:some-image-tag',
          'fourth-image-name:fourth-image-tag',
          'registry2.balena-cloud.com/v2/three@sha256:2c969a1ba1c6bc10df53481f48c6a74dbd562cfb41ba58f81beabd03facf558',
        ],
      },
    ),
  ];
  // Perform the test with our specially crafted data
  await withMockerode(
    async (mockerode) => {
      // failsafe to check for mockerode problems
      expect(
        await mockerode.listImages(),
        'images exist on the engine before test',
      ).to.have.lengthOf(2);
      const availableImages = await imageManager.getAvailable();
      expect(availableImages).to.have.lengthOf(4);
    },
    { images },
  );
});
// Title fixed: was 'removes a single legacy db images …' (grammar).
it('removes a single legacy db image without dockerImageId', async () => {
  // Legacy images don't have a dockerImageId so they are queried by name
  const imageToRemove = createDBImage({
    name: 'image-name:image-tag',
  });
  await testDb.models('image').insert([imageToRemove]);
  // Engine image state
  const images = [
    createImage(
      {
        Id: 'deadbeef',
      },
      {
        // Image references
        References: [
          'image-name:image-tag@sha256:2c969a1ba1c6bc10df53481f48c6a74dbd562cfb41ba58f81beabd03facf5582',
        ],
      },
    ),
    createImage(
      {
        Id: 'deadca1f',
      },
      {
        References: ['balena/aarch64-supervisor:11.11.11'],
      },
    ),
  ];
  // Perform the test with our specially crafted data
  await withMockerode(
    async (mockerode) => {
      // Check that our legacy image exists
      // failsafe to check for mockerode problems
      await expect(
        mockerode.getImage(imageToRemove.name).inspect(),
        'image exists on the engine before test',
      ).to.not.be.rejected;
      // Check that the image exists on the db
      expect(
        await testDb.models('image').select().where(imageToRemove),
      ).to.have.lengthOf(1);
      // Now remove this image...
      await imageManager.remove(imageToRemove);
      // This checks that the remove method was ultimately called
      expect(mockerode.removeImage).to.have.been.calledOnceWith(
        imageToRemove.name,
      );
      // Check that the image was removed from the db
      expect(await testDb.models('image').select().where(imageToRemove)).to.be
        .empty;
    },
    { images },
  );
});
it('removes image from DB and engine when there is a single DB image with matching name', async () => {
  // Newer image
  const imageToRemove = createDBImage({
    name:
      'registry2.balena-cloud.com/v2/one@sha256:2c969a1ba1c6bc10df53481f48c6a74dbd562cfb41ba58f81beabd03facf5582',
    dockerImageId: 'sha256:image-id-one',
  });
  // Insert images into the db
  await testDb.models('image').insert([
    imageToRemove,
    createDBImage({
      name:
        'registry2.balena-cloud.com/v2/two@sha256:12345a1ba1c6bc10df53481f48c6a74dbd562cfb41ba58f81beabd03facf5582',
      dockerImageId: 'sha256:image-id-two',
    }),
  ]);
  // Engine image state
  const images = [
    // The image to remove
    createImage(
      {
        Id: 'sha256:image-id-one',
      },
      {
        References: [
          'registry2.balena-cloud.com/v2/one:delta-one@sha256:2c969a1ba1c6bc10df53481f48c6a74dbd562cfb41ba58f81beabd03facf5582',
        ],
      },
    ),
    // Other images to test
    createImage(
      {
        Id: 'aaa',
      },
      {
        References: ['balena/aarch64-supervisor:11.11.11'],
      },
    ),
    // The other image on the database
    createImage(
      {
        Id: 'sha256:image-id-two',
      },
      {
        References: [
          'registry2.balena-cloud.com/v2/two:delta-two@sha256:12345a1ba1c6bc10df53481f48c6a74dbd562cfb41ba58f81beabd03facf5582',
        ],
      },
    ),
  ];
  // Perform the test with our specially crafted data
  await withMockerode(
    async (mockerode) => {
      // Check that the image exists
      // this is really checking that mockerode works ok
      await expect(
        mockerode.getImage(imageToRemove.name).inspect(),
        'image exists on the engine before test',
      ).to.not.be.rejected;
      // Check that only one image with this dockerImageId exists in the db
      // in memory db is a bit flaky sometimes, this checks for issues
      expect(
        await testDb.models('image').where(imageToRemove).select(),
        'image exists on db before the test',
      ).to.have.lengthOf(1);
      // Now remove this image...
      await imageManager.remove(imageToRemove);
      // Removal happens via the engine's tagged reference, not the DB name.
      expect(mockerode.removeImage).to.have.been.calledOnceWith(
        'registry2.balena-cloud.com/v2/one:delta-one',
      );
      // Check that the database no longer has this image
      expect(await testDb.models('image').select().where(imageToRemove)).to.be
        .empty;
      // Expect 1 entry left on the database
      expect(await testDb.models('image').select()).to.have.lengthOf(1);
    },
    { images },
  );
});
// Two DB rows can share one engine image (same dockerImageId, different
// names). Removing one row must only untag that row's reference and keep
// the sibling row/tag.
it('removes the requested image even when there are multiple DB images with same docker ID', async () => {
	const imageToRemove = createDBImage({
		name:
			'registry2.balena-cloud.com/v2/one@sha256:2c969a1ba1c6bc10df53481f48c6a74dbd562cfb41ba58f81beabd03facf5582',
		dockerImageId: 'sha256:image-id-one',
	});
	const imageWithSameDockerImageId = createDBImage({
		name:
			'registry2.balena-cloud.com/v2/two@sha256:2c969a1ba1c6bc10df53481f48c6a74dbd562cfb41ba58f81beabd03facf5582',
		// Same imageId
		dockerImageId: 'sha256:image-id-one',
	});
	// Insert images into the db
	await testDb.models('image').insert([
		imageToRemove,
		// Another image from the same app
		imageWithSameDockerImageId,
	]);
	// Engine image state
	const images = [
		// The image to remove — a single engine image holding both references
		createImage(
			{
				Id: imageToRemove.dockerImageId!,
			},
			{
				References: [imageToRemove.name, imageWithSameDockerImageId.name],
			},
		),
		// Other images to test
		createImage(
			{
				Id: 'aaa',
			},
			{
				References: ['balena/aarch64-supervisor:11.11.11'],
			},
		),
	];
	// Perform the test with our specially crafted data
	await withMockerode(
		async (mockerode) => {
			// Check that the image is on the engine
			// really checking mockerode behavior
			await expect(
				mockerode.getImage(imageToRemove.dockerImageId!).inspect(),
				'image exists on the engine before the test',
			).to.not.be.rejected;
			// Check that multiple images with the same dockerImageId are returned
			expect(
				await testDb
					.models('image')
					.where({ dockerImageId: imageToRemove.dockerImageId })
					.select(),
			).to.have.lengthOf(2);
			// Now remove these images
			await imageManager.remove(imageToRemove);
			// Check that only the image with the right name was removed
			expect(mockerode.removeImage).to.have.been.calledOnceWith(
				imageToRemove.name,
			);
			// Check that the database no longer has this image
			expect(await testDb.models('image').select().where(imageToRemove)).to.be
				.empty;
			// Check that the image with the same dockerImageId is still on the database
			expect(
				await testDb
					.models('image')
					.select()
					.where({ dockerImageId: imageWithSameDockerImageId.dockerImageId }),
			).to.have.lengthOf(1);
		},
		{ images },
	);
});
// Delta downloads retag the same engine image with digests that differ from
// the DB names. Removal must match by delta *tag* (delta-one) rather than by
// the DB-name digest, and still only delete the requested DB row.
it('removes image from DB by tag when deltas are being used', async () => {
	const imageToRemove = createDBImage({
		name:
			'registry2.balena-cloud.com/v2/one@sha256:2c969a1ba1c6bc10df53481f48c6a74dbd562cfb41ba58f81beabd03facf5582',
		dockerImageId: 'sha256:image-one-id',
	});
	const imageWithSameDockerImageId = createDBImage({
		name:
			'registry2.balena-cloud.com/v2/two@sha256:2c969a1ba1c6bc10df53481f48c6a74dbd562cfb41ba58f81beabd03facf5582',
		// Same docker id
		dockerImageId: 'sha256:image-one-id',
	});
	// Insert images into the db
	await testDb.models('image').insert([
		imageToRemove,
		// Another image from the same app
		imageWithSameDockerImageId,
	]);
	// Engine image state
	const images = [
		// The image to remove
		createImage(
			{
				Id: imageToRemove.dockerImageId!,
			},
			{
				References: [
					// The image has two deltas with different digests than those in image.name
					'registry2.balena-cloud.com/v2/one:delta-one@sha256:6eb712fc797ff68f258d9032cf292c266cb9bd8be4cbdaaafeb5a8824bb104fd',
					'registry2.balena-cloud.com/v2/two:delta-two@sha256:f1154d76c731f04711e5856b6e6858730e3023d9113124900ac65c2ccc901234',
				],
			},
		),
	];
	// Perform the test with our specially crafted data
	await withMockerode(
		async (mockerode) => {
			// Check that the image is on the engine
			await expect(
				mockerode.getImage(imageToRemove.dockerImageId!).inspect(),
				'image can be found by id before the test',
			).to.not.be.rejected;
			// Check that a single image is returned when given entire object
			expect(
				await testDb.models('image').select().where(imageToRemove),
			).to.have.lengthOf(1);
			// Check that multiple images with the same dockerImageId are returned
			expect(
				await testDb
					.models('image')
					.where({ dockerImageId: imageToRemove.dockerImageId })
					.select(),
			).to.have.lengthOf(2);
			// Now remove these images
			await imageManager.remove(imageToRemove);
			// This tests the behavior: removal targets the delta tag, not the DB name
			expect(mockerode.removeImage).to.have.been.calledOnceWith(
				'registry2.balena-cloud.com/v2/one:delta-one',
			);
			// Check that the database no longer has this image
			expect(await testDb.models('image').select().where(imageToRemove)).to.be
				.empty;
			// Check that the image with the same dockerImageId is still on the database
			expect(
				await testDb
					.models('image')
					.select()
					.where(imageWithSameDockerImageId),
			).to.have.lengthOf(1);
		},
		{ images },
	);
});
}); | the_stack |
import type { AstNode, BinderContext, CheckerContext, Location, MetaRegistry, RangeLike, StringNode, Symbol, SymbolQuery } from '@spyglassmc/core'
import { AsyncBinder, atArray, Dev, ErrorSeverity, Range, ResourceLocationNode, SymbolUtil, SymbolVisibility } from '@spyglassmc/core'
import { localeQuote, localize } from '@spyglassmc/locales'
import type { AdditionalContext } from '../common.js'
import type { AnyTypeNode, AttributeValueNode, BooleanTypeNode, EnumValueNode, IdentifierNode, IndexNode, LiteralTypeValueNode, ModuleNode, StructFieldNode, StructKeyNode, TypeNode } from '../node/index.js'
import { AttributeNode, AttributeTreeNamedValuesNode, AttributeTreeNode, AttributeTreePosValuesNode, DispatcherTypeNode, DispatchStatementNode, DocCommentsNode, DynamicIndexNode, EnumBlockNode, EnumFieldNode, EnumInjectionNode, EnumNode, FloatRangeNode, IndexBodyNode, InjectionNode, IntRangeNode, ListTypeNode, LiteralNode, LiteralTypeNode, NumericTypeNode, PathNode, PrimitiveArrayTypeNode, ReferenceTypeNode, StaticIndexNode, StringTypeNode, StructBlockNode, StructMapKeyNode, StructNode, StructPairFieldNode, StructSpreadFieldNode, TupleTypeNode, TypeAliasNode, TypeBaseNode, TypedNumberNode, TypeParamBlockNode, TypeParamNode, UnionTypeNode, UseStatementNode } from '../node/index.js'
import type { Attribute, AttributeTree, AttributeValue, DispatcherType, DynamicIndex, EnumType, EnumTypeField, Index, KeywordType, ListType, LiteralNumberCaseInsensitiveSuffix, LiteralNumberSuffix, LiteralType, LiteralValue, McdocType, NumericRange, NumericType, NumericTypeKind, ParallelIndices, PrimitiveArrayType, PrimitiveArrayValueKind, ReferenceType, StaticIndex, StringType, StructType, StructTypeField, StructTypePairField, StructTypeSpreadField, TupleType, TypeBase, UnionType } from '../type/index.js'
interface McdocBinderContext extends BinderContext, AdditionalContext { }
/**
 * Binder entry point for a whole mcdoc file. Resolves the file's module
 * identifier from its URI and, if it lies inside a known module root,
 * delegates to {@link module_}. Files outside the root only get a Hint-level
 * error at the beginning of the document.
 */
export const fileModule = AsyncBinder.create<ModuleNode>(async (node, ctx) => {
	const moduleIdentifier = uriToIdentifier(ctx.doc.uri, ctx)
	if (!moduleIdentifier) {
		// Not under any mcdoc root — report and bail without binding.
		ctx.err.report(
			localize('mcdoc.binder.out-of-root', localeQuote(ctx.doc.uri)),
			Range.Beginning, ErrorSeverity.Hint
		)
		return
	}
	const mcdocCtx: McdocBinderContext = {
		...ctx,
		moduleIdentifier,
	}
	return module_(node, mcdocCtx)
})
/**
 * Binds a module: first hoists all top-level declarations so that forward
 * references resolve, then binds each child statement in document order.
 * Children are awaited sequentially — binding has symbol-table side effects.
 */
export async function module_(node: ModuleNode, ctx: McdocBinderContext): Promise<void> {
	hoist(node, ctx)
	for (const child of node.children) {
		switch (child.type) {
			case 'mcdoc:dispatch_statement': await bindDispatchStatement(child, ctx); break
			case 'mcdoc:enum': bindEnum(child, ctx); break
			case 'mcdoc:injection': await bindInjection(child, ctx); break
			case 'mcdoc:struct': await bindStruct(child, ctx); break
			case 'mcdoc:type_alias': await bindTypeAlias(child, ctx); break
			case 'mcdoc:use_statement': await bindUseStatement(child, ctx); break
		}
	}
}
/**
 * Hoist enums, structs, type aliases, and use statements under the module scope.
 *
 * Each hoisted declaration is entered into the `mcdoc` symbol category at
 * `<moduleIdentifier>::<identifier>`; duplicates produce a warning instead of
 * a second declaration.
 */
function hoist(node: ModuleNode, ctx: McdocBinderContext): void {
	for (const child of node.children) {
		switch (child.type) {
			case 'mcdoc:enum': hoistEnum(child); break
			case 'mcdoc:struct': hoistStruct(child); break
			case 'mcdoc:type_alias': hoistTypeAlias(child); break
			case 'mcdoc:use_statement': hoistUseStatement(child); break
		}
	}
	// Enums/structs store their converted type definition as symbol data.
	function hoistEnum(node: EnumNode) {
		hoistFor('enum', node, EnumNode.destruct, n => ({ typeDef: convertEnum(n, ctx) }))
	}
	function hoistStruct(node: StructNode) {
		hoistFor('struct', node, StructNode.destruct, n => ({ typeDef: convertStruct(n, ctx) }))
	}
	function hoistTypeAlias(node: TypeAliasNode) {
		hoistFor('type_alias', node, TypeAliasNode.destruct, n => {
			// An alias without a right-hand side carries no type data.
			const { rhs } = TypeAliasNode.destruct(n)
			if (!rhs) {
				return undefined
			}
			return { typeDef: convertType(rhs, ctx) }
		})
	}
	function hoistUseStatement(node: UseStatementNode) {
		const { binding, path } = UseStatementNode.destruct(node)
		if (!path) {
			return
		}
		const { lastIdentifier } = PathNode.destruct(path)
		// `use a::b as c` binds `c`; plain `use a::b` binds `b`.
		const identifier = binding ?? lastIdentifier
		if (!identifier) {
			return
		}
		ctx.symbols
			.query({ doc: ctx.doc, node }, 'mcdoc', `${ctx.moduleIdentifier}::${identifier.value}`)
			.ifDeclared(symbol => reportDuplicatedDeclaration(ctx, symbol, identifier))
			.elseEnter({
				// File visibility: the binding is only usable within this file.
				data: { subcategory: 'use_statement_binding', visibility: SymbolVisibility.File },
				usage: { type: 'definition', node: identifier, fullRange: node },
			})
	}
	// Shared hoisting logic: destructure the node, skip unnamed declarations,
	// then enter a definition (with `getData`'s payload) unless already declared.
	function hoistFor<N extends AstNode>(subcategory: 'enum' | 'struct' | 'type_alias', node: N, destructor: (node: N) => { docComments?: DocCommentsNode, identifier?: IdentifierNode }, getData: (node: N) => unknown) {
		const { docComments, identifier } = destructor(node)
		if (!identifier?.value) {
			return
		}
		ctx.symbols
			.query({ doc: ctx.doc, node }, 'mcdoc', `${ctx.moduleIdentifier}::${identifier.value}`)
			.ifDeclared(symbol => reportDuplicatedDeclaration(ctx, symbol, identifier))
			.elseEnter({
				data: { data: getData(node), desc: DocCommentsNode.asText(docComments), subcategory },
				usage: { type: 'definition', node: identifier, fullRange: node },
			})
	}
}
/**
 * Binds a `dispatch <location>[<index>] to <target>` statement: references the
 * dispatcher symbol, defines one member per static index key (dynamic keys are
 * skipped), and binds the target type.
 */
async function bindDispatchStatement(node: DispatchStatementNode, ctx: McdocBinderContext): Promise<void> {
	const { attributes, location, index, target } = DispatchStatementNode.destruct(node)
	// An incomplete statement (still being typed) is not bound at all.
	if (!(location && index && target)) {
		return
	}
	const locationStr = ResourceLocationNode.toString(location, 'full')
	ctx.symbols
		.query(ctx.doc, 'mcdoc/dispatcher', locationStr)
		.enter({
			usage: { type: 'reference', node: location, fullRange: node },
		})
	const { parallelIndices } = IndexBodyNode.destruct(index)
	for (const key of parallelIndices) {
		if (DynamicIndexNode.is(key)) {
			// Ignore dynamic indices in dispatch statements.
			continue
		}
		ctx.symbols
			.query(ctx.doc, 'mcdoc/dispatcher', locationStr, asString(key))
			.ifDeclared(symbol => reportDuplicatedDeclaration(ctx, symbol, key, { localeString: 'mcdoc.binder.dispatcher-statement.duplicated-key.related' }))
			.elseEnter({
				data: {
					// The converted attributes and target type are stored on the member
					// symbol so checkers can dispatch without re-reading the AST.
					data: {
						attributes: convertAttributes(attributes, ctx),
						typeDef: convertType(target, ctx),
					},
				},
				usage: { type: 'definition', node: key, fullRange: node },
			})
	}
	await bindType(target, ctx)
}
/**
 * Recursively binds a type expression, walking into container types (lists,
 * tuples, unions, references with type parameters) and delegating to the
 * dedicated binder for dispatchers, enums, structs, and reference paths.
 * Type kinds with nothing to bind (literals, numerics, strings, …) fall
 * through silently.
 */
async function bindType(node: TypeNode, ctx: McdocBinderContext): Promise<void> {
	if (DispatcherTypeNode.is(node)) {
		bindDispatcherType(node, ctx)
	} else if (EnumNode.is(node)) {
		bindEnum(node, ctx)
	} else if (ListTypeNode.is(node)) {
		const { item } = ListTypeNode.destruct(node)
		await bindType(item, ctx)
	} else if (ReferenceTypeNode.is(node)) {
		const { path, typeParameters } = ReferenceTypeNode.destruct(node)
		await bindPath(path, ctx)
		for (const param of typeParameters) {
			await bindType(param, ctx)
		}
	} else if (StructNode.is(node)) {
		await bindStruct(node, ctx)
	} else if (TupleTypeNode.is(node)) {
		const { items } = TupleTypeNode.destruct(node)
		for (const item of items) {
			await bindType(item, ctx)
		}
	} else if (UnionTypeNode.is(node)) {
		const { members } = UnionTypeNode.destruct(node)
		for (const member of members) {
			await bindType(member, ctx)
		}
	}
}
/**
 * Binds a dispatcher type usage (`location[index]`): every static index key
 * becomes a reference to the corresponding dispatcher member symbol.
 */
function bindDispatcherType(node: DispatcherTypeNode, ctx: McdocBinderContext): void {
	const { index, location } = DispatcherTypeNode.destruct(node)
	const locationStr = ResourceLocationNode.toString(location, 'full')
	const { parallelIndices } = IndexBodyNode.destruct(index)
	for (const key of parallelIndices) {
		if (DynamicIndexNode.is(key)) {
			// Although it is technically possible to bind some of the dynamic indices as references
			// of struct keys, it is rather complicated to do so. We will ignore them for now.
			continue
		}
		ctx.symbols
			.query(ctx.doc, 'mcdoc/dispatcher', locationStr, asString(key))
			.enter({ usage: { type: 'reference', node: key, fullRange: node } })
	}
}
/**
 * Binds a path reference: makes sure the file defining the target is bound
 * first, then amends a reference usage onto every path segment's symbol.
 * `super` segments (literals) are marked as skip-renaming since they are
 * keywords, not identifiers.
 */
async function bindPath(node: PathNode, ctx: McdocBinderContext): Promise<void> {
	await ensureFileBound(node, ctx)
	resolvePath(node, ctx, {
		callback: (identifiers, identNode) => {
			ctx.symbols
				.query({ doc: ctx.doc, node: identNode }, 'mcdoc', pathArrayToString(identifiers))
				.amend({ usage: { type: 'reference', node: identNode, fullRange: node, skipRenaming: LiteralNode.is(identNode) } })
		},
		reportErrors: true,
	})
}
/**
 * Binds an enum's fields as members of the already-hoisted enum symbol.
 * Requires the node to have been hoisted (symbol subcategory `enum`) first.
 */
function bindEnum(node: EnumNode, ctx: McdocBinderContext): void {
	const { block } = EnumNode.destruct(node)
	if (node.symbol?.subcategory !== 'enum') {
		return
	}
	const query = ctx.symbols.query({ doc: ctx.doc, node }, 'mcdoc', ...node.symbol.path)
	Dev.assertDefined(query.symbol)
	bindEnumBlock(block, ctx, query)
}
/**
 * Defines one member symbol per enum field under `query`; duplicate field
 * identifiers are reported instead of redefined.
 * `options.extendsTypeDefData` is currently unused — presumably reserved for
 * enum injections (see bindInjection); TODO confirm before removing.
 */
function bindEnumBlock(node: EnumBlockNode, ctx: McdocBinderContext, query: SymbolQuery, options: { extendsTypeDefData?: boolean } = {}): void {
	const { fields } = EnumBlockNode.destruct(node)
	for (const field of fields) {
		const { identifier } = EnumFieldNode.destruct(field)
		query.member(identifier.value, fieldQuery => fieldQuery
			.ifDeclared(symbol => reportDuplicatedDeclaration(ctx, symbol, identifier))
			.elseEnter({ usage: { type: 'definition', node: identifier, fullRange: field } })
		)
	}
}
/**
 * Binds an injection statement. Enum injections are not implemented yet;
 * struct injections are not handled here at all.
 */
async function bindInjection(node: InjectionNode, ctx: McdocBinderContext): Promise<void> {
	const { injection } = InjectionNode.destruct(node)
	if (EnumInjectionNode.is(injection)) {
		// TODO
		// const { } = EnumInjectionNode.destruct(injection)
		// bindEnumBlock(block, ctx, query, { extendsTypeDefData: true })
	}
}
/**
 * Binds a struct's fields as members of the already-hoisted struct symbol.
 * Requires the node to have been hoisted (symbol subcategory `struct`) first.
 */
async function bindStruct(node: StructNode, ctx: McdocBinderContext): Promise<void> {
	const { block } = StructNode.destruct(node)
	if (node.symbol?.subcategory !== 'struct') {
		return
	}
	const query = ctx.symbols.query({ doc: ctx.doc, node }, 'mcdoc', ...node.symbol.path)
	Dev.assertDefined(query.symbol)
	await bindStructBlock(block, ctx, query)
}
/**
 * Binds every field of a struct block: literal pair keys become member symbol
 * definitions (map keys `[Type]` do not — they are types, not names), and the
 * value/spread types are recursively bound.
 * `options.extendsTypeDefData` is currently unused — presumably reserved for
 * struct injections; TODO confirm before removing.
 */
async function bindStructBlock(node: StructBlockNode, ctx: McdocBinderContext, query: SymbolQuery, options: { extendsTypeDefData?: boolean } = {}): Promise<void> {
	const { fields } = StructBlockNode.destruct(node)
	for (const field of fields) {
		if (StructPairFieldNode.is(field)) {
			const { key, type } = StructPairFieldNode.destruct(field)
			if (!StructMapKeyNode.is(key)) {
				query.member(key.value, fieldQuery => fieldQuery
					.ifDeclared(symbol => reportDuplicatedDeclaration(ctx, symbol, key))
					.elseEnter({ usage: { type: 'definition', node: key, fullRange: field } })
				)
			}
			await bindType(type, ctx)
		} else {
			// Spread field (`...Type`): only the spread type itself is bound.
			const { type } = StructSpreadFieldNode.destruct(field)
			await bindType(type, ctx)
		}
	}
}
/**
 * Binds a type alias: its type parameters (if any) are declared both as
 * members of the alias symbol and as block-scoped local symbols, parameter
 * constraints are bound as paths, and the right-hand side type is bound.
 */
async function bindTypeAlias(node: TypeAliasNode, ctx: McdocBinderContext): Promise<void> {
	const { identifier, rhs, typeParams } = TypeAliasNode.destruct(node)
	if (!identifier?.value) {
		return
	}
	if (typeParams) {
		// Type parameters are added as 1) members of the type alias symbol and 2) local symbols on the type alias AST node.
		node.locals = Object.create(null)
		const { params } = TypeParamBlockNode.destruct(typeParams)
		const query = ctx.symbols.query({ doc: ctx.doc, node }, 'mcdoc', `${ctx.moduleIdentifier}::${identifier.value}`)
		for (const param of params) {
			const { constraint, identifier: paramIdentifier } = TypeParamNode.destruct(param)
			if (query.symbol?.subcategory === 'type_alias' && paramIdentifier.value) {
				// 1) Member of the alias symbol (for cross-file resolution).
				query.member(paramIdentifier.value, q => q
					.ifDeclared(symbol => reportDuplicatedDeclaration(ctx, symbol, paramIdentifier))
					.elseEnter({ usage: { type: 'declaration', node: paramIdentifier, fullRange: param } })
				)
				// 2) Block-scoped local symbol on the alias node itself.
				ctx.symbols
					.query({ doc: ctx.doc, node }, 'mcdoc', `${ctx.moduleIdentifier}::${identifier.value}::${paramIdentifier.value}`)
					.ifDeclared(symbol => reportDuplicatedDeclaration(ctx, symbol, paramIdentifier))
					.elseEnter({ data: { visibility: SymbolVisibility.Block }, usage: { type: 'declaration', node: paramIdentifier, fullRange: param } })
			}
			if (constraint) {
				await bindPath(constraint, ctx)
			}
		}
	}
	if (rhs) {
		await bindType(rhs, ctx)
	}
}
/**
 * Binds the path of a `use` statement (the binding identifier itself was
 * already entered during hoisting). A statement with no path is a no-op.
 */
async function bindUseStatement(node: UseStatementNode, ctx: McdocBinderContext): Promise<void> {
	const { path } = UseStatementNode.destruct(node)
	if (path) {
		await bindPath(path, ctx)
	}
}
/**
 * Registers the mcdoc binders on the meta registry. Currently only the
 * file-level module binder is needed.
 */
export function registerMcdocBinders(meta: MetaRegistry) {
	meta.registerBinder<ModuleNode>('mcdoc:module', fileModule)
}
/**
 * Reports a warning at `range` for a duplicated declaration of `symbol`,
 * with a related-information entry pointing at the original declaration
 * site. The locale string defaults to the generic duplicated-declaration
 * message; dispatch statements pass their own key-specific message.
 */
function reportDuplicatedDeclaration(ctx: McdocBinderContext, symbol: Symbol, range: RangeLike, options: { localeString: 'mcdoc.binder.dispatcher-statement.duplicated-key.related' | 'mcdoc.binder.duplicated-declaration' } = { localeString: 'mcdoc.binder.duplicated-declaration' }) {
	ctx.err.report(
		localize(options.localeString, localeQuote(symbol.identifier)),
		range, ErrorSeverity.Warning,
		{
			related: [{
				location: SymbolUtil.getDeclaredLocation(symbol) as Location,
				message: localize(`${options.localeString}.related`, localeQuote(symbol.identifier)),
			}],
		}
	)
}
/**
 * Resolves a path node into an array of identifier segments.
 *
 * Absolute paths start from the root; relative paths start from the current
 * module (moduleIdentifier minus its leading `::`). Each `super` literal pops
 * a segment; going above the root yields `undefined` (optionally reported).
 * `options.callback` is invoked after each segment with the identifiers
 * accumulated so far, letting callers bind a reference per segment.
 */
function resolvePath(path: PathNode, ctx: McdocBinderContext, options: { callback?: (identifiers: readonly string[], node: IdentifierNode | LiteralNode) => void, reportErrors?: boolean } = {}): string[] | undefined {
	const { children, isAbsolute } = PathNode.destruct(path)
	const identifiers: string[] = isAbsolute
		? []
		: ctx.moduleIdentifier.slice(2).split('::')
	for (const child of children) {
		switch (child.type) {
			case 'mcdoc:identifier':
				identifiers.push(child.value)
				break
			case 'mcdoc:literal':
				// super
				if (identifiers.length === 0) {
					if (options.reportErrors) {
						ctx.err.report(localize('mcdoc.binder.path.super-from-root'), child)
					}
					return undefined
				}
				identifiers.pop()
				break
			default:
				Dev.assertNever(child)
		}
		options.callback?.(identifiers, child)
	}
	return identifiers
}
/**
 * Ensures the file declaring the symbol referenced by `path` has been bound,
 * so its definitions exist in the global symbol table. Reports an error if
 * the containing module or the final identifier cannot be found. Returns the
 * resolved identifier segments, or `undefined` on any failure.
 */
async function ensureFileBound(path: PathNode, ctx: McdocBinderContext): Promise<string[] | undefined> {
	const identifiers = resolvePath(path, ctx)
	if (!identifiers?.length) {
		return undefined
	}
	// The module is everything but the last segment.
	const referencedModuleFile = `::${identifiers.slice(0, -1).join('::')}`
	const referencedModuleUri = identifierToUri(referencedModuleFile, ctx)
	const referencedPath = pathArrayToString(identifiers)
	if (!referencedModuleUri) {
		ctx.err.report(localize('mcdoc.binder.path.unknown-module', localeQuote(referencedModuleFile)), path)
		return undefined
	}
	await ctx.ensureBound(referencedModuleUri)
	// After binding the target file, the full path must have a definition.
	if (!ctx.symbols.global.mcdoc?.[referencedPath]?.definition?.length) {
		ctx.err.report(localize('mcdoc.binder.path.unknown-identifier', localeQuote(atArray(identifiers, -1)!)), path)
		return undefined
	}
	return identifiers
}
/**
 * Looks up the URI of the file in which the given module identifier was
 * first defined, or `undefined` if the module symbol is unknown or has no
 * definition locations.
 */
function identifierToUri(module: string, ctx: McdocBinderContext): string | undefined {
	const moduleSymbol = ctx.symbols.global.mcdoc?.[module]
	const firstDefinition = moduleSymbol?.definition?.[0]
	return firstDefinition?.uri
}
/**
 * Reverse lookup of {@link identifierToUri}: finds the module symbol that has
 * a definition located at `uri` and returns its identifier, or `undefined`
 * when no module symbol is defined in that file.
 */
function uriToIdentifier(uri: string, ctx: CheckerContext): string | undefined {
	const allSymbols = Object.values(ctx.symbols.global.mcdoc ?? {})
	for (const symbol of allSymbols) {
		const definedHere = symbol.definition?.some(loc => loc.uri === uri)
		if (symbol.subcategory === 'module' && definedHere) {
			return symbol.identifier
		}
	}
	return undefined
}
/**
 * Joins identifier segments into a canonical `::`-prefixed path string.
 * Passing `undefined` yields `undefined` (overloads preserve the narrower
 * return type for defined inputs); an empty array yields `'::'`.
 */
function pathArrayToString(path: readonly string[]): string
function pathArrayToString(path: readonly string[] | undefined): string | undefined
function pathArrayToString(path: readonly string[] | undefined): string | undefined {
	if (!path) {
		return undefined
	}
	return `::${path.join('::')}`
}
/**
 * Converts a type AST node into its serializable {@link McdocType}
 * representation, dispatching on the node's `type` tag. The `default` branch
 * is an exhaustiveness check — a new TypeNode variant must be handled here.
 */
function convertType(node: TypeNode, ctx: McdocBinderContext): McdocType {
	switch (node.type) {
		case 'mcdoc:enum': return convertEnum(node, ctx)
		case 'mcdoc:struct': return convertStruct(node, ctx)
		case 'mcdoc:type/any': return convertAny(node, ctx)
		case 'mcdoc:type/boolean': return convertBoolean(node, ctx)
		case 'mcdoc:type/dispatcher': return convertDispatcher(node, ctx)
		case 'mcdoc:type/list': return convertList(node, ctx)
		case 'mcdoc:type/literal': return convertLiteral(node, ctx)
		case 'mcdoc:type/numeric_type': return convertNumericType(node, ctx)
		case 'mcdoc:type/primitive_array': return convertPrimitiveArray(node, ctx)
		case 'mcdoc:type/string': return convertString(node, ctx)
		case 'mcdoc:type/reference': return convertReference(node, ctx)
		case 'mcdoc:type/tuple': return convertTuple(node, ctx)
		case 'mcdoc:type/union': return convertUnion(node, ctx)
		default: return Dev.assertNever(node)
	}
}
/**
 * Converts the parts shared by every type node (attributes and index bodies)
 * into the shared {@link TypeBase} fields, minus the discriminating `kind`.
 * Dispatcher types pass `skipFirstIndexBody` because their first index body
 * is the dispatcher index itself, not an access on the resulting type.
 */
function convertBase(node: TypeBaseNode<any>, ctx: McdocBinderContext, options: { skipFirstIndexBody?: boolean } = {}): Omit<TypeBase, 'kind'> {
	const { attributes, indices } = TypeBaseNode.destruct(node)
	return {
		attributes: convertAttributes(attributes, ctx),
		indices: convertIndexBodies(options.skipFirstIndexBody ? indices.slice(1) : indices, ctx),
	}
}
/**
 * Converts attribute nodes; an empty list becomes `undefined` to keep the
 * serialized type definitions compact.
 */
function convertAttributes(nodes: AttributeNode[], ctx: McdocBinderContext): Attribute[] | undefined {
	return undefineEmptyArray(nodes.map(n => convertAttribute(n, ctx)))
}
/**
 * Returns the array unchanged when it has elements, and `undefined` when it
 * is empty — used to omit empty optional lists from serialized type data.
 */
function undefineEmptyArray<T>(array: T[]): T[] | undefined {
	return array.length > 0 ? array : undefined
}
/**
 * Converts an attribute node (`#[name=value]`) into its serializable form.
 */
function convertAttribute(node: AttributeNode, ctx: McdocBinderContext): Attribute {
	const { name, value } = AttributeNode.destruct(node)
	return {
		name: name.value,
		value: convertAttributeValue(value, ctx),
	}
}
/**
 * Converts an attribute value, which is either a nested tree of values or a
 * plain type expression.
 */
function convertAttributeValue(node: AttributeValueNode, ctx: McdocBinderContext): AttributeValue {
	if (node.type === 'mcdoc:attribute/tree') {
		return convertAttributeTree(node, ctx)
	} else {
		return convertType(node, ctx)
	}
}
/**
 * Converts an attribute tree into a flat record: positional values are keyed
 * by their index, named values by their key. Named values are written after
 * positional ones, so a name that collides with an index wins.
 */
function convertAttributeTree(node: AttributeTreeNode, ctx: McdocBinderContext): AttributeTree {
	const ans: AttributeTree = {}
	const { named, positional } = AttributeTreeNode.destruct(node)
	if (positional) {
		const { values } = AttributeTreePosValuesNode.destruct(positional)
		for (const [i, child] of values.entries()) {
			ans[i] = convertAttributeValue(child, ctx)
		}
	}
	if (named) {
		const { values } = AttributeTreeNamedValuesNode.destruct(named)
		for (const { key, value } of values) {
			ans[key.value] = convertAttributeValue(value, ctx)
		}
	}
	return ans
}
/**
 * Converts a list of index bodies; an empty list becomes `undefined`.
 */
function convertIndexBodies(nodes: IndexBodyNode[], ctx: McdocBinderContext): ParallelIndices[] | undefined {
	return undefineEmptyArray(nodes.map(n => convertIndexBody(n, ctx)))
}
/**
 * Converts one index body `[a, b, …]` into its parallel indices.
 */
function convertIndexBody(node: IndexBodyNode, ctx: McdocBinderContext): ParallelIndices {
	const { parallelIndices } = IndexBodyNode.destruct(node)
	return parallelIndices.map(n => convertIndex(n, ctx))
}
/**
 * Converts a single index, which is either static (a literal key) or dynamic
 * (an accessor path like `[%parent.id]`).
 */
function convertIndex(node: IndexNode, ctx: McdocBinderContext): Index {
	return StaticIndexNode.is(node)
		? convertStaticIndex(node, ctx)
		: convertDynamicIndex(node, ctx)
}
/** Converts a static index into its string key. */
function convertStaticIndex(node: StaticIndexNode, ctx: McdocBinderContext): StaticIndex {
	return {
		kind: 'static',
		value: asString(node),
	}
}
/** Converts a dynamic index into its accessor segment strings. */
function convertDynamicIndex(node: DynamicIndexNode, ctx: McdocBinderContext): DynamicIndex {
	const { keys } = DynamicIndexNode.destruct(node)
	return {
		kind: 'dynamic',
		accessor: keys.map(asString),
	}
}
/**
 * Converts an enum declaration into an {@link EnumType}, preserving its
 * value kind (e.g. byte/int/string) and all fields.
 */
function convertEnum(node: EnumNode, ctx: McdocBinderContext): EnumType {
	const { block, enumKind } = EnumNode.destruct(node)
	return {
		...convertBase(node, ctx),
		kind: 'enum',
		enumKind,
		values: convertEnumBlock(block, ctx),
	}
}
/** Converts every field of an enum block. */
function convertEnumBlock(node: EnumBlockNode, ctx: McdocBinderContext): EnumTypeField[] {
	const { fields } = EnumBlockNode.destruct(node)
	return fields.map(n => convertEnumField(n, ctx))
}
/** Converts one `Identifier = value` enum field, including its attributes. */
function convertEnumField(node: EnumFieldNode, ctx: McdocBinderContext): EnumTypeField {
	const { attributes, identifier, value } = EnumFieldNode.destruct(node)
	return {
		attributes: convertAttributes(attributes, ctx),
		identifier: identifier.value,
		value: convertEnumValue(value, ctx),
	}
}
/**
 * Extracts the runtime value of an enum field: typed numbers unwrap to their
 * numeric value; string values pass through as-is.
 */
function convertEnumValue(node: EnumValueNode, ctx: McdocBinderContext): string | number | bigint {
	if (TypedNumberNode.is(node)) {
		const { value } = TypedNumberNode.destruct(node)
		return value.value
	}
	return node.value
}
/**
 * Converts a struct declaration into a {@link StructType} with all of its
 * pair and spread fields.
 */
function convertStruct(node: StructNode, ctx: McdocBinderContext): StructType {
	const { block } = StructNode.destruct(node)
	return {
		...convertBase(node, ctx),
		kind: 'struct',
		fields: convertStructBlock(block, ctx),
	}
}
/** Converts every field of a struct block. */
function convertStructBlock(node: StructBlockNode, ctx: McdocBinderContext): StructTypeField[] {
	const { fields } = StructBlockNode.destruct(node)
	return fields.map(n => convertStructField(n, ctx))
}
/** Dispatches between `key: Type` pair fields and `...Type` spread fields. */
function convertStructField(node: StructFieldNode, ctx: McdocBinderContext): StructTypeField {
	return StructPairFieldNode.is(node)
		? convertStructPairField(node, ctx)
		: convertStructSpreadField(node, ctx)
}
/** Converts a `key: Type` field. */
function convertStructPairField(node: StructPairFieldNode, ctx: McdocBinderContext): StructTypePairField {
	const { key, type } = StructPairFieldNode.destruct(node)
	return {
		kind: 'pair',
		key: convertStructKey(key, ctx),
		type: convertType(type, ctx),
	}
}
/**
 * Converts a struct key: map keys (`[Type]`) become a type, literal keys
 * become their plain string value.
 */
function convertStructKey(node: StructKeyNode, ctx: McdocBinderContext): string | McdocType {
	if (StructMapKeyNode.is(node)) {
		const { type } = StructMapKeyNode.destruct(node)
		return convertType(type, ctx)
	} else {
		return asString(node)
	}
}
/** Converts a `...Type` spread field, including its attributes. */
function convertStructSpreadField(node: StructSpreadFieldNode, ctx: McdocBinderContext): StructTypeSpreadField {
	const { attributes, type } = StructSpreadFieldNode.destruct(node)
	return {
		kind: 'spread',
		attributes: convertAttributes(attributes, ctx),
		type: convertType(type, ctx),
	}
}
/** Converts the `any` keyword type. */
function convertAny(node: AnyTypeNode, ctx: McdocBinderContext): KeywordType {
	return {
		...convertBase(node, ctx),
		kind: 'any',
	}
}
/** Converts the `boolean` keyword type. */
function convertBoolean(node: BooleanTypeNode, ctx: McdocBinderContext): KeywordType {
	return {
		...convertBase(node, ctx),
		kind: 'boolean',
	}
}
/**
 * Converts a dispatcher type usage `registry[index]`. The dispatcher's own
 * index body is consumed here as `index`, so it is skipped when converting
 * the base's trailing index bodies.
 */
function convertDispatcher(node: DispatcherTypeNode, ctx: McdocBinderContext): DispatcherType {
	const { index, location } = DispatcherTypeNode.destruct(node)
	return {
		...convertBase(node, ctx, {
			skipFirstIndexBody: true,
		}),
		kind: 'dispatcher',
		index: convertIndexBody(index, ctx),
		registry: ResourceLocationNode.toString(location, 'full'),
	}
}
/** Converts a list type `[Item] @ lengthRange`. */
function convertList(node: ListTypeNode, ctx: McdocBinderContext): ListType {
	const { item, lengthRange } = ListTypeNode.destruct(node)
	return {
		...convertBase(node, ctx),
		kind: 'list',
		item: convertType(item, ctx),
		lengthRange: convertRange(lengthRange, ctx),
	}
}
/**
 * Converts a numeric range node into a `[min, max]` pair; an open bound maps
 * to `undefined`. The overloads let callers with a definitely-present node
 * get a non-optional range.
 */
function convertRange(node: FloatRangeNode | IntRangeNode, ctx: McdocBinderContext): NumericRange
function convertRange(node: FloatRangeNode | IntRangeNode | undefined, ctx: McdocBinderContext): NumericRange | undefined
function convertRange(node: FloatRangeNode | IntRangeNode | undefined, ctx: McdocBinderContext): NumericRange | undefined {
	if (!node) {
		return undefined
	}
	const { min, max } = FloatRangeNode.is(node) ? FloatRangeNode.destruct(node) : IntRangeNode.destruct(node)
	return [min?.value, max?.value]
}
/** Converts a literal type (e.g. `true`, `1b`, `"foo"`). */
function convertLiteral(node: LiteralTypeNode, ctx: McdocBinderContext): LiteralType {
	const { value } = LiteralTypeNode.destruct(node)
	return {
		...convertBase(node, ctx),
		kind: 'literal',
		value: convertLiteralValue(value, ctx),
	}
}
/**
 * Converts a literal value node into its tagged runtime value: literals are
 * booleans (`true`/`false` keywords), typed numbers keep their numeric value
 * plus normalized suffix, anything else is a string.
 */
function convertLiteralValue(node: LiteralTypeValueNode, ctx: McdocBinderContext): LiteralValue {
	if (LiteralNode.is(node)) {
		return {
			kind: 'boolean',
			value: node.value === 'true',
		}
	} else if (TypedNumberNode.is(node)) {
		const { suffix, value } = TypedNumberNode.destruct(node)
		return {
			kind: 'number',
			value: value.value,
			suffix: convertLiteralNumberSuffix(suffix, ctx),
		}
	} else {
		return {
			kind: 'string',
			value: node.value,
		}
	}
}
/**
 * Normalizes a number suffix literal (e.g. `B`, `l`) to its lowercase
 * canonical form, or `undefined` when absent.
 */
function convertLiteralNumberSuffix(node: LiteralNode | undefined, ctx: McdocBinderContext): LiteralNumberSuffix | undefined {
	const suffix = node?.value as LiteralNumberCaseInsensitiveSuffix | undefined
	return suffix?.toLowerCase() as Lowercase<Exclude<typeof suffix, undefined>> | undefined
}
/** Converts a numeric type (byte/short/int/long/float/double) with its value range. */
function convertNumericType(node: NumericTypeNode, ctx: McdocBinderContext): NumericType {
	const { numericKind, valueRange } = NumericTypeNode.destruct(node)
	return {
		...convertBase(node, ctx),
		kind: numericKind.value as NumericTypeKind,
		valueRange: convertRange(valueRange, ctx),
	}
}
/**
 * Converts a primitive array type (`byte[]`, `int[]`, `long[]`), deriving the
 * kind string (e.g. `byte_array`) from the element kind.
 */
function convertPrimitiveArray(node: PrimitiveArrayTypeNode, ctx: McdocBinderContext): PrimitiveArrayType {
	const { arrayKind, lengthRange, valueRange } = PrimitiveArrayTypeNode.destruct(node)
	return {
		...convertBase(node, ctx),
		kind: `${arrayKind.value as PrimitiveArrayValueKind}_array`,
		lengthRange: convertRange(lengthRange, ctx),
		valueRange: convertRange(valueRange, ctx),
	}
}
/** Converts a string type with its optional length range. */
function convertString(node: StringTypeNode, ctx: McdocBinderContext): StringType {
	const { lengthRange } = StringTypeNode.destruct(node)
	return {
		...convertBase(node, ctx),
		kind: 'string',
		lengthRange: convertRange(lengthRange, ctx),
	}
}
/**
 * Converts a reference type: the path is resolved to its canonical `::` form
 * and any type parameters are converted recursively.
 */
function convertReference(node: ReferenceTypeNode, ctx: McdocBinderContext): ReferenceType {
	const { path, typeParameters } = ReferenceTypeNode.destruct(node)
	return {
		...convertBase(node, ctx),
		kind: 'reference',
		path: pathArrayToString(resolvePath(path, ctx)),
		typeParameters: undefineEmptyArray(typeParameters.map(n => convertType(n, ctx))),
	}
}
/** Converts a tuple type `[A, B, …]`. */
function convertTuple(node: TupleTypeNode, ctx: McdocBinderContext): TupleType {
	const { items } = TupleTypeNode.destruct(node)
	return {
		...convertBase(node, ctx),
		kind: 'tuple',
		items: items.map(n => convertType(n, ctx)),
	}
}
/** Converts a union type `(A | B | …)`. */
function convertUnion(node: UnionTypeNode, ctx: McdocBinderContext): UnionType {
	const { members } = UnionTypeNode.destruct(node)
	return {
		...convertBase(node, ctx),
		kind: 'union',
		members: members.map(n => convertType(n, ctx)),
	}
}
function asString(node: IdentifierNode | LiteralNode | StringNode | ResourceLocationNode): string {
if (ResourceLocationNode.is(node)) {
return ResourceLocationNode.toString(node, 'short')
}
return node.value
} | the_stack |
import Vector2 from './Vector2';
import Ring from './Ring';
/**
 * Per-theme color table for the drawer. Keys other than BACKGROUND match
 * element symbols (presumably: C = carbon, O = oxygen, etc. — confirm against
 * the renderer). NOTE(review): the name looks like a typo of "IThemeColors",
 * but it is part of the published API surface and must not be renamed here.
 */
export interface IThemeColols {
    C: string;
    O: string;
    N: string;
    F: string;
    CL: string;
    BR: string;
    I: string;
    P: string;
    S: string;
    B: string;
    SI: string;
    H: string;
    /** Canvas background color. */
    BACKGROUND: string;
}
/**
 * User-configurable drawing options. All fields are optional; they are merged
 * with the drawer's defaults (see Drawer.defaultOptions / extend()). Default
 * values are not visible from this declaration file.
 */
export interface ISmilesOptionsDef {
    /** Canvas width in pixels. */
    width?: number;
    /** Canvas height in pixels. */
    height?: number;
    bondThickness?: number;
    bondLength?: number;
    shortBondLength?: number;
    /** Spacing between the two lines of a double bond. */
    bondSpacing?: number;
    dCircle?: number;
    atomVisualization?: string;
    ringVisualization?: string;
    ringAromaticVisualization?: string;
    /** Whether to draw stereochemistry information. */
    isomeric?: boolean;
    /** Enables debug rendering output. */
    debug?: boolean;
    terminalCarbons?: boolean;
    explicitHydrogens?: boolean;
    overlapSensitivity?: number;
    overlapResolutionIterations?: number;
    compactDrawing?: boolean;
    fontSizeLarge?: number;
    fontSizeSmall?: number;
    padding?: number;
    /** Use the experimental smallest-set-of-smallest-rings algorithm. */
    experimentalSSSR?: boolean;
    // kk* options: thresholds/iteration limits — presumably for the
    // Kamada–Kawai layout used on bridged rings; confirm in Drawer source.
    kkThreshold?: number;
    kkInnerThreshold?: number;
    kkMaxIteration?: number;
    kkMaxInnerIteration?: number;
    kkMaxEnergy?: number;
    /** Color themes selectable by name via the `theme` option. */
    themes?: {
        dark: IThemeColols;
        light: IThemeColols;
    };
}
/**
 * The merged option set used internally by the drawer. Extends the
 * user-facing options with values that, judging by their names, are
 * precomputed from the base options (e.g. half of bondSpacing) —
 * TODO confirm against the implementation.
 */
export interface IOptions extends ISmilesOptionsDef {
    id?: string;
    theme?: string;
    halfBondSpacing?: number;
    bondLengthSq?: number;
    halfFontSizeLarge?: number;
    quarterFontSizeLarge?: number;
    fifthFontSizeSmall?: number;
}
/**
 * The main class of the application representing the smiles drawer
 *
 * @property {Graph} graph The graph associated with this SmilesDrawer.Drawer instance.
 * @property {Number} ringIdCounter An internal counter to keep track of ring ids.
 * @property {Number} ringConnectionIdCounter An internal counter to keep track of ring connection ids.
 * @property {CanvasWrapper} canvasWrapper The CanvasWrapper associated with this SmilesDrawer.Drawer instance.
 * @property {Number} totalOverlapScore The current internal total overlap score.
 * @property {Object} defaultOptions The default options.
 * @property {Object} opts The merged options.
 * @property {Object} theme The current theme.
 */
declare class Drawer {
    graph: any;
    doubleBondConfigCount: any;
    doubleBondConfig: any;
    ringIdCounter: any;
    ringConnectionIdCounter: any;
    canvasWrapper: any;
    totalOverlapScore: any;
    defaultOptions: ISmilesOptionsDef;
    opts: IOptions;
    theme: any;
    infoOnly: any;
    themeManager: any;
    rings: any;
    ringConnections: any;
    bridgedRing: any;
    data: any;
    originalRings: any;
    originalRingConnections: any;
    /**
     * The constructor for the class SmilesDrawer.
     *
     * @param {Object} options An object containing custom values for different options. It is merged with the default options.
     */
    constructor(options: any);
    /**
     * A helper method to extend the default options with user supplied ones.
     */
    extend(): {};
    /**
     * Draws the parsed smiles data to a canvas element.
     *
     * @param {Object} data The tree returned by the smiles parser.
     * @param {(String|HTMLElement)} target The id of the HTML canvas element the structure is drawn to - or the element itself.
     * @param {String} themeName='dark' The name of the theme to use. Built-in themes are 'light' and 'dark'.
     * @param {Boolean} infoOnly=false Only output info on the molecule without drawing anything to the canvas.
     */
    draw(data: any, target: any, themeName?: string, infoOnly?: boolean): void;
    /**
     * Returns the number of rings this edge is a part of.
     *
     * @param {Number} edgeId The id of an edge.
     * @returns {Number} The number of rings the provided edge is part of.
     */
    edgeRingCount(edgeId: any): number;
    /**
     * Returns an array containing the bridged rings associated with this molecule.
     *
     * @returns {Ring[]} An array containing all bridged rings associated with this molecule.
     */
    getBridgedRings(): any[];
    /**
     * Returns an array containing all fused rings associated with this molecule.
     *
     * @returns {Ring[]} An array containing all fused rings associated with this molecule.
     */
    getFusedRings(): any[];
    /**
     * Returns an array containing all spiros associated with this molecule.
     *
     * @returns {Ring[]} An array containing all spiros associated with this molecule.
     */
    getSpiros(): any[];
    /**
     * Returns a string containing a semicolon and new-line separated list of ring properties: Id; Members Count; Neighbours Count; IsSpiro; IsFused; IsBridged; Ring Count (subrings of bridged rings)
     *
     * @returns {String} A string as described in the method description.
     */
    printRingInfo(): string;
    /**
     * Rotates the drawing to make the widest dimension horizontal.
     */
    rotateDrawing(): void;
    /**
     * Returns the total overlap score of the current molecule.
     *
     * @returns {Number} The overlap score.
     */
    getTotalOverlapScore(): any;
    /**
     * Returns the ring count of the current molecule.
     *
     * @returns {Number} The ring count.
     */
    getRingCount(): any;
    /**
     * Checks whether or not the current molecule has a bridged ring.
     *
     * @returns {Boolean} A boolean indicating whether or not the current molecule has a bridged ring.
     */
    hasBridgedRing(): any;
    /**
     * Returns the number of heavy atoms (non-hydrogen) in the current molecule.
     *
     * @returns {Number} The heavy atom count.
     */
    getHeavyAtomCount(): number;
    /**
     * Returns the molecular formula of the loaded molecule as a string.
     *
     * @returns {String} The molecular formula.
     */
    getMolecularFormula(): string;
    /**
     * Returns the type of the ringbond (e.g. '=' for a double bond). The ringbond represents the break in a ring introduced when creating the MST. If the two vertices supplied as arguments are not part of a common ringbond, the method returns null.
     *
     * @param {Vertex} vertexA A vertex.
     * @param {Vertex} vertexB A vertex.
     * @returns {(String|null)} Returns the ringbond type or null, if the two supplied vertices are not connected by a ringbond.
     */
    getRingbondType(vertexA: any, vertexB: any): any;
    initDraw(data: any, themeName: any, infoOnly: any): void;
    processGraph(): void;
    /**
     * Initializes rings and ringbonds for the current molecule.
     */
    initRings(): void;
    initHydrogens(): void;
    /**
     * Returns all rings connected by bridged bonds starting from the ring with the supplied ring id.
     *
     * @param {Number} ringId A ring id.
     * @returns {Number[]} An array containing all ring ids of rings part of a bridged ring system.
     */
    getBridgedRingRings(ringId: any): any;
    /**
     * Checks whether or not a ring is part of a bridged ring.
     *
     * @param {Number} ringId A ring id.
     * @returns {Boolean} A boolean indicating whether or not the supplied ring (by id) is part of a bridged ring system.
     */
    isPartOfBridgedRing(ringId: any): boolean;
    /**
     * Creates a bridged ring.
     *
     * @param {Number[]} ringIds An array of ids of rings involved in the bridged ring.
     * @param {Number} sourceVertexId The vertex id to start the bridged ring discovery from.
     * @returns {Ring} The bridged ring.
     */
    createBridgedRing(ringIds: any, sourceVertexId: any): Ring;
    /**
     * Checks whether or not two vertices are in the same ring.
     *
     * @param {Vertex} vertexA A vertex.
     * @param {Vertex} vertexB A vertex.
     * @returns {Boolean} A boolean indicating whether or not the two vertices are in the same ring.
     */
    areVerticesInSameRing(vertexA: any, vertexB: any): boolean;
    /**
     * Returns an array of ring ids shared by both vertices.
     *
     * @param {Vertex} vertexA A vertex.
     * @param {Vertex} vertexB A vertex.
     * @returns {Number[]} An array of ids of rings shared by the two vertices.
     */
    getCommonRings(vertexA: any, vertexB: any): any[];
    /**
     * Returns the aromatic or largest ring shared by the two vertices.
     *
     * @param {Vertex} vertexA A vertex.
     * @param {Vertex} vertexB A vertex.
     * @returns {(Ring|null)} If an aromatic common ring exists, that ring, else the largest (non-aromatic) ring, else null.
     */
    getLargestOrAromaticCommonRing(vertexA: any, vertexB: any): any;
    /**
     * Returns an array of vertices positioned at a specified location.
     *
     * @param {Vector2} position The position to search for vertices.
     * @param {Number} radius The radius within to search.
     * @param {Number} excludeVertexId A vertex id to be excluded from the search results.
     * @returns {Number[]} An array containing vertex ids in a given location.
     */
    getVerticesAt(position: any, radius: any, excludeVertexId: any): any[];
    /**
     * Returns the closest vertex (connected as well as unconnected).
     *
     * @param {Vertex} vertex The vertex of which to find the closest other vertex.
     * @returns {Vertex} The closest vertex.
     */
    getClosestVertex(vertex: any): any;
    /**
     * Add a ring to this representation of a molecule.
     *
     * @param {Ring} ring A new ring.
     * @returns {Number} The ring id of the new ring.
     */
    addRing(ring: any): any;
    /**
     * Removes a ring from the array of rings associated with the current molecule.
     *
     * @param {Number} ringId A ring id.
     */
    removeRing(ringId: any): void;
    /**
     * Gets a ring object from the array of rings associated with the current molecule by its id. The ring id is not equal to the index, since rings can be added and removed when processing bridged rings.
     *
     * @param {Number} ringId A ring id.
     * @returns {Ring} A ring associated with the current molecule.
     */
    getRing(ringId: any): any;
    /**
     * Add a ring connection to this representation of a molecule.
     *
     * @param {RingConnection} ringConnection A new ringConnection.
     * @returns {Number} The ring connection id of the new ring connection.
     */
    addRingConnection(ringConnection: any): any;
    /**
     * Removes a ring connection from the array of rings connections associated with the current molecule.
     *
     * @param {Number} ringConnectionId A ring connection id.
     */
    removeRingConnection(ringConnectionId: any): void;
    /**
     * Removes all ring connections between two vertices.
     *
     * @param {Number} vertexIdA A vertex id.
     * @param {Number} vertexIdB A vertex id.
     */
    removeRingConnectionsBetween(vertexIdA: any, vertexIdB: any): void;
    /**
     * Get a ring connection with a given id.
     *
     * @param {Number} id
     * @returns {RingConnection} The ring connection with the specified id.
     */
    getRingConnection(id: any): any;
    /**
     * Get the ring connections between a ring and a set of rings.
     *
     * @param {Number} ringId A ring id.
     * @param {Number[]} ringIds An array of ring ids.
     * @returns {Number[]} An array of ring connection ids.
     */
    getRingConnections(ringId: any, ringIds: any): any[];
    /**
     * Returns the overlap score of the current molecule based on its positioned vertices. The higher the score, the more overlaps occur in the structure drawing.
     *
     * @returns {Object} Returns the total overlap score and the overlap score of each vertex sorted by score (higher to lower). Example: { total: 99, scores: [ { id: 0, score: 22 }, ... ] }
     */
    getOverlapScore(): {
        total: number;
        scores: any[];
        vertexScores: Float32Array;
    };
    /**
     * When drawing a double bond, choose the side to place the double bond. E.g. a double bond should always be drawn inside a ring.
     *
     * @param {Vertex} vertexA A vertex.
     * @param {Vertex} vertexB A vertex.
     * @param {Vector2[]} sides An array containing the two normals of the line spanned by the two provided vertices.
     * @returns {Object} Returns an object containing the following information: {
            totalSideCount: Counts the sides of each vertex in the molecule, is an array [ a, b ],
            totalPosition: Same as position, but based on entire molecule,
            sideCount: Counts the sides of each neighbour, is an array [ a, b ],
            position: which side to position the second bond, is 0 or 1, represents the index in the normal array. This is based on only the neighbours
            anCount: the number of neighbours of vertexA,
            bnCount: the number of neighbours of vertexB
        }
     */
    chooseSide(vertexA: any, vertexB: any, sides: any): {
        totalSideCount: number[];
        totalPosition: number;
        sideCount: number[];
        position: number;
        anCount: any;
        bnCount: any;
    };
    /**
     * Sets the center for a ring.
     *
     * @param {Ring} ring A ring.
     */
    setRingCenter(ring: any): void;
    /**
     * Gets the center of a ring contained within a bridged ring and containing a given vertex.
     *
     * @param {Ring} ring A bridged ring.
     * @param {Vertex} vertex A vertex.
     * @returns {Vector2} The center of the subring that contains the vertex.
     */
    getSubringCenter(ring: any, vertex: any): any;
    /**
     * Draw the actual edges as bonds to the canvas.
     *
     * @param {Boolean} debug A boolean indicating whether or not to draw debug helpers.
     */
    drawEdges(debug: any): void;
    /**
     * Draws an edge as a bond to the canvas.
     *
     * @param {Number} edgeId An edge id.
     * @param {Boolean} debug A boolean indicating whether or not to draw debug helpers.
     */
    drawEdge(edgeId: any, debug: any): void;
    /**
     * Draws the vertices representing atoms to the canvas.
     *
     * @param {Boolean} debug A boolean indicating whether or not to draw debug messages to the canvas.
     */
    drawVertices(debug: any): void;
    /**
     * Position the vertices according to their bonds and properties.
     */
    position(): void;
    /**
     * Stores the current information associated with rings.
     */
    backupRingInformation(): void;
    /**
     * Restores the most recently backed up information associated with rings.
     */
    restoreRingInformation(): void;
    /**
     * Creates a new ring, that is, positions all the vertices inside a ring.
     *
     * @param {Ring} ring The ring to position.
     * @param {(Vector2|null)} [center=null] The center of the ring to be created.
     * @param {(Vertex|null)} [startVertex=null] The first vertex to be positioned inside the ring.
     * @param {(Vertex|null)} [previousVertex=null] The last vertex that was positioned.
     * @param {Boolean} [forcePositioned=false] A boolean indicating whether or not this ring was force positioned already - this is needed after force layouting a ring, in order to draw rings connected to it. (Not present in this declaration's signature.)
     */
    createRing(ring: any, center?: any, startVertex?: any, previousVertex?: any): void;
    /**
     * Rotate an entire subtree by an angle around a center.
     *
     * @param {Number} vertexId A vertex id (the root of the sub-tree).
     * @param {Number} parentVertexId A vertex id in the previous direction of the subtree that is to rotate.
     * @param {Number} angle An angle in radians.
     * @param {Vector2} center The rotational center.
     */
    rotateSubtree(vertexId: any, parentVertexId: any, angle: any, center: any): void;
    /**
     * Gets the overlap score of a subtree.
     *
     * @param {Number} vertexId A vertex id (the root of the sub-tree).
     * @param {Number} parentVertexId A vertex id in the previous direction of the subtree.
     * @param {Number[]} vertexOverlapScores An array containing the vertex overlap scores indexed by vertex id.
     * @returns {Object} An object containing the total overlap score and the center of mass of the subtree weighted by overlap score { value: 0.2, center: new Vector2() }.
     */
    getSubtreeOverlapScore(vertexId: any, parentVertexId: any, vertexOverlapScores: any): {
        value: number;
        center: Vector2;
    };
    /**
     * Returns the current (positioned vertices so far) center of mass.
     *
     * @returns {Vector2} The current center of mass.
     */
    getCurrentCenterOfMass(): Vector2;
    /**
     * Returns the current (positioned vertices so far) center of mass in the neighbourhood of a given position.
     * (Note the typo in the method name — "Neigbourhood" — which cannot be fixed here without breaking callers.)
     *
     * @param {Vector2} vec The point at which to look for neighbours.
     * @param {Number} [r=currentBondLength*2.0] The radius of vertices to include.
     * @returns {Vector2} The current center of mass.
     */
    getCurrentCenterOfMassInNeigbourhood(vec: any, r?: number): Vector2;
    /**
     * Resolve primary (exact) overlaps, such as two vertices that are connected to the same ring vertex.
     */
    resolvePrimaryOverlaps(): void;
    /**
     * Resolve secondary overlaps. Those overlaps are due to the structure turning back on itself.
     *
     * @param {Object[]} scores An array of objects sorted descending by score.
     * @param {Number} scores[].id A vertex id.
     * @param {Number} scores[].score The overlap score associated with the vertex id.
     */
    resolveSecondaryOverlaps(scores: any): void;
    /**
     * Get the last non-null or 0 angle vertex.
     * @param {Number} vertexId A vertex id.
     * @returns {Vertex} The last vertex with an angle that was not 0 or null.
     */
    getLastVertexWithAngle(vertexId: any): any;
    /**
     * Positions the next vertex thus creating a bond.
     *
     * @param {Vertex} vertex A vertex.
     * @param {Vertex} [previousVertex=null] The previous vertex which has been positioned.
     * @param {Number} [angle=0.0] The (global) angle of the vertex.
     * @param {Boolean} [originShortest=false] Whether the origin is the shortest subtree in the branch.
     * @param {Boolean} [skipPositioning=false] Whether or not to skip positioning and just check the neighbours.
     */
    createNextBond(vertex: any, previousVertex?: any, angle?: number, originShortest?: boolean, skipPositioning?: boolean): void;
    /**
     * Gets the vertex sharing the edge that is the common bond of two rings.
     *
     * @param {Vertex} vertex A vertex.
     * @returns {(Number|null)} The id of a vertex sharing the edge that is the common bond of two rings with the vertex provided or null, if none.
     */
    getCommonRingbondNeighbour(vertex: any): any;
    /**
     * Check if a vector is inside any ring.
     *
     * @param {Vector2} vec A vector.
     * @returns {Boolean} A boolean indicating whether or not the point (vector) is inside any of the rings associated with the current molecule.
     */
    isPointInRing(vec: any): boolean;
    /**
     * Check whether or not an edge is part of a ring.
     *
     * @param {Edge} edge An edge.
     * @returns {Boolean} A boolean indicating whether or not the edge is part of a ring.
     */
    isEdgeInRing(edge: any): boolean;
    /**
     * Check whether or not an edge is rotatable.
     *
     * @param {Edge} edge An edge.
     * @returns {Boolean} A boolean indicating whether or not the edge is rotatable.
     */
    isEdgeRotatable(edge: any): boolean;
    /**
     * Check whether or not a ring is an implicitly defined aromatic ring (lower case smiles).
     *
     * @param {Ring} ring A ring.
     * @returns {Boolean} A boolean indicating whether or not a ring is implicitly defined as aromatic.
     */
    isRingAromatic(ring: any): boolean;
    /**
     * Get the normals of an edge.
     *
     * @param {Edge} edge An edge.
     * @returns {Vector2[]} An array containing two vectors, representing the normals.
     */
    getEdgeNormals(edge: any): Vector2[];
    /**
     * Returns an array of vertices that are neighbouring a vertex but are not members of a ring (including bridges).
     *
     * @param {Number} vertexId A vertex id.
     * @returns {Vertex[]} An array of vertices.
     */
    getNonRingNeighbours(vertexId: any): any[];
    /**
     * Annotates stereochemistry information for visualization.
     */
    annotateStereochemistry(): void;
    /**
     * Visits vertices recursively to collect stereochemistry priority information.
     *
     * @param {Number} vertexId The id of a vertex.
     * @param {(Number|null)} previousVertexId The id of the parent vertex of the vertex.
     * @param {Uint8Array} visited An array containing the visited flag for all vertices in the graph.
     * @param {Array} priority An array of arrays storing the atomic numbers for each level.
     * @param {Number} maxDepth The maximum depth.
     * @param {Number} depth The current depth.
     */
    visitStereochemistry(vertexId: any, previousVertexId: any, visited: any, priority: any, maxDepth: any, depth: any, parentAtomicNumber?: number): void;
    /**
     * Creates pseudo-elements (such as Et, Me, Ac, Bz, ...) at the position of the carbon sets
     * the involved atoms not to be displayed.
     */
    initPseudoElements(): void;
}
export default Drawer;
import { MonacoEditor } from '../../../monaco-editor/monaco-editor';
import { Polymer } from '../../../../../../../tools/definitions/polymer';
// The chrome app id of the external-editor companion app this element connects to.
const EXTERNAL_EDITOR_APP_ID = 'hkjjmhkhhlmkflpihbikfpcojeofbjgn';
// Browser API shims provided elsewhere in the project (ambient declarations).
declare const browserAPI: browserAPI;
declare const BrowserAPI: BrowserAPI;
declare global {
	/**
	 * The dialog element used to let the user choose between the local and
	 * the external copy of a file when they diverge. `callback` receives the
	 * merged result, or `false` when the user cancels.
	 */
	type ChooseFileDialog = PaperDialogBase & {
		init(local: string, file: string, callback: (result: string|false) => void, isUpdate?: boolean,
			updateErrors?: {
				parseError?: boolean;
				generalError?: boolean;
				newScript: CursorPosition[];
				oldScript: CursorPosition[];
			}): void;
		local: string;
		file: string;
		callback(result: string|false): void;
	};
}
namespace UseExternalEditorElement {
	// Stage-1 handshake reply from the app ('hey' answers our stage-0 'hi';
	// see establishConnection for the full handshake).
	interface SetupConnectionMessage {
		status: 'connecting';
		stage: 1;
		message: 'hey';
		connectionId: number;
	}
	// Keep-alive probe from the app; answered with { status: 'ping', message: 'received' }.
	interface PingMessage {
		status: 'ping';
		message: 'hello?';
		connectionId: number;
	}
	// The app asks the user to choose between the local and external file contents.
	interface ChooseFileMessage {
		status: 'connected';
		action: 'chooseFile';
		local: string;
		external: string;
		connectionId: number;
	}
	// File setup succeeded and the file already existed on disk.
	interface SetupExistingFileMessage {
		status: 'connected';
		action: 'setupStylesheet'|'setupScript';
		existed: true;
		path: string;
		connectionId: number;
	}
	// File setup succeeded and a new file (with a fresh id) was created.
	interface SetupNewFileMessage {
		status: 'connected';
		action: 'setupStylesheet'|'setupScript';
		existed: false;
		id: number;
		path: string;
		connectionId: number;
	}
	// The external file changed; `code` is the new full contents.
	interface UpdateFromAppMessage {
		status: 'connected';
		action: 'updateFromApp';
		connectionId: number;
		code: string;
	}
	// Discriminated unions over the message `status`/`action` tags.
	type ConnectedEditorMessage = ChooseFileMessage|SetupExistingFileMessage|SetupNewFileMessage|
		UpdateFromAppMessage;
	type ExternalEditorMessage = SetupConnectionMessage|PingMessage|ConnectedEditorMessage;
	/**
	 * The use-external-editor element: connects the in-page editor to an
	 * external editor through the chrome companion app.
	 */
	export class UEE {
		// Polymer-style element name.
		static is: string = 'use-external-editor';
		/**
		 * The port at which the app is located
		 */
		private static appPort: _browser.runtime.Port = null;
		/**
		 * The connection to the app and its status
		 */
		private static connection: {
			status: string;
			state?: string;
			connected: boolean;
			id?: number;
			filePath?: string;
			fileConnected?: boolean;
			stage?: number;
		} = {
			status: 'no connection',
			connected: false
		};
		/**
		 * The choose-script hide animation for the main div
		 */
		private static dialogMainDivAnimationHide: Animation = null;
		/**
		 * The choose-script show animation for the comparison div
		 */
		private static dialogComparisonDivAnimationShow: Animation = null;
		/**
		 * The animation for expanding the dialog
		 */
		private static dialogExpansionAnimation: Animation = null;
		/**
		 * The CodeMirror editor used to merge your code
		 */
		private static editorManager: MonacoEditor = null;
		/**
		 * The style properties of the dialog before expanding
		 */
		private static dialogStyleProperties: ClientRect;
		/**
		 * The node that is currently being edited
		 */
		static editingCRMItem: (CRM.ScriptNode|CRM.StylesheetNode) & {
			file: {
				id: number;
				path: string;
			}
		};
/**
* Initialize for use
*/
static async init(this: UseExternalEditor) {
await window.onExists('app');
window.doc.externalEditorDialogTrigger.style.color = 'rgb(38, 153, 244)';
window.doc.externalEditorDialogTrigger.classList.remove('disabled');
window.doc.externalEditorDialogTrigger.disabled = false;
};
/**
* Notifies the user if something went wrong
*
* @param {string} error - What went wrong
*/
private static errorHandler(this: UseExternalEditor, error: string = 'Something went wrong') {
const toast = window.doc.externalEditorErrorToast;
toast.text = error;
toast.show();
};
		/**
		 * Sends a message to the app over the current port; silently drops
		 * the message when the port has been closed.
		 */
		private static postMessage(this: UseExternalEditor, msg: any) {
			try {
				this.appPort.postMessage(msg);
			} catch (e) {
				//Closed connection
			}
		};
/**
* Updates the local copy of the script to what the external file's value is (external->local)
*/
private static updateFromExternal(this: UseExternalEditor, msg: UpdateFromAppMessage) {
if (this.connection.id === msg.connectionId) {
if (window.scriptEdit && window.scriptEdit.active) {
window.scriptEdit.editorManager.setValue(msg.code);
} else {
window.stylesheetEdit.newSettings.value.stylesheet = msg.code;
window.stylesheetEdit.editorManager.setValue(msg.code);
}
}
};
static cancelOpenFiles(this: UseExternalEditor) {
window.doc.externalEditorDialogTrigger.style.color = 'rgb(38, 153, 244)';
window.doc.externalEditorDialogTrigger.classList.remove('disabled');
window.doc.externalEditorDialogTrigger.disabled = false;
try {
this.appPort.postMessage({
status: 'connected',
action: 'disconnect'
});
} catch (e) { }
};
		/**
		 * Builds the "Using external editor" tool overlay that is placed on
		 * top of the monaco editor while a file is connected externally.
		 */
		private static EditingOverlay = class EditingOverlay {
			// Creates the overlay container with its title.
			private static createToolsCont(): HTMLElement {
				const toolsCont = window.app.util.createElement('div', {
					id: 'externalEditingTools'
				});
				toolsCont.appendChild(window.app.util.createElement('div', {
					id: 'externalEditingToolsTitle'
				}, ['Using external editor']));
				return toolsCont;
			}
			// Creates the "Stop" (disconnect) button. Returns the element and a
			// mutable `onDc` holder whose `fn` is invoked after disconnecting.
			private static createDisconnect() {
				const onDc = {
					fn: () => {}
				}
				const el = window.app.util.createElement('div', {
					id: 'externalEditingToolsDisconnect'
				}, [
					window.app.util.createElement('div', {
						classes: ['paper-material'],
						props: {
							elevation: '1'
						}
					}, [
						window.app.util.createElement('paper-ripple', {}),
						window.app.util.createElement('svg', {
							props: {
								xmlns: 'http://www.w3.org/2000/svg',
								height: '70',
								width: '70',
								viewBox: '0 0 24 24'
							}
						}, [
							window.app.util.createElement('path', {
								props: {
									d: 'M19 6.41L17.59 5 12 10.59 6.41 5 5 6.41 10.59 12 5 17' +
										'.59 6.41 19 12 13.41 17.59 19 19 17.59 13.41 12z'
								}
							}),
							window.app.util.createElement('path', {
								props: {
									d: 'M0 0h24v24H0z',
									fill: 'none'
								}
							})
						]),
						window.app.util.createElement('div', {
							classes: ['externalEditingToolText']
						}, ['Stop'])
					])
				]);
				el.addEventListener('click', () => {
					// NOTE(review): `apply(this, [])` invokes cancelOpenFiles with
					// `this` bound to EditingOverlay, not to the parent element —
					// verify this is intentional.
					this.parent().cancelOpenFiles.apply(this, []);
					onDc.fn && onDc.fn();
				});
				return {
					el, onDc
				}
			}
			// Creates the "Location" button, which shows a toast with the
			// external file's path.
			private static createShowLocation() {
				const el = window.app.util.createElement('div', {
					id: 'externalEditingToolsShowLocation'
				}, [
					window.app.util.createElement('div', {
						classes: ['paper-material'],
						props: {
							elevation: '1'
						}
					}, [
						window.app.util.createElement('paper-ripple', {}),
						window.app.util.createElement('svg', {
							props: {
								height: '70',
								viewBox: '0 0 24 24',
								width: '70',
								xmlns: 'http://www.w3.org/2000/svg'
							}
						}, [
							window.app.util.createElement('path', {
								props: {
									d: 'M0 0h24v24H0z',
									fill: 'none'
								}
							}),
							window.app.util.createElement('path', {
								props: {
									d: 'M20 6h-8l-2-2H4c-1.1 0-1.99.9-1.99 2L2 ' +
										'18c0 1.1.9 2 2 2h16c1.1 0 2-.9 2-2V8c0' +
										'-1.1-.9-2-2-2zm0 12H4V8h16v10z'
								}
							})
						])
					]),
					window.app.util.createElement('div', {
						classes: ['externalEditingToolText']
					}, ['Location'])
				]);
				el.addEventListener('click', () => {
					//Show location toast
					let location = this.parent().connection.filePath;
					location = location.replace(/\\/g, '/');
					window.doc.externalEditoOpenLocationInBrowser.setAttribute('href', 'file:///' + location);
					const externalEditorLocationToast = window.doc.externalEditorLocationToast;
					externalEditorLocationToast.text = 'File is located at: ' + location;
					externalEditorLocationToast.show();
				});
				return el;
			}
			// Creates the "Move" (create new file) button.
			// NOTE(review): unlike the other buttons, the element built here is
			// neither returned nor appended anywhere (createCont just calls this
			// method), so the button never reaches the DOM — confirm whether this
			// feature was intentionally disabled.
			private static createNewFile() {
				window.app.util.createElement('div', {
					id: 'externalEditingToolsCreateNewFile'
				}, [
					window.app.util.createElement('div', {
						classes: ['paper-material'],
						props: {
							elevation: '1'
						}
					}, [
						window.app.util.createElement('paper-ripple', {}),
						window.app.util.createElement('svg', {
							props: {
								height: '70',
								width: '70',
								xmlns: 'http://www.w3.org/2000/svg',
								viewBox: '0 0 24 24'
							}
						}, [
							window.app.util.createElement('path', {
								props: {
									d: 'M6 2c-1.1 0-1.99.9-1.99 2L4 20c0 1.1' +
										'.89 2 1.99 2H18c1.1 0 2-.9 2-2V8l-6' +
										'-6H6zm7 7V3.5L18.5 9H13z'
								}
							}),
							window.app.util.createElement('path', {
								props: {
									d: 'M0 0h24v24H0z',
									fill: 'none'
								}
							})
						])
					]),
					window.app.util.createElement('div', {
						classes: ['externalEditingToolText']
					}, ['Move'])
				]).addEventListener('click', () => {
					this.parent().postMessage({
						status: 'connected',
						action: 'createNewFile',
						isCss: (!!window.scriptEdit),
						name: this.parent().editingCRMItem.name
					});
				});
			}
			// Creates the "Refresh" button, which asks the app to push the
			// external file's contents back into the local editor.
			private static createUpdate() {
				const el = window.app.util.createElement('div', {
					id: 'externalEditingToolsUpdate'
				}, [
					window.app.util.createElement('div', {
						classes: ['paper-material'],
						props: {
							elevation: '1'
						}
					}, [
						window.app.util.createElement('paper-ripple', {}),
						window.app.util.createElement('svg', {
							props: {
								height: '70',
								width: '70',
								xmlns: 'http://www.w3.org/2000/svg',
								viewBox: '0 0 24 24'
							}
						}, [
							window.app.util.createElement('path', {
								props: {
									d: 'M17.65 6.35C16.2 4.9 14.21 4 12 4c-4.42 0-7.99 3.58' +
										'-7.99 8s3.57 8 7.99 8c3.73 0 6.84-2.55 7.73-6h-2.0' +
										'8c-.82 2.33-3.04 4-5.65 4-3.31 0-6-2.69-6-6s2.69-6' +
										' 6-6c1.66 0 3.14.69 4.22 1.78L13 11h7V4l-2.35 2.35z'
								}
							}),
							window.app.util.createElement('path', {
								props: {
									d: 'M0 0h24v24H0z',
									fill: 'none'
								}
							})
						])
					]),
					window.app.util.createElement('div', {
						classes: ['externalEditingToolText']
					}, ['Refresh'])
				]);
				el.addEventListener('click', () => {
					this.parent().postMessage({
						status: 'connected',
						action: 'refreshFromApp'
					});
				});
				return el;
			}
			// Assembles the button container and returns the disconnect hook.
			private static createCont(toolsCont: HTMLElement) {
				const cont = toolsCont.appendChild(window.app.util.createElement('div', {
					id: 'externalEditingToolsButtonsCont'
				}));
				const { el, onDc } = this.createDisconnect();
				cont.appendChild(el)
				cont.appendChild(this.createShowLocation());
				this.createNewFile();
				cont.appendChild(this.createUpdate());
				return onDc;
			}
			// Attaches the overlay as a monaco overlay widget (top-right corner).
			// Returns null for diff editors, which are not supported.
			private static appendWrapper(toolsCont: HTMLElement) {
				const editorManager = window.codeEditBehavior.getEditor();
				const editor = editorManager.editor as monaco.editor.IStandaloneCodeEditor|monaco.editor.IDiffEditor;
				if (editorManager.isDiff(editor)) {
					return null;
				}
				const widget = {
					getDomNode() {
						return toolsCont;
					},
					getId() {
						return 'external.editor.overlay';
					},
					getPosition() {
						return {
							preference: monaco.editor.OverlayWidgetPositionPreference.TOP_RIGHT_CORNER
						};
					}
				};
				(editor as any).addOverlayWidget(widget);
				return {
					editor,
					widget
				};
			}
			// Wires the disconnect hook to remove the overlay widget again.
			private static linkDisconnect(onDc: {
				fn: () => void;
			}, editorData: {
				widget: monaco.editor.IOverlayWidget;
				editor: monaco.editor.ICodeEditor;
			}) {
				const { widget, editor } = editorData;
				onDc.fn = () => {
					editor.removeOverlayWidget(widget);
				}
			}
			// Entry point: builds the overlay and attaches it to the editor.
			static generateOverlay() {
				const toolsCont = this.createToolsCont();
				const onDc = this.createCont(toolsCont)
				const editorData = this.appendWrapper(toolsCont);
				this.linkDisconnect(onDc, editorData as any);
			}
			// The enclosing use-external-editor element.
			static parent() {
				return window.externalEditor;
			}
		}
private static createEditingOverlay(this: UseExternalEditor) {
window.doc.externalEditorDialogTrigger.style.color = 'rgb(175, 175, 175)';
window.doc.externalEditorDialogTrigger.disabled = true;
window.doc.externalEditorDialogTrigger.classList.add('disabled');
this.EditingOverlay.generateOverlay()
};
static setupExternalEditing(this: UseExternalEditor) {
//Send a message to the app to create the item with its current script and name
const _this = this;
if (BrowserAPI.getBrowser() !== 'chrome') {
window.app.util.showToast('This feature is only available in chrome' +
' (until google removes chrome apps)');
return;
}
const manager = window.codeEditBehavior.getEditor();
if (manager.isTextarea(manager.getEditorAsMonaco())) {
window.app.util.showToast('Please update your chrome (at least chrome 30) to use this feature');
return;
}
if (this.connection.connected) {
const item = this.editingCRMItem;
const tempListener = function (msg: SetupExistingFileMessage | SetupNewFileMessage) {
if (msg.status === 'connected' && (msg.action === 'setupScript' || msg.action === 'setupStylesheet') && msg.connectionId === _this.connection.id) {
if (msg.existed === false) {
item.file = {
id: msg.id,
path: msg.path
};
}
_this.connection.filePath = msg.path;
window.app.upload();
_this.connection.fileConnected = true;
(window.scriptEdit && window.scriptEdit.active ? window.scriptEdit.reloadEditor() : window.stylesheetEdit.reloadEditor());
_this.createEditingOverlay();
_this.appPort.onMessage.removeListener();
}
};
this.appPort.onMessage.addListener(tempListener);
if (item.file) {
this.appPort.postMessage({
status: 'connected',
action: (window.scriptEdit && window.scriptEdit.active ? 'setupScript' : 'setupStylesheet'),
name: item.name,
code: (window.scriptEdit && window.scriptEdit.active ?
(item as CRM.ScriptNode).value.script : (item as CRM.StylesheetNode).value.stylesheet),
id: item.file.id
});
} else {
this.appPort.postMessage({
status: 'connected',
action: (window.scriptEdit && window.scriptEdit.active ? 'setupScript' : 'setupStylesheet'),
name: item.name,
code: (window.scriptEdit && window.scriptEdit.active ?
(item as CRM.ScriptNode).value.script : (item as CRM.StylesheetNode).value.stylesheet),
});
}
} else {
_this.errorHandler('Could not establish connection');
}
};
/**
* Sets up the external messages sent to go this element's handler
*/
static setupMessageHandler(this: UseExternalEditor) {
browserAPI.runtime.onConnectExternal.addListener((port) => {
if (port.sender.id === 'obnfehdnkjmbijebdllfcnccllcfceli') {
port.onMessage.addListener((msg: any) => {
this.messageHandler(msg);
});
}
});
};
private static appMessageHandler(this: UseExternalEditor, msg: ConnectedEditorMessage) {
switch (msg.action) {
case 'chooseFile':
const chooseFileDialog = window.doc.externalEditorChooseFile;
chooseFileDialog.init(msg.local, msg.external, (result) => {
if (result !== false) {
const editor = window.codeEditBehavior.getEditor();
editor.setValue(result);
this.appPort.postMessage({
status: 'connected',
action: 'chooseFile',
code: result
});
} else {
chooseFileDialog.close();
}
});
chooseFileDialog.open();
break;
case 'updateFromApp':
this.updateFromExternal(msg);
break;
}
};
/**
* Takes actions based on what messages are received from the other extension
*/
private static messageHandler(this: UseExternalEditor, msg: ExternalEditorMessage) {
switch (msg.status) {
case 'connected':
this.appMessageHandler(msg);
break;
case 'ping':
this.appPort.postMessage({
status: 'ping',
message: 'received'
});
break;
}
};
		/**
		 * Tries to establish a connection to the app (if installed).
		 * Handshake protocol: we send stage-0 'hi', the app answers stage-1
		 * 'hey', we confirm with stage-2 'hello'. A 5 second timeout races
		 * the reply; on timeout an error toast is shown.
		 */
		private static establishConnection(this: UseExternalEditor) {
			if (!this.appPort) {
				this.appPort = browserAPI.runtime.connect(EXTERNAL_EDITOR_APP_ID);
				this.connection.status = 'connecting';
				this.connection.stage = 0;
				this.connection.fileConnected = false;
				// Race the app's handshake reply against a 5 second timeout.
				Promise.race([
					new Promise<ExternalEditorMessage|false>((resolve) => {
						// NOTE(review): `connected` is never set to true, so the
						// guard is ineffective — harmless only because resolve()
						// is idempotent. Confirm whether it should be set in the
						// listener.
						let connected: boolean = false;
						this.appPort.onMessage.addListener((msg: any) => {
							if (!connected && msg.status === 'connecting' &&
								msg.stage === 1 && msg.message === 'hey') {
								resolve(msg);
							}
							// Every message, handshake or not, also goes through
							// the regular handler.
							this.messageHandler(msg);
						});
						this.appPort.postMessage({
							status: 'connecting',
							message: 'hi',
							stage: 0
						});
					}),
					new Promise<ExternalEditorMessage|false>((resolve) => {
						window.setTimeout(() => {
							resolve(false);
						}, 5000);
					})
				]).then((res) => {
					if (res === false) {
						this.errorHandler();
					} else {
						this.connection.stage = 2; //We have sent confirmation that we are there
						this.appPort.postMessage({
							status: 'connecting',
							message: 'hello',
							stage: 2
						});
						//Connection is now done
						this.connection.connected = true;
						this.connection.state = 'connected';
						this.connection.id = res.connectionId;
					}
				});
			}
		};
/**
 * Copies the listed properties from `source` onto `target`, coercing
 * every value to a string.
 */
static applyProps<T extends {
	[key: string]: string|number;
}>(source: T, target: any, props: (keyof T)[]) {
	for (const prop of props) {
		target[prop] = String(source[prop]);
	}
}
/**
 * Animates `element` from the `before` keyframe to the `after`
 * keyframe over `duration` milliseconds.
 *
 * When the animation finishes, the `after` values are written onto the
 * element's inline style so the end state persists, and the optional
 * `callback` is invoked.
 */
static doCSSAnimation(element: {
	animate: Function;
	style: CSSStyleDeclaration;
}, [before, after] : [{
	[key: string]: string|number;
}, {
	[key: string]: string|number;
}], duration: number, callback?: () => void): Animation {
	const easing = 'cubic-bezier(0.215, 0.610, 0.355, 1.000)';
	const animation = element.animate([before, after], { duration, easing });
	animation.onfinish = () => {
		// Persist the final keyframe as inline styles.
		this.applyProps(after, element.style, Object.getOwnPropertyNames(after));
		if (callback) {
			callback();
		}
	};
	return animation;
}
/**
 * Creates the diff editor comparing `oldScript` against `newScript`.
 *
 * The editor mode follows the active edit context: CSS when a
 * stylesheet is being edited, otherwise TypeScript or JavaScript
 * depending on the script editor's TS setting.
 */
static async initEditor(this: UseExternalEditor, oldScript: string, newScript: string) {
	let mode;
	if (window.stylesheetEdit && window.stylesheetEdit.active) {
		mode = window.doc.chooseFileMerger.EditorMode.CSS_META;
	} else if (window.scriptEdit.isTsEnabled()) {
		mode = window.doc.chooseFileMerger.EditorMode.TS_META;
	} else {
		mode = window.doc.chooseFileMerger.EditorMode.JS_META;
	}
	this.editorManager = await window.doc.chooseFileMerger.createDiff([oldScript, newScript], mode, {
		wordWrap: 'off',
		// Scale the base 14px font by the user's zoom percentage.
		fontSize: (~~window.app.settings.editor.zoom / 100) * 14,
		folding: true
	});
}
/**
 * Plays the given animation if it exists and is playable; returns
 * whether it was played.
 */
private static playIfExists(animation: Animation|void): boolean {
	if (!animation || !animation.play) {
		return false;
	}
	animation.play();
	return true;
}
/**
 * Continues the merge-dialog transition after the main dialog content
 * has faded out: hides the main dialog, expands the outer dialog to
 * fullscreen, fades the merger container in, and finally creates the
 * diff editor.
 *
 * The expansion and fade-in Animations are built once and cached on
 * `dialogExpansionAnimation` / `dialogComparisonDivAnimationShow`;
 * later calls replay the cached Animation via `playIfExists` instead
 * of rebuilding it (the `x || (x = …)` pattern below).
 */
static onDialogMainDivAnimationHideEnd(this: UseExternalEditor, dialogRect: ClientRect, dialogStyle: CSSStyleDeclaration,
	oldScript: string, newScript: string) {
	// Hide the faded-out main dialog and reset its keyframe end values.
	window.doc.chooseFileMainDialog.style.display = 'none';
	window.doc.chooseFileMainDialog.style.marginTop = '0';
	window.doc.chooseFileMainDialog.style.opacity = '1';
	this.playIfExists(this.dialogExpansionAnimation) ||
	(this.dialogExpansionAnimation = this.doCSSAnimation(window.doc.externalEditorChooseFile, [{
		width: dialogRect.width,
		height: dialogRect.height,
		marginTop: '24px',
		marginLeft: '40px',
		marginBottom: '24px',
		marginRight: '40px',
		top: (dialogStyle.top || '0px'),
		left: (dialogStyle.left || '0px')
	}, {
		// Expand to cover the whole viewport.
		width: '100vw',
		height: '100vh',
		marginTop: '0px',
		marginLeft: '0px',
		marginBottom: '0px',
		marginRight: '0px',
		top: '0px',
		left: '0px'
	}], 400, () => {
		window.setDisplayFlex(window.doc.chooseFileMerger);
		this.playIfExists(this.dialogComparisonDivAnimationShow) ||
		(this.dialogComparisonDivAnimationShow = this.doCSSAnimation(window.doc.chooseFileMerger, [{
			marginTop: '70px',
			opacity: 0
		}, {
			marginTop: '0px',
			opacity: 1
		}], 250, () => {
			// Create the diff editor once; the delay lets the layout
			// settle before the editor measures itself.
			if (!this.editorManager) {
				setTimeout(() => {
					this.initEditor(oldScript, newScript);
				}, 150);
			}
		}));
	}));
}
/**
 * Starts the merge-dialog transition: pins the dialog at its current
 * size, locks page scrolling, fades the main dialog content out, and
 * then hands over to `onDialogMainDivAnimationHideEnd` to expand the
 * dialog and show the diff of `oldScript` vs `newScript`.
 *
 * The hide Animation is created once and cached on
 * `dialogMainDivAnimationHide`; later calls replay it via
 * `playIfExists`.
 */
static showMergeDialog(this: UseExternalEditor, oldScript: string, newScript: string) {
	//Animate the comparison in
	const dialogRect = window.doc.externalEditorChooseFile.getBoundingClientRect();
	const dialogStyle = window.doc.externalEditorChooseFile.style;
	// Remember the pre-expansion geometry so `resetStyles` can restore it.
	this.dialogStyleProperties = dialogRect;
	dialogStyle.maxWidth = '100vw';
	dialogStyle.width = dialogRect.width + 'px';
	dialogStyle.height = dialogRect.height + 'px';
	document.body.style.overflow = 'hidden';
	window.doc.chooseFileMainDialog.style.position = 'absolute';
	this.playIfExists(this.dialogMainDivAnimationHide) ||
	(this.dialogMainDivAnimationHide = window.doc.chooseFileMainDialog.animate([
		{
			marginTop: '20px',
			opacity: '1'
		}, {
			marginTop: '100px',
			opacity: '0'
		}
	], {
		duration: 240,
		easing: 'cubic-bezier(0.215, 0.610, 0.355, 1.000)'
	}));
	// Re-assigned every call so the handler captures the current
	// rect/style/script values.
	this.dialogMainDivAnimationHide.onfinish = () => {
		this.onDialogMainDivAnimationHideEnd(dialogRect, dialogStyle, oldScript, newScript)
	};
};
/**
 * Restores the dialog's saved geometry by writing the source rect's
 * width/height/top/left (as pixel values) onto the target style.
 */
private static resetStyles(target: CSSStyleDeclaration, source: ClientRect) {
	const px = (value: number) => value + 'px';
	target.width = px(source.width);
	target.height = px(source.height);
	target.top = px(source.top);
	target.left = px(source.left);
}
/**
 * Builds the `init` function for the choose-file dialog: it stores the
 * two file versions and the result callback on the dialog, resets the
 * page and dialog back to their pre-merge state, and destroys any
 * previous merger instance.
 */
private static chooseFileDialog(this: UseExternalEditor, chooseFileDialog: ChooseFileDialog): (local: string, file: string, callback: (result: string|false) => void,
	isUpdate?: boolean, updateErrors?: {
		parseError: boolean;
		newScript: CursorPosition[];
		oldScript: CursorPosition[];
	}) => void {
	// The arrow below captures this method's `this`, so no alias needed.
	return (local, file, callback) => {
		chooseFileDialog.local = local;
		chooseFileDialog.file = file;
		chooseFileDialog.callback = callback;
		this.editorManager = null;
		// Undo the fullscreen-merge layout changes.
		document.body.style.overflow = 'auto';
		window.doc.chooseFileMainDialog.style.position = 'static';
		window.doc.chooseFileMainDialog.style.display = 'block';
		window.doc.chooseFileMerger.destroy();
		if (this.dialogStyleProperties) {
			this.resetStyles(chooseFileDialog.style, this.dialogStyleProperties);
		}
	};
}
/**
 * Element ready hook: exposes this element as `window.externalEditor`,
 * requests a refresh from the app whenever the window regains focus
 * while a file is connected, and wires up the choose-file dialog's
 * buttons once the app element exists.
 */
static async ready(this: UseExternalEditor) {
	window.externalEditor = this;
	window.onfocus = () => {
		if (this.connection.fileConnected) {
			//File is connected, ask for an update
			// NOTE(review): other code paths use `this.appPort.postMessage`
			// — confirm `postMessage` exists on this element or whether
			// this should go through `appPort`.
			this.postMessage({
				status: 'connected',
				action: 'refreshFromApp'
			});
		}
	};
	await window.onExists('app');
	const chooseFileDialog = window.doc.externalEditorChooseFile as ChooseFileDialog;
	chooseFileDialog.init = this.chooseFileDialog(chooseFileDialog)
	window.doc.externalEditorTryAgainButton.addEventListener('click', () => {
		this.establishConnection();
		window.doc.externalEditorErrorToast.hide();
	});
	// "Keep CRM version" resolves the dialog with the left (original) side.
	window.doc.chooseFileChooseCRM.addEventListener('click', () => {
		const editor = this.editorManager.editor;
		if (this.editorManager.isDiff(editor)) {
			chooseFileDialog.callback(editor.getModel().original.getValue());
		}
	});
	// "Keep disk version" resolves with the right (modified) side.
	window.doc.chooseFileChooseDisk.addEventListener('click', () => {
		const editor = this.editorManager.editor;
		if (this.editorManager.isDiff(editor)) {
			chooseFileDialog.callback(editor.getModel().modified.getValue());
		}
	});
	// Any close control cancels the merge.
	$('.closeChooseFileDialog').click(() => {
		chooseFileDialog.callback(false);
	});
}
}
// Register the element right away if the registration framework is
// already available; otherwise wait for its RegisterReady event.
if (window.register) {
	window.register(UEE);
} else {
	window.addEventListener('RegisterReady', () => {
		window.register(UEE);
	});
}
}
/**
 * The resolved Polymer element type for `use-external-editor`, combining
 * the element tag with the behavior class declared above.
 */
export type UseExternalEditor = Polymer.El<'use-external-editor', typeof UseExternalEditorElement.UEE>;
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License.
*/
import { ATN } from "antlr4ts/atn/ATN";
import { ATNDeserializer } from "antlr4ts/atn/ATNDeserializer";
import { FailedPredicateException } from "antlr4ts/FailedPredicateException";
import { NotNull } from "antlr4ts/Decorators";
import { NoViableAltException } from "antlr4ts/NoViableAltException";
import { Override } from "antlr4ts/Decorators";
import { Parser } from "antlr4ts/Parser";
import { ParserRuleContext } from "antlr4ts/ParserRuleContext";
import { ParserATNSimulator } from "antlr4ts/atn/ParserATNSimulator";
import { ParseTreeListener } from "antlr4ts/tree/ParseTreeListener";
import { ParseTreeVisitor } from "antlr4ts/tree/ParseTreeVisitor";
import { RecognitionException } from "antlr4ts/RecognitionException";
import { RuleContext } from "antlr4ts/RuleContext";
//import { RuleVersion } from "antlr4ts/RuleVersion";
import { TerminalNode } from "antlr4ts/tree/TerminalNode";
import { Token } from "antlr4ts/Token";
import { TokenStream } from "antlr4ts/TokenStream";
import { Vocabulary } from "antlr4ts/Vocabulary";
import { VocabularyImpl } from "antlr4ts/VocabularyImpl";
import * as Utils from "antlr4ts/misc/Utils";
import { ExpressionAntlrParserListener } from "./ExpressionAntlrParserListener";
import { ExpressionAntlrParserVisitor } from "./ExpressionAntlrParserVisitor";
/**
 * Parser for the expression grammar, generated by ANTLR (antlr4ts) from
 * ExpressionAntlrParser.g4.
 *
 * NOTE(review): generated code — change the grammar and regenerate
 * rather than editing this file by hand.
 */
export class ExpressionAntlrParser extends Parser {
	// Token type constants (must stay in sync with the generated lexer).
	public static readonly STRING_INTERPOLATION_START = 1;
	public static readonly PLUS = 2;
	public static readonly SUBSTRACT = 3;
	public static readonly NON = 4;
	public static readonly XOR = 5;
	public static readonly ASTERISK = 6;
	public static readonly SLASH = 7;
	public static readonly PERCENT = 8;
	public static readonly DOUBLE_EQUAL = 9;
	public static readonly NOT_EQUAL = 10;
	public static readonly SINGLE_AND = 11;
	public static readonly DOUBLE_AND = 12;
	public static readonly DOUBLE_VERTICAL_CYLINDER = 13;
	public static readonly LESS_THAN = 14;
	public static readonly MORE_THAN = 15;
	public static readonly LESS_OR_EQUAl = 16;
	public static readonly MORE_OR_EQUAL = 17;
	public static readonly OPEN_BRACKET = 18;
	public static readonly CLOSE_BRACKET = 19;
	public static readonly DOT = 20;
	public static readonly OPEN_SQUARE_BRACKET = 21;
	public static readonly CLOSE_SQUARE_BRACKET = 22;
	public static readonly OPEN_CURLY_BRACKET = 23;
	public static readonly CLOSE_CURLY_BRACKET = 24;
	public static readonly COMMA = 25;
	public static readonly COLON = 26;
	public static readonly ARROW = 27;
	public static readonly NULL_COALESCE = 28;
	public static readonly QUESTION_MARK = 29;
	public static readonly NUMBER = 30;
	public static readonly WHITESPACE = 31;
	public static readonly IDENTIFIER = 32;
	public static readonly NEWLINE = 33;
	public static readonly STRING = 34;
	public static readonly INVALID_TOKEN_DEFAULT_MODE = 35;
	public static readonly TEMPLATE = 36;
	public static readonly ESCAPE_CHARACTER = 37;
	public static readonly TEXT_CONTENT = 38;
	// Rule indices into `ruleNames`.
	public static readonly RULE_file = 0;
	public static readonly RULE_expression = 1;
	public static readonly RULE_primaryExpression = 2;
	public static readonly RULE_stringInterpolation = 3;
	public static readonly RULE_textContent = 4;
	public static readonly RULE_argsList = 5;
	public static readonly RULE_lambda = 6;
	public static readonly RULE_keyValuePairList = 7;
	public static readonly RULE_keyValuePair = 8;
	public static readonly RULE_key = 9;
	// tslint:disable:no-trailing-whitespace
	public static readonly ruleNames: string[] = [
		"file", "expression", "primaryExpression", "stringInterpolation", "textContent",
		"argsList", "lambda", "keyValuePairList", "keyValuePair", "key",
	];
	// Literal display strings, indexed by token type (undefined where the
	// token has no fixed literal).
	private static readonly _LITERAL_NAMES: Array<string | undefined> = [
		undefined, undefined, "'+'", "'-'", "'!'", "'^'", "'*'", "'/'", "'%'",
		"'=='", undefined, "'&'", "'&&'", "'||'", "'<'", "'>'", "'<='", "'>='",
		"'('", "')'", "'.'", "'['", "']'", "'{'", "'}'", "','", "':'", "'=>'",
		"'??'", "'?'",
	];
	// Symbolic token names, indexed by token type.
	private static readonly _SYMBOLIC_NAMES: Array<string | undefined> = [
		undefined, "STRING_INTERPOLATION_START", "PLUS", "SUBSTRACT", "NON", "XOR",
		"ASTERISK", "SLASH", "PERCENT", "DOUBLE_EQUAL", "NOT_EQUAL", "SINGLE_AND",
		"DOUBLE_AND", "DOUBLE_VERTICAL_CYLINDER", "LESS_THAN", "MORE_THAN", "LESS_OR_EQUAl",
		"MORE_OR_EQUAL", "OPEN_BRACKET", "CLOSE_BRACKET", "DOT", "OPEN_SQUARE_BRACKET",
		"CLOSE_SQUARE_BRACKET", "OPEN_CURLY_BRACKET", "CLOSE_CURLY_BRACKET", "COMMA",
		"COLON", "ARROW", "NULL_COALESCE", "QUESTION_MARK", "NUMBER", "WHITESPACE",
		"IDENTIFIER", "NEWLINE", "STRING", "INVALID_TOKEN_DEFAULT_MODE", "TEMPLATE",
		"ESCAPE_CHARACTER", "TEXT_CONTENT",
	];
	public static readonly VOCABULARY: Vocabulary = new VocabularyImpl(ExpressionAntlrParser._LITERAL_NAMES, ExpressionAntlrParser._SYMBOLIC_NAMES, []);
	// @Override
	// @NotNull
	public get vocabulary(): Vocabulary {
		return ExpressionAntlrParser.VOCABULARY;
	}
	// tslint:enable:no-trailing-whitespace
	// @Override
	public get grammarFileName(): string { return "ExpressionAntlrParser.g4"; }
	// @Override
	public get ruleNames(): string[] { return ExpressionAntlrParser.ruleNames; }
	// @Override
	public get serializedATN(): string { return ExpressionAntlrParser._serializedATN; }
	constructor(input: TokenStream) {
		super(input);
		// Drive this parser with the ATN deserialized further down the file.
		this._interp = new ParserATNSimulator(ExpressionAntlrParser._ATN, this);
	}
// @RuleVersion(0)
/**
 * Parses the `file` rule: a single expression followed by EOF.
 * Generated by ANTLR — do not hand-edit.
 */
public file(): FileContext {
	let _localctx: FileContext = new FileContext(this._ctx, this.state);
	this.enterRule(_localctx, 0, ExpressionAntlrParser.RULE_file);
	try {
		this.enterOuterAlt(_localctx, 1);
		{
		this.state = 20;
		this.expression(0);
		this.state = 21;
		this.match(ExpressionAntlrParser.EOF);
		}
	}
	catch (re) {
		// Standard generated recovery: record, report and resynchronize.
		if (re instanceof RecognitionException) {
			_localctx.exception = re;
			this._errHandler.reportError(this, re);
			this._errHandler.recover(this, re);
		} else {
			throw re;
		}
	}
	finally {
		this.exitRule();
	}
	return _localctx;
}
public expression(): ExpressionContext;
public expression(_p: number): ExpressionContext;
// @RuleVersion(0)
/**
 * Parses the left-recursive `expression` rule. `_p` is the minimum
 * operator precedence for this recursion level (ANTLR's precedence
 * climbing). Generated by ANTLR — do not hand-edit.
 */
public expression(_p?: number): ExpressionContext {
	if (_p === undefined) {
		_p = 0;
	}
	let _parentctx: ParserRuleContext = this._ctx;
	let _parentState: number = this.state;
	let _localctx: ExpressionContext = new ExpressionContext(this._ctx, _parentState);
	let _prevctx: ExpressionContext = _localctx;
	let _startState: number = 2;
	this.enterRecursionRule(_localctx, 2, ExpressionAntlrParser.RULE_expression, _p);
	let _la: number;
	try {
		let _alt: number;
		this.enterOuterAlt(_localctx, 1);
		{
		// Prefix: either a unary operator applied to an expression, or a
		// primary expression.
		this.state = 27;
		this._errHandler.sync(this);
		switch (this._input.LA(1)) {
		case ExpressionAntlrParser.PLUS:
		case ExpressionAntlrParser.SUBSTRACT:
		case ExpressionAntlrParser.NON:
			{
			_localctx = new UnaryOpExpContext(_localctx);
			this._ctx = _localctx;
			_prevctx = _localctx;
			this.state = 24;
			_la = this._input.LA(1);
			if (!((((_la) & ~0x1F) === 0 && ((1 << _la) & ((1 << ExpressionAntlrParser.PLUS) | (1 << ExpressionAntlrParser.SUBSTRACT) | (1 << ExpressionAntlrParser.NON))) !== 0))) {
			this._errHandler.recoverInline(this);
			} else {
				if (this._input.LA(1) === Token.EOF) {
					this.matchedEOF = true;
				}
				this._errHandler.reportMatch(this);
				this.consume();
			}
			this.state = 25;
			this.expression(12);
			}
			break;
		case ExpressionAntlrParser.STRING_INTERPOLATION_START:
		case ExpressionAntlrParser.OPEN_BRACKET:
		case ExpressionAntlrParser.OPEN_SQUARE_BRACKET:
		case ExpressionAntlrParser.OPEN_CURLY_BRACKET:
		case ExpressionAntlrParser.NUMBER:
		case ExpressionAntlrParser.IDENTIFIER:
		case ExpressionAntlrParser.STRING:
			{
			_localctx = new PrimaryExpContext(_localctx);
			this._ctx = _localctx;
			_prevctx = _localctx;
			this.state = 26;
			this.primaryExpression(0);
			}
			break;
		default:
			throw new NoViableAltException(this);
		}
		this._ctx._stop = this._input.tryLT(-1);
		// Suffix loop: fold in binary/ternary operators while the next
		// operator's precedence permits (precpred checks).
		this.state = 64;
		this._errHandler.sync(this);
		_alt = this.interpreter.adaptivePredict(this._input, 2, this._ctx);
		while (_alt !== 2 && _alt !== ATN.INVALID_ALT_NUMBER) {
			if (_alt === 1) {
				if (this._parseListeners != null) {
					this.triggerExitRuleEvent();
				}
				_prevctx = _localctx;
				{
				this.state = 62;
				this._errHandler.sync(this);
				switch ( this.interpreter.adaptivePredict(this._input, 1, this._ctx) ) {
				case 1:
					{
					_localctx = new BinaryOpExpContext(new ExpressionContext(_parentctx, _parentState));
					this.pushNewRecursionContext(_localctx, _startState, ExpressionAntlrParser.RULE_expression);
					this.state = 29;
					if (!(this.precpred(this._ctx, 11))) {
						throw new FailedPredicateException(this, "this.precpred(this._ctx, 11)");
					}
					this.state = 30;
					this.match(ExpressionAntlrParser.XOR);
					this.state = 31;
					this.expression(11);
					}
					break;
				case 2:
					{
					_localctx = new BinaryOpExpContext(new ExpressionContext(_parentctx, _parentState));
					this.pushNewRecursionContext(_localctx, _startState, ExpressionAntlrParser.RULE_expression);
					this.state = 32;
					if (!(this.precpred(this._ctx, 10))) {
						throw new FailedPredicateException(this, "this.precpred(this._ctx, 10)");
					}
					this.state = 33;
					_la = this._input.LA(1);
					if (!((((_la) & ~0x1F) === 0 && ((1 << _la) & ((1 << ExpressionAntlrParser.ASTERISK) | (1 << ExpressionAntlrParser.SLASH) | (1 << ExpressionAntlrParser.PERCENT))) !== 0))) {
					this._errHandler.recoverInline(this);
					} else {
						if (this._input.LA(1) === Token.EOF) {
							this.matchedEOF = true;
						}
						this._errHandler.reportMatch(this);
						this.consume();
					}
					this.state = 34;
					this.expression(11);
					}
					break;
				case 3:
					{
					_localctx = new BinaryOpExpContext(new ExpressionContext(_parentctx, _parentState));
					this.pushNewRecursionContext(_localctx, _startState, ExpressionAntlrParser.RULE_expression);
					this.state = 35;
					if (!(this.precpred(this._ctx, 9))) {
						throw new FailedPredicateException(this, "this.precpred(this._ctx, 9)");
					}
					this.state = 36;
					_la = this._input.LA(1);
					if (!(_la === ExpressionAntlrParser.PLUS || _la === ExpressionAntlrParser.SUBSTRACT)) {
					this._errHandler.recoverInline(this);
					} else {
						if (this._input.LA(1) === Token.EOF) {
							this.matchedEOF = true;
						}
						this._errHandler.reportMatch(this);
						this.consume();
					}
					this.state = 37;
					this.expression(10);
					}
					break;
				case 4:
					{
					_localctx = new BinaryOpExpContext(new ExpressionContext(_parentctx, _parentState));
					this.pushNewRecursionContext(_localctx, _startState, ExpressionAntlrParser.RULE_expression);
					this.state = 38;
					if (!(this.precpred(this._ctx, 8))) {
						throw new FailedPredicateException(this, "this.precpred(this._ctx, 8)");
					}
					this.state = 39;
					_la = this._input.LA(1);
					if (!(_la === ExpressionAntlrParser.DOUBLE_EQUAL || _la === ExpressionAntlrParser.NOT_EQUAL)) {
					this._errHandler.recoverInline(this);
					} else {
						if (this._input.LA(1) === Token.EOF) {
							this.matchedEOF = true;
						}
						this._errHandler.reportMatch(this);
						this.consume();
					}
					this.state = 40;
					this.expression(9);
					}
					break;
				case 5:
					{
					_localctx = new BinaryOpExpContext(new ExpressionContext(_parentctx, _parentState));
					this.pushNewRecursionContext(_localctx, _startState, ExpressionAntlrParser.RULE_expression);
					this.state = 41;
					if (!(this.precpred(this._ctx, 7))) {
						throw new FailedPredicateException(this, "this.precpred(this._ctx, 7)");
					}
					{
					this.state = 42;
					this.match(ExpressionAntlrParser.SINGLE_AND);
					}
					this.state = 43;
					this.expression(8);
					}
					break;
				case 6:
					{
					_localctx = new BinaryOpExpContext(new ExpressionContext(_parentctx, _parentState));
					this.pushNewRecursionContext(_localctx, _startState, ExpressionAntlrParser.RULE_expression);
					this.state = 44;
					if (!(this.precpred(this._ctx, 6))) {
						throw new FailedPredicateException(this, "this.precpred(this._ctx, 6)");
					}
					this.state = 45;
					_la = this._input.LA(1);
					if (!((((_la) & ~0x1F) === 0 && ((1 << _la) & ((1 << ExpressionAntlrParser.LESS_THAN) | (1 << ExpressionAntlrParser.MORE_THAN) | (1 << ExpressionAntlrParser.LESS_OR_EQUAl) | (1 << ExpressionAntlrParser.MORE_OR_EQUAL))) !== 0))) {
					this._errHandler.recoverInline(this);
					} else {
						if (this._input.LA(1) === Token.EOF) {
							this.matchedEOF = true;
						}
						this._errHandler.reportMatch(this);
						this.consume();
					}
					this.state = 46;
					this.expression(7);
					}
					break;
				case 7:
					{
					_localctx = new BinaryOpExpContext(new ExpressionContext(_parentctx, _parentState));
					this.pushNewRecursionContext(_localctx, _startState, ExpressionAntlrParser.RULE_expression);
					this.state = 47;
					if (!(this.precpred(this._ctx, 5))) {
						throw new FailedPredicateException(this, "this.precpred(this._ctx, 5)");
					}
					this.state = 48;
					this.match(ExpressionAntlrParser.DOUBLE_AND);
					this.state = 49;
					this.expression(6);
					}
					break;
				case 8:
					{
					_localctx = new BinaryOpExpContext(new ExpressionContext(_parentctx, _parentState));
					this.pushNewRecursionContext(_localctx, _startState, ExpressionAntlrParser.RULE_expression);
					this.state = 50;
					if (!(this.precpred(this._ctx, 4))) {
						throw new FailedPredicateException(this, "this.precpred(this._ctx, 4)");
					}
					this.state = 51;
					this.match(ExpressionAntlrParser.DOUBLE_VERTICAL_CYLINDER);
					this.state = 52;
					this.expression(5);
					}
					break;
				case 9:
					{
					_localctx = new BinaryOpExpContext(new ExpressionContext(_parentctx, _parentState));
					this.pushNewRecursionContext(_localctx, _startState, ExpressionAntlrParser.RULE_expression);
					this.state = 53;
					if (!(this.precpred(this._ctx, 3))) {
						throw new FailedPredicateException(this, "this.precpred(this._ctx, 3)");
					}
					this.state = 54;
					this.match(ExpressionAntlrParser.NULL_COALESCE);
					this.state = 55;
					this.expression(4);
					}
					break;
				case 10:
					{
					// Ternary `cond ? a : b`.
					_localctx = new TripleOpExpContext(new ExpressionContext(_parentctx, _parentState));
					this.pushNewRecursionContext(_localctx, _startState, ExpressionAntlrParser.RULE_expression);
					this.state = 56;
					if (!(this.precpred(this._ctx, 2))) {
						throw new FailedPredicateException(this, "this.precpred(this._ctx, 2)");
					}
					this.state = 57;
					this.match(ExpressionAntlrParser.QUESTION_MARK);
					this.state = 58;
					this.expression(0);
					this.state = 59;
					this.match(ExpressionAntlrParser.COLON);
					this.state = 60;
					this.expression(3);
					}
					break;
				}
				}
			}
			this.state = 66;
			this._errHandler.sync(this);
			_alt = this.interpreter.adaptivePredict(this._input, 2, this._ctx);
		}
		}
	}
	catch (re) {
		if (re instanceof RecognitionException) {
			_localctx.exception = re;
			this._errHandler.reportError(this, re);
			this._errHandler.recover(this, re);
		} else {
			throw re;
		}
	}
	finally {
		this.unrollRecursionContexts(_parentctx);
	}
	return _localctx;
}
public primaryExpression(): PrimaryExpressionContext;
public primaryExpression(_p: number): PrimaryExpressionContext;
// @RuleVersion(0)
/**
 * Parses the left-recursive `primaryExpression` rule: atoms
 * (parenthesized expression, array/JSON literal, number, string,
 * identifier, string interpolation) followed by member-access,
 * function-invocation and index-access suffixes.
 * Generated by ANTLR — do not hand-edit.
 */
public primaryExpression(_p?: number): PrimaryExpressionContext {
	if (_p === undefined) {
		_p = 0;
	}
	let _parentctx: ParserRuleContext = this._ctx;
	let _parentState: number = this.state;
	let _localctx: PrimaryExpressionContext = new PrimaryExpressionContext(this._ctx, _parentState);
	let _prevctx: PrimaryExpressionContext = _localctx;
	let _startState: number = 4;
	this.enterRecursionRule(_localctx, 4, ExpressionAntlrParser.RULE_primaryExpression, _p);
	let _la: number;
	try {
		let _alt: number;
		this.enterOuterAlt(_localctx, 1);
		{
		// Atom: choose the base alternative by the next token.
		this.state = 86;
		this._errHandler.sync(this);
		switch (this._input.LA(1)) {
		case ExpressionAntlrParser.OPEN_BRACKET:
			{
			_localctx = new ParenthesisExpContext(_localctx);
			this._ctx = _localctx;
			_prevctx = _localctx;
			this.state = 68;
			this.match(ExpressionAntlrParser.OPEN_BRACKET);
			this.state = 69;
			this.expression(0);
			this.state = 70;
			this.match(ExpressionAntlrParser.CLOSE_BRACKET);
			}
			break;
		case ExpressionAntlrParser.OPEN_SQUARE_BRACKET:
			{
			_localctx = new ArrayCreationExpContext(_localctx);
			this._ctx = _localctx;
			_prevctx = _localctx;
			this.state = 72;
			this.match(ExpressionAntlrParser.OPEN_SQUARE_BRACKET);
			this.state = 74;
			this._errHandler.sync(this);
			_la = this._input.LA(1);
			// Optional element list inside `[...]`.
			if ((((_la) & ~0x1F) === 0 && ((1 << _la) & ((1 << ExpressionAntlrParser.STRING_INTERPOLATION_START) | (1 << ExpressionAntlrParser.PLUS) | (1 << ExpressionAntlrParser.SUBSTRACT) | (1 << ExpressionAntlrParser.NON) | (1 << ExpressionAntlrParser.OPEN_BRACKET) | (1 << ExpressionAntlrParser.OPEN_SQUARE_BRACKET) | (1 << ExpressionAntlrParser.OPEN_CURLY_BRACKET) | (1 << ExpressionAntlrParser.NUMBER))) !== 0) || _la === ExpressionAntlrParser.IDENTIFIER || _la === ExpressionAntlrParser.STRING) {
				{
				this.state = 73;
				this.argsList();
				}
			}
			this.state = 76;
			this.match(ExpressionAntlrParser.CLOSE_SQUARE_BRACKET);
			}
			break;
		case ExpressionAntlrParser.OPEN_CURLY_BRACKET:
			{
			_localctx = new JsonCreationExpContext(_localctx);
			this._ctx = _localctx;
			_prevctx = _localctx;
			this.state = 77;
			this.match(ExpressionAntlrParser.OPEN_CURLY_BRACKET);
			this.state = 79;
			this._errHandler.sync(this);
			_la = this._input.LA(1);
			// Optional key/value list inside `{...}`.
			if (_la === ExpressionAntlrParser.IDENTIFIER || _la === ExpressionAntlrParser.STRING) {
				{
				this.state = 78;
				this.keyValuePairList();
				}
			}
			this.state = 81;
			this.match(ExpressionAntlrParser.CLOSE_CURLY_BRACKET);
			}
			break;
		case ExpressionAntlrParser.NUMBER:
			{
			_localctx = new NumericAtomContext(_localctx);
			this._ctx = _localctx;
			_prevctx = _localctx;
			this.state = 82;
			this.match(ExpressionAntlrParser.NUMBER);
			}
			break;
		case ExpressionAntlrParser.STRING:
			{
			_localctx = new StringAtomContext(_localctx);
			this._ctx = _localctx;
			_prevctx = _localctx;
			this.state = 83;
			this.match(ExpressionAntlrParser.STRING);
			}
			break;
		case ExpressionAntlrParser.IDENTIFIER:
			{
			_localctx = new IdAtomContext(_localctx);
			this._ctx = _localctx;
			_prevctx = _localctx;
			this.state = 84;
			this.match(ExpressionAntlrParser.IDENTIFIER);
			}
			break;
		case ExpressionAntlrParser.STRING_INTERPOLATION_START:
			{
			_localctx = new StringInterpolationAtomContext(_localctx);
			this._ctx = _localctx;
			_prevctx = _localctx;
			this.state = 85;
			this.stringInterpolation();
			}
			break;
		default:
			throw new NoViableAltException(this);
		}
		this._ctx._stop = this._input.tryLT(-1);
		// Suffix loop: `.IDENTIFIER`, `(args)` invocation, `[expr]` index.
		this.state = 107;
		this._errHandler.sync(this);
		_alt = this.interpreter.adaptivePredict(this._input, 9, this._ctx);
		while (_alt !== 2 && _alt !== ATN.INVALID_ALT_NUMBER) {
			if (_alt === 1) {
				if (this._parseListeners != null) {
					this.triggerExitRuleEvent();
				}
				_prevctx = _localctx;
				{
				this.state = 105;
				this._errHandler.sync(this);
				switch ( this.interpreter.adaptivePredict(this._input, 8, this._ctx) ) {
				case 1:
					{
					_localctx = new MemberAccessExpContext(new PrimaryExpressionContext(_parentctx, _parentState));
					this.pushNewRecursionContext(_localctx, _startState, ExpressionAntlrParser.RULE_primaryExpression);
					this.state = 88;
					if (!(this.precpred(this._ctx, 3))) {
						throw new FailedPredicateException(this, "this.precpred(this._ctx, 3)");
					}
					this.state = 89;
					this.match(ExpressionAntlrParser.DOT);
					this.state = 90;
					this.match(ExpressionAntlrParser.IDENTIFIER);
					}
					break;
				case 2:
					{
					_localctx = new FuncInvokeExpContext(new PrimaryExpressionContext(_parentctx, _parentState));
					this.pushNewRecursionContext(_localctx, _startState, ExpressionAntlrParser.RULE_primaryExpression);
					this.state = 91;
					if (!(this.precpred(this._ctx, 2))) {
						throw new FailedPredicateException(this, "this.precpred(this._ctx, 2)");
					}
					this.state = 93;
					this._errHandler.sync(this);
					_la = this._input.LA(1);
					// Optional `!` before the call's opening bracket.
					if (_la === ExpressionAntlrParser.NON) {
						{
						this.state = 92;
						this.match(ExpressionAntlrParser.NON);
						}
					}
					this.state = 95;
					this.match(ExpressionAntlrParser.OPEN_BRACKET);
					this.state = 97;
					this._errHandler.sync(this);
					_la = this._input.LA(1);
					if ((((_la) & ~0x1F) === 0 && ((1 << _la) & ((1 << ExpressionAntlrParser.STRING_INTERPOLATION_START) | (1 << ExpressionAntlrParser.PLUS) | (1 << ExpressionAntlrParser.SUBSTRACT) | (1 << ExpressionAntlrParser.NON) | (1 << ExpressionAntlrParser.OPEN_BRACKET) | (1 << ExpressionAntlrParser.OPEN_SQUARE_BRACKET) | (1 << ExpressionAntlrParser.OPEN_CURLY_BRACKET) | (1 << ExpressionAntlrParser.NUMBER))) !== 0) || _la === ExpressionAntlrParser.IDENTIFIER || _la === ExpressionAntlrParser.STRING) {
						{
						this.state = 96;
						this.argsList();
						}
					}
					this.state = 99;
					this.match(ExpressionAntlrParser.CLOSE_BRACKET);
					}
					break;
				case 3:
					{
					_localctx = new IndexAccessExpContext(new PrimaryExpressionContext(_parentctx, _parentState));
					this.pushNewRecursionContext(_localctx, _startState, ExpressionAntlrParser.RULE_primaryExpression);
					this.state = 100;
					if (!(this.precpred(this._ctx, 1))) {
						throw new FailedPredicateException(this, "this.precpred(this._ctx, 1)");
					}
					this.state = 101;
					this.match(ExpressionAntlrParser.OPEN_SQUARE_BRACKET);
					this.state = 102;
					this.expression(0);
					this.state = 103;
					this.match(ExpressionAntlrParser.CLOSE_SQUARE_BRACKET);
					}
					break;
				}
				}
			}
			this.state = 109;
			this._errHandler.sync(this);
			_alt = this.interpreter.adaptivePredict(this._input, 9, this._ctx);
		}
		}
	}
	catch (re) {
		if (re instanceof RecognitionException) {
			_localctx.exception = re;
			this._errHandler.reportError(this, re);
			this._errHandler.recover(this, re);
		} else {
			throw re;
		}
	}
	finally {
		this.unrollRecursionContexts(_parentctx);
	}
	return _localctx;
}
// @RuleVersion(0)
/**
 * Parses the `stringInterpolation` rule: a start delimiter, then any
 * mix of escape characters, templates and text content, then the
 * closing delimiter. Generated by ANTLR — do not hand-edit.
 */
public stringInterpolation(): StringInterpolationContext {
	let _localctx: StringInterpolationContext = new StringInterpolationContext(this._ctx, this.state);
	this.enterRule(_localctx, 6, ExpressionAntlrParser.RULE_stringInterpolation);
	let _la: number;
	try {
		this.enterOuterAlt(_localctx, 1);
		{
		this.state = 110;
		this.match(ExpressionAntlrParser.STRING_INTERPOLATION_START);
		this.state = 116;
		this._errHandler.sync(this);
		_la = this._input.LA(1);
		// Loop while the lookahead is TEMPLATE, ESCAPE_CHARACTER or
		// TEXT_CONTENT (token types offset by 36 for the bit test).
		while (((((_la - 36)) & ~0x1F) === 0 && ((1 << (_la - 36)) & ((1 << (ExpressionAntlrParser.TEMPLATE - 36)) | (1 << (ExpressionAntlrParser.ESCAPE_CHARACTER - 36)) | (1 << (ExpressionAntlrParser.TEXT_CONTENT - 36)))) !== 0)) {
			{
			this.state = 114;
			this._errHandler.sync(this);
			switch (this._input.LA(1)) {
			case ExpressionAntlrParser.ESCAPE_CHARACTER:
				{
				this.state = 111;
				this.match(ExpressionAntlrParser.ESCAPE_CHARACTER);
				}
				break;
			case ExpressionAntlrParser.TEMPLATE:
				{
				this.state = 112;
				this.match(ExpressionAntlrParser.TEMPLATE);
				}
				break;
			case ExpressionAntlrParser.TEXT_CONTENT:
				{
				this.state = 113;
				this.textContent();
				}
				break;
			default:
				throw new NoViableAltException(this);
			}
			}
			this.state = 118;
			this._errHandler.sync(this);
			_la = this._input.LA(1);
		}
		this.state = 119;
		this.match(ExpressionAntlrParser.STRING_INTERPOLATION_START);
		}
	}
	catch (re) {
		if (re instanceof RecognitionException) {
			_localctx.exception = re;
			this._errHandler.reportError(this, re);
			this._errHandler.recover(this, re);
		} else {
			throw re;
		}
	}
	finally {
		this.exitRule();
	}
	return _localctx;
}
// @RuleVersion(0)
/**
 * Parses the `textContent` rule: one or more TEXT_CONTENT tokens.
 * Generated by ANTLR — do not hand-edit.
 */
public textContent(): TextContentContext {
	let _localctx: TextContentContext = new TextContentContext(this._ctx, this.state);
	this.enterRule(_localctx, 8, ExpressionAntlrParser.RULE_textContent);
	try {
		let _alt: number;
		this.enterOuterAlt(_localctx, 1);
		{
		this.state = 122;
		this._errHandler.sync(this);
		_alt = 1;
		// One-or-more loop driven by adaptive prediction.
		do {
			switch (_alt) {
			case 1:
				{
				{
				this.state = 121;
				this.match(ExpressionAntlrParser.TEXT_CONTENT);
				}
				}
				break;
			default:
				throw new NoViableAltException(this);
			}
			this.state = 124;
			this._errHandler.sync(this);
			_alt = this.interpreter.adaptivePredict(this._input, 12, this._ctx);
		} while (_alt !== 2 && _alt !== ATN.INVALID_ALT_NUMBER);
		}
	}
	catch (re) {
		if (re instanceof RecognitionException) {
			_localctx.exception = re;
			this._errHandler.reportError(this, re);
			this._errHandler.recover(this, re);
		} else {
			throw re;
		}
	}
	finally {
		this.exitRule();
	}
	return _localctx;
}
// @RuleVersion(0)
/**
 * Parses the `argsList` rule: a comma-separated list whose entries are
 * either lambdas or expressions. Generated by ANTLR — do not hand-edit.
 */
public argsList(): ArgsListContext {
	let _localctx: ArgsListContext = new ArgsListContext(this._ctx, this.state);
	this.enterRule(_localctx, 10, ExpressionAntlrParser.RULE_argsList);
	let _la: number;
	try {
		this.enterOuterAlt(_localctx, 1);
		{
		// First entry: lambda vs expression decided by prediction.
		this.state = 128;
		this._errHandler.sync(this);
		switch ( this.interpreter.adaptivePredict(this._input, 13, this._ctx) ) {
		case 1:
			{
			this.state = 126;
			this.lambda();
			}
			break;
		case 2:
			{
			this.state = 127;
			this.expression(0);
			}
			break;
		}
		// Remaining entries: `, (lambda | expression)`.
		this.state = 137;
		this._errHandler.sync(this);
		_la = this._input.LA(1);
		while (_la === ExpressionAntlrParser.COMMA) {
			{
			{
			this.state = 130;
			this.match(ExpressionAntlrParser.COMMA);
			this.state = 133;
			this._errHandler.sync(this);
			switch ( this.interpreter.adaptivePredict(this._input, 14, this._ctx) ) {
			case 1:
				{
				this.state = 131;
				this.lambda();
				}
				break;
			case 2:
				{
				this.state = 132;
				this.expression(0);
				}
				break;
			}
			}
			}
			this.state = 139;
			this._errHandler.sync(this);
			_la = this._input.LA(1);
		}
		}
	}
	catch (re) {
		if (re instanceof RecognitionException) {
			_localctx.exception = re;
			this._errHandler.reportError(this, re);
			this._errHandler.recover(this, re);
		} else {
			throw re;
		}
	}
	finally {
		this.exitRule();
	}
	return _localctx;
}
// @RuleVersion(0)
/**
 * Parses the `lambda` rule: `IDENTIFIER => expression`.
 * Generated by ANTLR — do not hand-edit.
 */
public lambda(): LambdaContext {
	let _localctx: LambdaContext = new LambdaContext(this._ctx, this.state);
	this.enterRule(_localctx, 12, ExpressionAntlrParser.RULE_lambda);
	try {
		this.enterOuterAlt(_localctx, 1);
		{
		this.state = 140;
		this.match(ExpressionAntlrParser.IDENTIFIER);
		this.state = 141;
		this.match(ExpressionAntlrParser.ARROW);
		this.state = 142;
		this.expression(0);
		}
	}
	catch (re) {
		if (re instanceof RecognitionException) {
			_localctx.exception = re;
			this._errHandler.reportError(this, re);
			this._errHandler.recover(this, re);
		} else {
			throw re;
		}
	}
	finally {
		this.exitRule();
	}
	return _localctx;
}
// @RuleVersion(0)
/**
 * Parses the `keyValuePairList` rule: one `keyValuePair` followed by
 * zero or more `, keyValuePair`. Generated by ANTLR — do not hand-edit.
 */
public keyValuePairList(): KeyValuePairListContext {
	let _localctx: KeyValuePairListContext = new KeyValuePairListContext(this._ctx, this.state);
	this.enterRule(_localctx, 14, ExpressionAntlrParser.RULE_keyValuePairList);
	let _la: number;
	try {
		this.enterOuterAlt(_localctx, 1);
		{
		this.state = 144;
		this.keyValuePair();
		this.state = 149;
		this._errHandler.sync(this);
		_la = this._input.LA(1);
		while (_la === ExpressionAntlrParser.COMMA) {
			{
			{
			this.state = 145;
			this.match(ExpressionAntlrParser.COMMA);
			this.state = 146;
			this.keyValuePair();
			}
			}
			this.state = 151;
			this._errHandler.sync(this);
			_la = this._input.LA(1);
		}
		}
	}
	catch (re) {
		if (re instanceof RecognitionException) {
			_localctx.exception = re;
			this._errHandler.reportError(this, re);
			this._errHandler.recover(this, re);
		} else {
			throw re;
		}
	}
	finally {
		this.exitRule();
	}
	return _localctx;
}
	// @RuleVersion(0)
	/**
	 * Parses the `keyValuePair` rule: `key COLON expression` — a single
	 * entry of an object/JSON literal.
	 *
	 * State numbers reference the serialized ATN; do not edit them by hand.
	 */
	public keyValuePair(): KeyValuePairContext {
		let _localctx: KeyValuePairContext = new KeyValuePairContext(this._ctx, this.state);
		this.enterRule(_localctx, 16, ExpressionAntlrParser.RULE_keyValuePair);
		try {
			this.enterOuterAlt(_localctx, 1);
			{
			// Fixed sequence: key, ':', value expression.
			this.state = 152;
			this.key();
			this.state = 153;
			this.match(ExpressionAntlrParser.COLON);
			this.state = 154;
			this.expression(0);
			}
		}
		catch (re) {
			// Standard ANTLR recovery: record, report, resynchronize.
			if (re instanceof RecognitionException) {
				_localctx.exception = re;
				this._errHandler.reportError(this, re);
				this._errHandler.recover(this, re);
			} else {
				throw re;
			}
		}
		finally {
			this.exitRule();
		}
		return _localctx;
	}
	// @RuleVersion(0)
	/**
	 * Parses the `key` rule: a single IDENTIFIER or STRING token (the key
	 * position of a key/value pair).
	 *
	 * State numbers reference the serialized ATN; do not edit them by hand.
	 */
	public key(): KeyContext {
		let _localctx: KeyContext = new KeyContext(this._ctx, this.state);
		this.enterRule(_localctx, 18, ExpressionAntlrParser.RULE_key);
		let _la: number;
		try {
			this.enterOuterAlt(_localctx, 1);
			{
			this.state = 156;
			_la = this._input.LA(1);
			// Token-set match: accept IDENTIFIER or STRING, otherwise try
			// single-token inline recovery.
			if (!(_la === ExpressionAntlrParser.IDENTIFIER || _la === ExpressionAntlrParser.STRING)) {
				this._errHandler.recoverInline(this);
			} else {
				if (this._input.LA(1) === Token.EOF) {
					this.matchedEOF = true;
				}
				this._errHandler.reportMatch(this);
				this.consume();
			}
			}
		}
		catch (re) {
			// Standard ANTLR recovery: record, report, resynchronize.
			if (re instanceof RecognitionException) {
				_localctx.exception = re;
				this._errHandler.reportError(this, re);
				this._errHandler.recover(this, re);
			} else {
				throw re;
			}
		}
		finally {
			this.exitRule();
		}
		return _localctx;
	}
public sempred(_localctx: RuleContext, ruleIndex: number, predIndex: number): boolean {
switch (ruleIndex) {
case 1:
return this.expression_sempred(_localctx as ExpressionContext, predIndex);
case 2:
return this.primaryExpression_sempred(_localctx as PrimaryExpressionContext, predIndex);
}
return true;
}
private expression_sempred(_localctx: ExpressionContext, predIndex: number): boolean {
switch (predIndex) {
case 0:
return this.precpred(this._ctx, 11);
case 1:
return this.precpred(this._ctx, 10);
case 2:
return this.precpred(this._ctx, 9);
case 3:
return this.precpred(this._ctx, 8);
case 4:
return this.precpred(this._ctx, 7);
case 5:
return this.precpred(this._ctx, 6);
case 6:
return this.precpred(this._ctx, 5);
case 7:
return this.precpred(this._ctx, 4);
case 8:
return this.precpred(this._ctx, 3);
case 9:
return this.precpred(this._ctx, 2);
}
return true;
}
private primaryExpression_sempred(_localctx: PrimaryExpressionContext, predIndex: number): boolean {
switch (predIndex) {
case 10:
return this.precpred(this._ctx, 3);
case 11:
return this.precpred(this._ctx, 2);
case 12:
return this.precpred(this._ctx, 1);
}
return true;
}
	// Serialized ATN (augmented transition network) emitted by the ANTLR
	// tool. Do not edit by hand: the state numbers hard-coded in the rule
	// methods above refer directly into this table.
	public static readonly _serializedATN: string =
		"\x03\uC91D\uCABA\u058D\uAFBA\u4F53\u0607\uEA8B\uC241\x03(\xA1\x04\x02" +
		"\t\x02\x04\x03\t\x03\x04\x04\t\x04\x04\x05\t\x05\x04\x06\t\x06\x04\x07" +
		"\t\x07\x04\b\t\b\x04\t\t\t\x04\n\t\n\x04\v\t\v\x03\x02\x03\x02\x03\x02" +
		"\x03\x03\x03\x03\x03\x03\x03\x03\x05\x03\x1E\n\x03\x03\x03\x03\x03\x03" +
		"\x03\x03\x03\x03\x03\x03\x03\x03\x03\x03\x03\x03\x03\x03\x03\x03\x03\x03" +
		"\x03\x03\x03\x03\x03\x03\x03\x03\x03\x03\x03\x03\x03\x03\x03\x03\x03\x03" +
		"\x03\x03\x03\x03\x03\x03\x03\x03\x03\x03\x03\x03\x03\x03\x03\x03\x03\x03" +
		"\x03\x03\x03\x03\x03\x03\x03\x07\x03A\n\x03\f\x03\x0E\x03D\v\x03\x03\x04" +
		"\x03\x04\x03\x04\x03\x04\x03\x04\x03\x04\x03\x04\x05\x04M\n\x04\x03\x04" +
		"\x03\x04\x03\x04\x05\x04R\n\x04\x03\x04\x03\x04\x03\x04\x03\x04\x03\x04" +
		"\x05\x04Y\n\x04\x03\x04\x03\x04\x03\x04\x03\x04\x03\x04\x05\x04`\n\x04" +
		"\x03\x04\x03\x04\x05\x04d\n\x04\x03\x04\x03\x04\x03\x04\x03\x04\x03\x04" +
		"\x03\x04\x07\x04l\n\x04\f\x04\x0E\x04o\v\x04\x03\x05\x03\x05\x03\x05\x03" +
		"\x05\x07\x05u\n\x05\f\x05\x0E\x05x\v\x05\x03\x05\x03\x05\x03\x06\x06\x06" +
		"}\n\x06\r\x06\x0E\x06~\x03\x07\x03\x07\x05\x07\x83\n\x07\x03\x07\x03\x07" +
		"\x03\x07\x05\x07\x88\n\x07\x07\x07\x8A\n\x07\f\x07\x0E\x07\x8D\v\x07\x03" +
		"\b\x03\b\x03\b\x03\b\x03\t\x03\t\x03\t\x07\t\x96\n\t\f\t\x0E\t\x99\v\t" +
		"\x03\n\x03\n\x03\n\x03\n\x03\v\x03\v\x03\v\x02\x02\x04\x04\x06\f\x02\x02" +
		"\x04\x02\x06\x02\b\x02\n\x02\f\x02\x0E\x02\x10\x02\x12\x02\x14\x02\x02" +
		"\b\x03\x02\x04\x06\x03\x02\b\n\x03\x02\x04\x05\x03\x02\v\f\x03\x02\x10" +
		"\x13\x04\x02\"\"$$\x02\xB6\x02\x16\x03\x02\x02\x02\x04\x1D\x03\x02\x02" +
		"\x02\x06X\x03\x02\x02\x02\bp\x03\x02\x02\x02\n|\x03\x02\x02\x02\f\x82" +
		"\x03\x02\x02\x02\x0E\x8E\x03\x02\x02\x02\x10\x92\x03\x02\x02\x02\x12\x9A" +
		"\x03\x02\x02\x02\x14\x9E\x03\x02\x02\x02\x16\x17\x05\x04\x03\x02\x17\x18" +
		"\x07\x02\x02\x03\x18\x03\x03\x02\x02\x02\x19\x1A\b\x03\x01\x02\x1A\x1B" +
		"\t\x02\x02\x02\x1B\x1E\x05\x04\x03\x0E\x1C\x1E\x05\x06\x04\x02\x1D\x19" +
		"\x03\x02\x02\x02\x1D\x1C\x03\x02\x02\x02\x1EB\x03\x02\x02\x02\x1F \f\r" +
		"\x02\x02 !\x07\x07\x02\x02!A\x05\x04\x03\r\"#\f\f\x02\x02#$\t\x03\x02" +
		"\x02$A\x05\x04\x03\r%&\f\v\x02\x02&\'\t\x04\x02\x02\'A\x05\x04\x03\f(" +
		")\f\n\x02\x02)*\t\x05\x02\x02*A\x05\x04\x03\v+,\f\t\x02\x02,-\x07\r\x02" +
		"\x02-A\x05\x04\x03\n./\f\b\x02\x02/0\t\x06\x02\x020A\x05\x04\x03\t12\f" +
		"\x07\x02\x0223\x07\x0E\x02\x023A\x05\x04\x03\b45\f\x06\x02\x0256\x07\x0F" +
		"\x02\x026A\x05\x04\x03\x0778\f\x05\x02\x0289\x07\x1E\x02\x029A\x05\x04" +
		"\x03\x06:;\f\x04\x02\x02;<\x07\x1F\x02\x02<=\x05\x04\x03\x02=>\x07\x1C" +
		"\x02\x02>?\x05\x04\x03\x05?A\x03\x02\x02\x02@\x1F\x03\x02\x02\x02@\"\x03" +
		"\x02\x02\x02@%\x03\x02\x02\x02@(\x03\x02\x02\x02@+\x03\x02\x02\x02@.\x03" +
		"\x02\x02\x02@1\x03\x02\x02\x02@4\x03\x02\x02\x02@7\x03\x02\x02\x02@:\x03" +
		"\x02\x02\x02AD\x03\x02\x02\x02B@\x03\x02\x02\x02BC\x03\x02\x02\x02C\x05" +
		"\x03\x02\x02\x02DB\x03\x02\x02\x02EF\b\x04\x01\x02FG\x07\x14\x02\x02G" +
		"H\x05\x04\x03\x02HI\x07\x15\x02\x02IY\x03\x02\x02\x02JL\x07\x17\x02\x02" +
		"KM\x05\f\x07\x02LK\x03\x02\x02\x02LM\x03\x02\x02\x02MN\x03\x02\x02\x02" +
		"NY\x07\x18\x02\x02OQ\x07\x19\x02\x02PR\x05\x10\t\x02QP\x03\x02\x02\x02" +
		"QR\x03\x02\x02\x02RS\x03\x02\x02\x02SY\x07\x1A\x02\x02TY\x07 \x02\x02" +
		"UY\x07$\x02\x02VY\x07\"\x02\x02WY\x05\b\x05\x02XE\x03\x02\x02\x02XJ\x03" +
		"\x02\x02\x02XO\x03\x02\x02\x02XT\x03\x02\x02\x02XU\x03\x02\x02\x02XV\x03" +
		"\x02\x02\x02XW\x03\x02\x02\x02Ym\x03\x02\x02\x02Z[\f\x05\x02\x02[\\\x07" +
		"\x16\x02\x02\\l\x07\"\x02\x02]_\f\x04\x02\x02^`\x07\x06\x02\x02_^\x03" +
		"\x02\x02\x02_`\x03\x02\x02\x02`a\x03\x02\x02\x02ac\x07\x14\x02\x02bd\x05" +
		"\f\x07\x02cb\x03\x02\x02\x02cd\x03\x02\x02\x02de\x03\x02\x02\x02el\x07" +
		"\x15\x02\x02fg\f\x03\x02\x02gh\x07\x17\x02\x02hi\x05\x04\x03\x02ij\x07" +
		"\x18\x02\x02jl\x03\x02\x02\x02kZ\x03\x02\x02\x02k]\x03\x02\x02\x02kf\x03" +
		"\x02\x02\x02lo\x03\x02\x02\x02mk\x03\x02\x02\x02mn\x03\x02\x02\x02n\x07" +
		"\x03\x02\x02\x02om\x03\x02\x02\x02pv\x07\x03\x02\x02qu\x07\'\x02\x02r" +
		"u\x07&\x02\x02su\x05\n\x06\x02tq\x03\x02\x02\x02tr\x03\x02\x02\x02ts\x03" +
		"\x02\x02\x02ux\x03\x02\x02\x02vt\x03\x02\x02\x02vw\x03\x02\x02\x02wy\x03" +
		"\x02\x02\x02xv\x03\x02\x02\x02yz\x07\x03\x02\x02z\t\x03\x02\x02\x02{}" +
		"\x07(\x02\x02|{\x03\x02\x02\x02}~\x03\x02\x02\x02~|\x03\x02\x02\x02~\x7F" +
		"\x03\x02\x02\x02\x7F\v\x03\x02\x02\x02\x80\x83\x05\x0E\b\x02\x81\x83\x05" +
		"\x04\x03\x02\x82\x80\x03\x02\x02\x02\x82\x81\x03\x02\x02\x02\x83\x8B\x03" +
		"\x02\x02\x02\x84\x87\x07\x1B\x02\x02\x85\x88\x05\x0E\b\x02\x86\x88\x05" +
		"\x04\x03\x02\x87\x85\x03\x02\x02\x02\x87\x86\x03\x02\x02\x02\x88\x8A\x03" +
		"\x02\x02\x02\x89\x84\x03\x02\x02\x02\x8A\x8D\x03\x02\x02\x02\x8B\x89\x03" +
		"\x02\x02\x02\x8B\x8C\x03\x02\x02\x02\x8C\r\x03\x02\x02\x02\x8D\x8B\x03" +
		"\x02\x02\x02\x8E\x8F\x07\"\x02\x02\x8F\x90\x07\x1D\x02\x02\x90\x91\x05" +
		"\x04\x03\x02\x91\x0F\x03\x02\x02\x02\x92\x97\x05\x12\n\x02\x93\x94\x07" +
		"\x1B\x02\x02\x94\x96\x05\x12\n\x02\x95\x93\x03\x02\x02\x02\x96\x99\x03" +
		"\x02\x02\x02\x97\x95\x03\x02\x02\x02\x97\x98\x03\x02\x02\x02\x98\x11\x03" +
		"\x02\x02\x02\x99\x97\x03\x02\x02\x02\x9A\x9B\x05\x14\v\x02\x9B\x9C\x07" +
		"\x1C\x02\x02\x9C\x9D\x05\x04\x03\x02\x9D\x13\x03\x02\x02\x02\x9E\x9F\t" +
		"\x07\x02\x02\x9F\x15\x03\x02\x02\x02\x13\x1D@BLQX_ckmtv~\x82\x87\x8B\x97";
	// Cache for the deserialized ATN, shared by all parser instances.
	public static __ATN: ATN;
	public static get _ATN(): ATN {
		if (!ExpressionAntlrParser.__ATN) {
			// Deserialize once on first access and memoize on the class.
			ExpressionAntlrParser.__ATN = new ATNDeserializer().deserialize(Utils.toCharArray(ExpressionAntlrParser._serializedATN));
		}
		return ExpressionAntlrParser.__ATN;
	}
}
/**
 * Parse-tree node for the `file` start rule: one expression followed by
 * end-of-file.
 */
export class FileContext extends ParserRuleContext {
	public expression(): ExpressionContext {
		return this.getRuleContext(0, ExpressionContext);
	}
	public EOF(): TerminalNode { return this.getToken(ExpressionAntlrParser.EOF, 0); }
	constructor(parent: ParserRuleContext | undefined, invokingState: number) {
		super(parent, invokingState);
	}
	// @Override
	public get ruleIndex(): number { return ExpressionAntlrParser.RULE_file; }
	// @Override
	public enterRule(listener: ExpressionAntlrParserListener): void {
		if (!listener.enterFile) {
			return;
		}
		listener.enterFile(this);
	}
	// @Override
	public exitRule(listener: ExpressionAntlrParserListener): void {
		if (!listener.exitFile) {
			return;
		}
		listener.exitFile(this);
	}
	// @Override
	public accept<Result>(visitor: ExpressionAntlrParserVisitor<Result>): Result {
		return visitor.visitFile ? visitor.visitFile(this) : visitor.visitChildren(this);
	}
}
/**
 * Base parse-tree node for the `expression` rule. Concrete alternatives
 * (unary, binary, ternary, primary) subclass this and call `copyFrom` to
 * take over the children already collected for the generic context.
 */
export class ExpressionContext extends ParserRuleContext {
	constructor(parent: ParserRuleContext | undefined, invokingState: number) {
		super(parent, invokingState);
	}
	// @Override
	public get ruleIndex(): number { return ExpressionAntlrParser.RULE_expression; }
	public copyFrom(ctx: ExpressionContext): void {
		super.copyFrom(ctx);
	}
}
/**
 * `expression` alternative: a prefix unary operator (NON, SUBSTRACT, or
 * PLUS token) applied to a single operand expression.
 */
export class UnaryOpExpContext extends ExpressionContext {
	public expression(): ExpressionContext {
		return this.getRuleContext(0, ExpressionContext);
	}
	public NON(): TerminalNode | undefined { return this.tryGetToken(ExpressionAntlrParser.NON, 0); }
	public SUBSTRACT(): TerminalNode | undefined { return this.tryGetToken(ExpressionAntlrParser.SUBSTRACT, 0); }
	public PLUS(): TerminalNode | undefined { return this.tryGetToken(ExpressionAntlrParser.PLUS, 0); }
	constructor(ctx: ExpressionContext) {
		super(ctx.parent, ctx.invokingState);
		this.copyFrom(ctx);
	}
	// @Override
	public enterRule(listener: ExpressionAntlrParserListener): void {
		if (!listener.enterUnaryOpExp) {
			return;
		}
		listener.enterUnaryOpExp(this);
	}
	// @Override
	public exitRule(listener: ExpressionAntlrParserListener): void {
		if (!listener.exitUnaryOpExp) {
			return;
		}
		listener.exitUnaryOpExp(this);
	}
	// @Override
	public accept<Result>(visitor: ExpressionAntlrParserVisitor<Result>): Result {
		return visitor.visitUnaryOpExp ? visitor.visitUnaryOpExp(this) : visitor.visitChildren(this);
	}
}
/**
 * `expression` alternative: two operand expressions joined by a binary
 * operator token (arithmetic, comparison, logical, or null-coalescing).
 * Exactly one of the operator accessors below returns a token.
 */
export class BinaryOpExpContext extends ExpressionContext {
	public expression(): ExpressionContext[];
	public expression(i: number): ExpressionContext;
	public expression(i?: number): ExpressionContext | ExpressionContext[] {
		return i === undefined
			? this.getRuleContexts(ExpressionContext)
			: this.getRuleContext(i, ExpressionContext);
	}
	public XOR(): TerminalNode | undefined { return this.tryGetToken(ExpressionAntlrParser.XOR, 0); }
	public ASTERISK(): TerminalNode | undefined { return this.tryGetToken(ExpressionAntlrParser.ASTERISK, 0); }
	public SLASH(): TerminalNode | undefined { return this.tryGetToken(ExpressionAntlrParser.SLASH, 0); }
	public PERCENT(): TerminalNode | undefined { return this.tryGetToken(ExpressionAntlrParser.PERCENT, 0); }
	public PLUS(): TerminalNode | undefined { return this.tryGetToken(ExpressionAntlrParser.PLUS, 0); }
	public SUBSTRACT(): TerminalNode | undefined { return this.tryGetToken(ExpressionAntlrParser.SUBSTRACT, 0); }
	public DOUBLE_EQUAL(): TerminalNode | undefined { return this.tryGetToken(ExpressionAntlrParser.DOUBLE_EQUAL, 0); }
	public NOT_EQUAL(): TerminalNode | undefined { return this.tryGetToken(ExpressionAntlrParser.NOT_EQUAL, 0); }
	public SINGLE_AND(): TerminalNode | undefined { return this.tryGetToken(ExpressionAntlrParser.SINGLE_AND, 0); }
	public LESS_THAN(): TerminalNode | undefined { return this.tryGetToken(ExpressionAntlrParser.LESS_THAN, 0); }
	public LESS_OR_EQUAl(): TerminalNode | undefined { return this.tryGetToken(ExpressionAntlrParser.LESS_OR_EQUAl, 0); }
	public MORE_THAN(): TerminalNode | undefined { return this.tryGetToken(ExpressionAntlrParser.MORE_THAN, 0); }
	public MORE_OR_EQUAL(): TerminalNode | undefined { return this.tryGetToken(ExpressionAntlrParser.MORE_OR_EQUAL, 0); }
	public DOUBLE_AND(): TerminalNode | undefined { return this.tryGetToken(ExpressionAntlrParser.DOUBLE_AND, 0); }
	public DOUBLE_VERTICAL_CYLINDER(): TerminalNode | undefined { return this.tryGetToken(ExpressionAntlrParser.DOUBLE_VERTICAL_CYLINDER, 0); }
	public NULL_COALESCE(): TerminalNode | undefined { return this.tryGetToken(ExpressionAntlrParser.NULL_COALESCE, 0); }
	constructor(ctx: ExpressionContext) {
		super(ctx.parent, ctx.invokingState);
		this.copyFrom(ctx);
	}
	// @Override
	public enterRule(listener: ExpressionAntlrParserListener): void {
		if (!listener.enterBinaryOpExp) {
			return;
		}
		listener.enterBinaryOpExp(this);
	}
	// @Override
	public exitRule(listener: ExpressionAntlrParserListener): void {
		if (!listener.exitBinaryOpExp) {
			return;
		}
		listener.exitBinaryOpExp(this);
	}
	// @Override
	public accept<Result>(visitor: ExpressionAntlrParserVisitor<Result>): Result {
		return visitor.visitBinaryOpExp ? visitor.visitBinaryOpExp(this) : visitor.visitChildren(this);
	}
}
/**
 * `expression` alternative: the ternary conditional
 * `condition QUESTION_MARK whenTrue COLON whenFalse`.
 */
export class TripleOpExpContext extends ExpressionContext {
	public expression(): ExpressionContext[];
	public expression(i: number): ExpressionContext;
	public expression(i?: number): ExpressionContext | ExpressionContext[] {
		return i === undefined
			? this.getRuleContexts(ExpressionContext)
			: this.getRuleContext(i, ExpressionContext);
	}
	public QUESTION_MARK(): TerminalNode { return this.getToken(ExpressionAntlrParser.QUESTION_MARK, 0); }
	public COLON(): TerminalNode { return this.getToken(ExpressionAntlrParser.COLON, 0); }
	constructor(ctx: ExpressionContext) {
		super(ctx.parent, ctx.invokingState);
		this.copyFrom(ctx);
	}
	// @Override
	public enterRule(listener: ExpressionAntlrParserListener): void {
		if (!listener.enterTripleOpExp) {
			return;
		}
		listener.enterTripleOpExp(this);
	}
	// @Override
	public exitRule(listener: ExpressionAntlrParserListener): void {
		if (!listener.exitTripleOpExp) {
			return;
		}
		listener.exitTripleOpExp(this);
	}
	// @Override
	public accept<Result>(visitor: ExpressionAntlrParserVisitor<Result>): Result {
		return visitor.visitTripleOpExp ? visitor.visitTripleOpExp(this) : visitor.visitChildren(this);
	}
}
/**
 * `expression` alternative that simply wraps a `primaryExpression`.
 */
export class PrimaryExpContext extends ExpressionContext {
	public primaryExpression(): PrimaryExpressionContext {
		return this.getRuleContext(0, PrimaryExpressionContext);
	}
	constructor(ctx: ExpressionContext) {
		super(ctx.parent, ctx.invokingState);
		this.copyFrom(ctx);
	}
	// @Override
	public enterRule(listener: ExpressionAntlrParserListener): void {
		if (!listener.enterPrimaryExp) {
			return;
		}
		listener.enterPrimaryExp(this);
	}
	// @Override
	public exitRule(listener: ExpressionAntlrParserListener): void {
		if (!listener.exitPrimaryExp) {
			return;
		}
		listener.exitPrimaryExp(this);
	}
	// @Override
	public accept<Result>(visitor: ExpressionAntlrParserVisitor<Result>): Result {
		return visitor.visitPrimaryExp ? visitor.visitPrimaryExp(this) : visitor.visitChildren(this);
	}
}
/**
 * Base parse-tree node for the `primaryExpression` rule. Concrete
 * alternatives (parenthesized, atoms, member/index access, invocation,
 * creation expressions) subclass this and call `copyFrom` to take over
 * the children already collected for the generic context.
 */
export class PrimaryExpressionContext extends ParserRuleContext {
	constructor(parent: ParserRuleContext | undefined, invokingState: number) {
		super(parent, invokingState);
	}
	// @Override
	public get ruleIndex(): number { return ExpressionAntlrParser.RULE_primaryExpression; }
	public copyFrom(ctx: PrimaryExpressionContext): void {
		super.copyFrom(ctx);
	}
}
/**
 * `primaryExpression` alternative: an expression wrapped in
 * OPEN_BRACKET / CLOSE_BRACKET tokens.
 */
export class ParenthesisExpContext extends PrimaryExpressionContext {
	public OPEN_BRACKET(): TerminalNode { return this.getToken(ExpressionAntlrParser.OPEN_BRACKET, 0); }
	public expression(): ExpressionContext {
		return this.getRuleContext(0, ExpressionContext);
	}
	public CLOSE_BRACKET(): TerminalNode { return this.getToken(ExpressionAntlrParser.CLOSE_BRACKET, 0); }
	constructor(ctx: PrimaryExpressionContext) {
		super(ctx.parent, ctx.invokingState);
		this.copyFrom(ctx);
	}
	// @Override
	public enterRule(listener: ExpressionAntlrParserListener): void {
		if (!listener.enterParenthesisExp) {
			return;
		}
		listener.enterParenthesisExp(this);
	}
	// @Override
	public exitRule(listener: ExpressionAntlrParserListener): void {
		if (!listener.exitParenthesisExp) {
			return;
		}
		listener.exitParenthesisExp(this);
	}
	// @Override
	public accept<Result>(visitor: ExpressionAntlrParserVisitor<Result>): Result {
		return visitor.visitParenthesisExp ? visitor.visitParenthesisExp(this) : visitor.visitChildren(this);
	}
}
/**
 * `primaryExpression` alternative: an array literal — square brackets
 * around an optional argument list.
 */
export class ArrayCreationExpContext extends PrimaryExpressionContext {
	public OPEN_SQUARE_BRACKET(): TerminalNode { return this.getToken(ExpressionAntlrParser.OPEN_SQUARE_BRACKET, 0); }
	public CLOSE_SQUARE_BRACKET(): TerminalNode { return this.getToken(ExpressionAntlrParser.CLOSE_SQUARE_BRACKET, 0); }
	public argsList(): ArgsListContext | undefined {
		return this.tryGetRuleContext(0, ArgsListContext);
	}
	constructor(ctx: PrimaryExpressionContext) {
		super(ctx.parent, ctx.invokingState);
		this.copyFrom(ctx);
	}
	// @Override
	public enterRule(listener: ExpressionAntlrParserListener): void {
		if (!listener.enterArrayCreationExp) {
			return;
		}
		listener.enterArrayCreationExp(this);
	}
	// @Override
	public exitRule(listener: ExpressionAntlrParserListener): void {
		if (!listener.exitArrayCreationExp) {
			return;
		}
		listener.exitArrayCreationExp(this);
	}
	// @Override
	public accept<Result>(visitor: ExpressionAntlrParserVisitor<Result>): Result {
		return visitor.visitArrayCreationExp ? visitor.visitArrayCreationExp(this) : visitor.visitChildren(this);
	}
}
/**
 * `primaryExpression` alternative: an object literal — curly brackets
 * around an optional key/value pair list.
 */
export class JsonCreationExpContext extends PrimaryExpressionContext {
	public OPEN_CURLY_BRACKET(): TerminalNode { return this.getToken(ExpressionAntlrParser.OPEN_CURLY_BRACKET, 0); }
	public CLOSE_CURLY_BRACKET(): TerminalNode { return this.getToken(ExpressionAntlrParser.CLOSE_CURLY_BRACKET, 0); }
	public keyValuePairList(): KeyValuePairListContext | undefined {
		return this.tryGetRuleContext(0, KeyValuePairListContext);
	}
	constructor(ctx: PrimaryExpressionContext) {
		super(ctx.parent, ctx.invokingState);
		this.copyFrom(ctx);
	}
	// @Override
	public enterRule(listener: ExpressionAntlrParserListener): void {
		if (!listener.enterJsonCreationExp) {
			return;
		}
		listener.enterJsonCreationExp(this);
	}
	// @Override
	public exitRule(listener: ExpressionAntlrParserListener): void {
		if (!listener.exitJsonCreationExp) {
			return;
		}
		listener.exitJsonCreationExp(this);
	}
	// @Override
	public accept<Result>(visitor: ExpressionAntlrParserVisitor<Result>): Result {
		return visitor.visitJsonCreationExp ? visitor.visitJsonCreationExp(this) : visitor.visitChildren(this);
	}
}
/**
 * `primaryExpression` alternative: a single NUMBER token.
 */
export class NumericAtomContext extends PrimaryExpressionContext {
	public NUMBER(): TerminalNode { return this.getToken(ExpressionAntlrParser.NUMBER, 0); }
	constructor(ctx: PrimaryExpressionContext) {
		super(ctx.parent, ctx.invokingState);
		this.copyFrom(ctx);
	}
	// @Override
	public enterRule(listener: ExpressionAntlrParserListener): void {
		if (!listener.enterNumericAtom) {
			return;
		}
		listener.enterNumericAtom(this);
	}
	// @Override
	public exitRule(listener: ExpressionAntlrParserListener): void {
		if (!listener.exitNumericAtom) {
			return;
		}
		listener.exitNumericAtom(this);
	}
	// @Override
	public accept<Result>(visitor: ExpressionAntlrParserVisitor<Result>): Result {
		return visitor.visitNumericAtom ? visitor.visitNumericAtom(this) : visitor.visitChildren(this);
	}
}
/**
 * `primaryExpression` alternative: a single STRING token.
 */
export class StringAtomContext extends PrimaryExpressionContext {
	public STRING(): TerminalNode { return this.getToken(ExpressionAntlrParser.STRING, 0); }
	constructor(ctx: PrimaryExpressionContext) {
		super(ctx.parent, ctx.invokingState);
		this.copyFrom(ctx);
	}
	// @Override
	public enterRule(listener: ExpressionAntlrParserListener): void {
		if (!listener.enterStringAtom) {
			return;
		}
		listener.enterStringAtom(this);
	}
	// @Override
	public exitRule(listener: ExpressionAntlrParserListener): void {
		if (!listener.exitStringAtom) {
			return;
		}
		listener.exitStringAtom(this);
	}
	// @Override
	public accept<Result>(visitor: ExpressionAntlrParserVisitor<Result>): Result {
		return visitor.visitStringAtom ? visitor.visitStringAtom(this) : visitor.visitChildren(this);
	}
}
/**
 * `primaryExpression` alternative: a single IDENTIFIER token.
 */
export class IdAtomContext extends PrimaryExpressionContext {
	public IDENTIFIER(): TerminalNode { return this.getToken(ExpressionAntlrParser.IDENTIFIER, 0); }
	constructor(ctx: PrimaryExpressionContext) {
		super(ctx.parent, ctx.invokingState);
		this.copyFrom(ctx);
	}
	// @Override
	public enterRule(listener: ExpressionAntlrParserListener): void {
		if (!listener.enterIdAtom) {
			return;
		}
		listener.enterIdAtom(this);
	}
	// @Override
	public exitRule(listener: ExpressionAntlrParserListener): void {
		if (!listener.exitIdAtom) {
			return;
		}
		listener.exitIdAtom(this);
	}
	// @Override
	public accept<Result>(visitor: ExpressionAntlrParserVisitor<Result>): Result {
		return visitor.visitIdAtom ? visitor.visitIdAtom(this) : visitor.visitChildren(this);
	}
}
/**
 * `primaryExpression` alternative that wraps a `stringInterpolation`
 * rule (an interpolated string literal).
 */
export class StringInterpolationAtomContext extends PrimaryExpressionContext {
	public stringInterpolation(): StringInterpolationContext {
		return this.getRuleContext(0, StringInterpolationContext);
	}
	constructor(ctx: PrimaryExpressionContext) {
		super(ctx.parent, ctx.invokingState);
		this.copyFrom(ctx);
	}
	// @Override
	public enterRule(listener: ExpressionAntlrParserListener): void {
		if (!listener.enterStringInterpolationAtom) {
			return;
		}
		listener.enterStringInterpolationAtom(this);
	}
	// @Override
	public exitRule(listener: ExpressionAntlrParserListener): void {
		if (!listener.exitStringInterpolationAtom) {
			return;
		}
		listener.exitStringInterpolationAtom(this);
	}
	// @Override
	public accept<Result>(visitor: ExpressionAntlrParserVisitor<Result>): Result {
		return visitor.visitStringInterpolationAtom ? visitor.visitStringInterpolationAtom(this) : visitor.visitChildren(this);
	}
}
/**
 * `primaryExpression` alternative: member access —
 * `primaryExpression DOT IDENTIFIER`.
 */
export class MemberAccessExpContext extends PrimaryExpressionContext {
	public primaryExpression(): PrimaryExpressionContext {
		return this.getRuleContext(0, PrimaryExpressionContext);
	}
	public DOT(): TerminalNode { return this.getToken(ExpressionAntlrParser.DOT, 0); }
	public IDENTIFIER(): TerminalNode { return this.getToken(ExpressionAntlrParser.IDENTIFIER, 0); }
	constructor(ctx: PrimaryExpressionContext) {
		super(ctx.parent, ctx.invokingState);
		this.copyFrom(ctx);
	}
	// @Override
	public enterRule(listener: ExpressionAntlrParserListener): void {
		if (!listener.enterMemberAccessExp) {
			return;
		}
		listener.enterMemberAccessExp(this);
	}
	// @Override
	public exitRule(listener: ExpressionAntlrParserListener): void {
		if (!listener.exitMemberAccessExp) {
			return;
		}
		listener.exitMemberAccessExp(this);
	}
	// @Override
	public accept<Result>(visitor: ExpressionAntlrParserVisitor<Result>): Result {
		return visitor.visitMemberAccessExp ? visitor.visitMemberAccessExp(this) : visitor.visitChildren(this);
	}
}
/**
 * `primaryExpression` alternative: a function invocation — a callee
 * primary expression, an optional NON token, and a bracketed optional
 * argument list.
 */
export class FuncInvokeExpContext extends PrimaryExpressionContext {
	public primaryExpression(): PrimaryExpressionContext {
		return this.getRuleContext(0, PrimaryExpressionContext);
	}
	public OPEN_BRACKET(): TerminalNode { return this.getToken(ExpressionAntlrParser.OPEN_BRACKET, 0); }
	public CLOSE_BRACKET(): TerminalNode { return this.getToken(ExpressionAntlrParser.CLOSE_BRACKET, 0); }
	public NON(): TerminalNode | undefined { return this.tryGetToken(ExpressionAntlrParser.NON, 0); }
	public argsList(): ArgsListContext | undefined {
		return this.tryGetRuleContext(0, ArgsListContext);
	}
	constructor(ctx: PrimaryExpressionContext) {
		super(ctx.parent, ctx.invokingState);
		this.copyFrom(ctx);
	}
	// @Override
	public enterRule(listener: ExpressionAntlrParserListener): void {
		if (!listener.enterFuncInvokeExp) {
			return;
		}
		listener.enterFuncInvokeExp(this);
	}
	// @Override
	public exitRule(listener: ExpressionAntlrParserListener): void {
		if (!listener.exitFuncInvokeExp) {
			return;
		}
		listener.exitFuncInvokeExp(this);
	}
	// @Override
	public accept<Result>(visitor: ExpressionAntlrParserVisitor<Result>): Result {
		return visitor.visitFuncInvokeExp ? visitor.visitFuncInvokeExp(this) : visitor.visitChildren(this);
	}
}
/**
 * `primaryExpression` alternative: index access —
 * `primaryExpression OPEN_SQUARE_BRACKET expression CLOSE_SQUARE_BRACKET`.
 */
export class IndexAccessExpContext extends PrimaryExpressionContext {
	public primaryExpression(): PrimaryExpressionContext {
		return this.getRuleContext(0, PrimaryExpressionContext);
	}
	public OPEN_SQUARE_BRACKET(): TerminalNode { return this.getToken(ExpressionAntlrParser.OPEN_SQUARE_BRACKET, 0); }
	public expression(): ExpressionContext {
		return this.getRuleContext(0, ExpressionContext);
	}
	public CLOSE_SQUARE_BRACKET(): TerminalNode { return this.getToken(ExpressionAntlrParser.CLOSE_SQUARE_BRACKET, 0); }
	constructor(ctx: PrimaryExpressionContext) {
		super(ctx.parent, ctx.invokingState);
		this.copyFrom(ctx);
	}
	// @Override
	public enterRule(listener: ExpressionAntlrParserListener): void {
		if (!listener.enterIndexAccessExp) {
			return;
		}
		listener.enterIndexAccessExp(this);
	}
	// @Override
	public exitRule(listener: ExpressionAntlrParserListener): void {
		if (!listener.exitIndexAccessExp) {
			return;
		}
		listener.exitIndexAccessExp(this);
	}
	// @Override
	public accept<Result>(visitor: ExpressionAntlrParserVisitor<Result>): Result {
		return visitor.visitIndexAccessExp ? visitor.visitIndexAccessExp(this) : visitor.visitChildren(this);
	}
}
/**
 * Parse-tree node for the `stringInterpolation` rule: the delimiter
 * tokens plus any mix of escape characters, templates, and plain text
 * segments between them.
 */
export class StringInterpolationContext extends ParserRuleContext {
	public STRING_INTERPOLATION_START(): TerminalNode[];
	public STRING_INTERPOLATION_START(i: number): TerminalNode;
	public STRING_INTERPOLATION_START(i?: number): TerminalNode | TerminalNode[] {
		return i === undefined
			? this.getTokens(ExpressionAntlrParser.STRING_INTERPOLATION_START)
			: this.getToken(ExpressionAntlrParser.STRING_INTERPOLATION_START, i);
	}
	public ESCAPE_CHARACTER(): TerminalNode[];
	public ESCAPE_CHARACTER(i: number): TerminalNode;
	public ESCAPE_CHARACTER(i?: number): TerminalNode | TerminalNode[] {
		return i === undefined
			? this.getTokens(ExpressionAntlrParser.ESCAPE_CHARACTER)
			: this.getToken(ExpressionAntlrParser.ESCAPE_CHARACTER, i);
	}
	public TEMPLATE(): TerminalNode[];
	public TEMPLATE(i: number): TerminalNode;
	public TEMPLATE(i?: number): TerminalNode | TerminalNode[] {
		return i === undefined
			? this.getTokens(ExpressionAntlrParser.TEMPLATE)
			: this.getToken(ExpressionAntlrParser.TEMPLATE, i);
	}
	public textContent(): TextContentContext[];
	public textContent(i: number): TextContentContext;
	public textContent(i?: number): TextContentContext | TextContentContext[] {
		return i === undefined
			? this.getRuleContexts(TextContentContext)
			: this.getRuleContext(i, TextContentContext);
	}
	constructor(parent: ParserRuleContext | undefined, invokingState: number) {
		super(parent, invokingState);
	}
	// @Override
	public get ruleIndex(): number { return ExpressionAntlrParser.RULE_stringInterpolation; }
	// @Override
	public enterRule(listener: ExpressionAntlrParserListener): void {
		if (!listener.enterStringInterpolation) {
			return;
		}
		listener.enterStringInterpolation(this);
	}
	// @Override
	public exitRule(listener: ExpressionAntlrParserListener): void {
		if (!listener.exitStringInterpolation) {
			return;
		}
		listener.exitStringInterpolation(this);
	}
	// @Override
	public accept<Result>(visitor: ExpressionAntlrParserVisitor<Result>): Result {
		return visitor.visitStringInterpolation ? visitor.visitStringInterpolation(this) : visitor.visitChildren(this);
	}
}
/**
 * Parse-tree node for the `textContent` rule: one or more TEXT_CONTENT
 * tokens inside an interpolated string.
 */
export class TextContentContext extends ParserRuleContext {
	public TEXT_CONTENT(): TerminalNode[];
	public TEXT_CONTENT(i: number): TerminalNode;
	public TEXT_CONTENT(i?: number): TerminalNode | TerminalNode[] {
		return i === undefined
			? this.getTokens(ExpressionAntlrParser.TEXT_CONTENT)
			: this.getToken(ExpressionAntlrParser.TEXT_CONTENT, i);
	}
	constructor(parent: ParserRuleContext | undefined, invokingState: number) {
		super(parent, invokingState);
	}
	// @Override
	public get ruleIndex(): number { return ExpressionAntlrParser.RULE_textContent; }
	// @Override
	public enterRule(listener: ExpressionAntlrParserListener): void {
		if (!listener.enterTextContent) {
			return;
		}
		listener.enterTextContent(this);
	}
	// @Override
	public exitRule(listener: ExpressionAntlrParserListener): void {
		if (!listener.exitTextContent) {
			return;
		}
		listener.exitTextContent(this);
	}
	// @Override
	public accept<Result>(visitor: ExpressionAntlrParserVisitor<Result>): Result {
		return visitor.visitTextContent ? visitor.visitTextContent(this) : visitor.visitChildren(this);
	}
}
/**
 * Parse-tree node for the `argsList` rule: a comma-separated list whose
 * elements are each either a `lambda` or an `expression`.
 */
export class ArgsListContext extends ParserRuleContext {
	public lambda(): LambdaContext[];
	public lambda(i: number): LambdaContext;
	public lambda(i?: number): LambdaContext | LambdaContext[] {
		return i === undefined
			? this.getRuleContexts(LambdaContext)
			: this.getRuleContext(i, LambdaContext);
	}
	public expression(): ExpressionContext[];
	public expression(i: number): ExpressionContext;
	public expression(i?: number): ExpressionContext | ExpressionContext[] {
		return i === undefined
			? this.getRuleContexts(ExpressionContext)
			: this.getRuleContext(i, ExpressionContext);
	}
	public COMMA(): TerminalNode[];
	public COMMA(i: number): TerminalNode;
	public COMMA(i?: number): TerminalNode | TerminalNode[] {
		return i === undefined
			? this.getTokens(ExpressionAntlrParser.COMMA)
			: this.getToken(ExpressionAntlrParser.COMMA, i);
	}
	constructor(parent: ParserRuleContext | undefined, invokingState: number) {
		super(parent, invokingState);
	}
	// @Override
	public get ruleIndex(): number { return ExpressionAntlrParser.RULE_argsList; }
	// @Override
	public enterRule(listener: ExpressionAntlrParserListener): void {
		if (!listener.enterArgsList) {
			return;
		}
		listener.enterArgsList(this);
	}
	// @Override
	public exitRule(listener: ExpressionAntlrParserListener): void {
		if (!listener.exitArgsList) {
			return;
		}
		listener.exitArgsList(this);
	}
	// @Override
	public accept<Result>(visitor: ExpressionAntlrParserVisitor<Result>): Result {
		return visitor.visitArgsList ? visitor.visitArgsList(this) : visitor.visitChildren(this);
	}
}
/**
 * Parse-tree node for the `lambda` rule:
 * `IDENTIFIER ARROW expression`.
 */
export class LambdaContext extends ParserRuleContext {
	public IDENTIFIER(): TerminalNode { return this.getToken(ExpressionAntlrParser.IDENTIFIER, 0); }
	public ARROW(): TerminalNode { return this.getToken(ExpressionAntlrParser.ARROW, 0); }
	public expression(): ExpressionContext {
		return this.getRuleContext(0, ExpressionContext);
	}
	constructor(parent: ParserRuleContext | undefined, invokingState: number) {
		super(parent, invokingState);
	}
	// @Override
	public get ruleIndex(): number { return ExpressionAntlrParser.RULE_lambda; }
	// @Override
	public enterRule(listener: ExpressionAntlrParserListener): void {
		if (!listener.enterLambda) {
			return;
		}
		listener.enterLambda(this);
	}
	// @Override
	public exitRule(listener: ExpressionAntlrParserListener): void {
		if (!listener.exitLambda) {
			return;
		}
		listener.exitLambda(this);
	}
	// @Override
	public accept<Result>(visitor: ExpressionAntlrParserVisitor<Result>): Result {
		return visitor.visitLambda ? visitor.visitLambda(this) : visitor.visitChildren(this);
	}
}
/**
 * Parse-tree node for the `keyValuePairList` rule: one or more
 * `keyValuePair` children separated by COMMA tokens.
 */
export class KeyValuePairListContext extends ParserRuleContext {
	public keyValuePair(): KeyValuePairContext[];
	public keyValuePair(i: number): KeyValuePairContext;
	public keyValuePair(i?: number): KeyValuePairContext | KeyValuePairContext[] {
		return i === undefined
			? this.getRuleContexts(KeyValuePairContext)
			: this.getRuleContext(i, KeyValuePairContext);
	}
	public COMMA(): TerminalNode[];
	public COMMA(i: number): TerminalNode;
	public COMMA(i?: number): TerminalNode | TerminalNode[] {
		return i === undefined
			? this.getTokens(ExpressionAntlrParser.COMMA)
			: this.getToken(ExpressionAntlrParser.COMMA, i);
	}
	constructor(parent: ParserRuleContext | undefined, invokingState: number) {
		super(parent, invokingState);
	}
	// @Override
	public get ruleIndex(): number { return ExpressionAntlrParser.RULE_keyValuePairList; }
	// @Override
	public enterRule(listener: ExpressionAntlrParserListener): void {
		if (!listener.enterKeyValuePairList) {
			return;
		}
		listener.enterKeyValuePairList(this);
	}
	// @Override
	public exitRule(listener: ExpressionAntlrParserListener): void {
		if (!listener.exitKeyValuePairList) {
			return;
		}
		listener.exitKeyValuePairList(this);
	}
	// @Override
	public accept<Result>(visitor: ExpressionAntlrParserVisitor<Result>): Result {
		return visitor.visitKeyValuePairList ? visitor.visitKeyValuePairList(this) : visitor.visitChildren(this);
	}
}
/**
 * Parse-tree context for the `keyValuePair` rule (key COLON expression).
 * Generated by ANTLR (antlr4ts) — do not edit by hand.
 */
export class KeyValuePairContext extends ParserRuleContext {
  public key(): KeyContext {
    return this.getRuleContext(0, KeyContext);
  }
  public COLON(): TerminalNode { return this.getToken(ExpressionAntlrParser.COLON, 0); }
  public expression(): ExpressionContext {
    return this.getRuleContext(0, ExpressionContext);
  }
  constructor(parent: ParserRuleContext | undefined, invokingState: number) {
    super(parent, invokingState);
  }
  // @Override
  public get ruleIndex(): number { return ExpressionAntlrParser.RULE_keyValuePair; }
  // @Override
  public enterRule(listener: ExpressionAntlrParserListener): void {
    if (listener.enterKeyValuePair) {
      listener.enterKeyValuePair(this);
    }
  }
  // @Override
  public exitRule(listener: ExpressionAntlrParserListener): void {
    if (listener.exitKeyValuePair) {
      listener.exitKeyValuePair(this);
    }
  }
  // @Override
  public accept<Result>(visitor: ExpressionAntlrParserVisitor<Result>): Result {
    if (visitor.visitKeyValuePair) {
      return visitor.visitKeyValuePair(this);
    } else {
      return visitor.visitChildren(this);
    }
  }
}
/**
 * Parse-tree context for the `key` rule (IDENTIFIER or STRING).
 * Generated by ANTLR (antlr4ts) — do not edit by hand.
 */
export class KeyContext extends ParserRuleContext {
  // tryGetToken: only one of the two alternatives is present, so these may be undefined.
  public IDENTIFIER(): TerminalNode | undefined { return this.tryGetToken(ExpressionAntlrParser.IDENTIFIER, 0); }
  public STRING(): TerminalNode | undefined { return this.tryGetToken(ExpressionAntlrParser.STRING, 0); }
  constructor(parent: ParserRuleContext | undefined, invokingState: number) {
    super(parent, invokingState);
  }
  // @Override
  public get ruleIndex(): number { return ExpressionAntlrParser.RULE_key; }
  // @Override
  public enterRule(listener: ExpressionAntlrParserListener): void {
    if (listener.enterKey) {
      listener.enterKey(this);
    }
  }
  // @Override
  public exitRule(listener: ExpressionAntlrParserListener): void {
    if (listener.exitKey) {
      listener.exitKey(this);
    }
  }
  // @Override
  public accept<Result>(visitor: ExpressionAntlrParserVisitor<Result>): Result {
    if (visitor.visitKey) {
      return visitor.visitKey(this);
    } else {
      return visitor.visitChildren(this);
    }
  }
}
import urlJoin from 'url-join';
import deburr from 'lodash/deburr';
import flatten from 'lodash/flatten';
import isNumber from 'lodash/isNumber';
import words from 'lodash/words';
import max from 'lodash/max';
import { color } from 'd3-color';
import { schemeCategory10 } from 'd3-scale-chromatic';
import { fetchProjectionsRegion } from 'common/utils/model';
import { Column, DatasetId, Projection } from 'common/models/Projection';
import { share_image_url } from 'assets/data/share_images_url.json';
import { SeriesType, Series, ExploreMetric, DataMeasure } from './interfaces';
import regions, {
County,
MetroArea,
Region,
RegionType,
State,
getAutocompleteRegions,
USA,
} from 'common/regions';
import { fail } from 'assert';
import { pluralize, formatPercent, formatDecimal } from 'common/utils';
import {
TimeUnit,
DateFormat,
formatDateTime,
getTimeDiff,
} from 'common/utils/time-utils';
import { subtractTime } from 'common/utils/time-utils';
/** Time windows selectable in the Explore chart date-range dropdown. */
export enum Period {
  DAYS_60,
  DAYS_180,
  ALL,
}
// Periods offered in the UI, in display order.
const EXPLORE_PERIODS = [Period.DAYS_60, Period.DAYS_180, Period.ALL];
// Per-period chart configuration.
interface PeriodDefinition {
  increment: number; // days to look back from today (-1 for "all time")
  label: string; // dropdown label
  xTicktimeUnit: TimeUnit; // granularity of the x-axis tick marks
}
export const periodMap: {
  [period in Period]: PeriodDefinition;
} = {
  [Period.DAYS_60]: {
    increment: 60,
    label: '60',
    xTicktimeUnit: TimeUnit.WEEKS,
  },
  [Period.DAYS_180]: {
    increment: 180,
    label: '180',
    xTicktimeUnit: TimeUnit.MONTHS,
  },
  [Period.ALL]: {
    // increment is unused for ALL: getDateRange special-cases it.
    increment: -1,
    label: 'All time',
    xTicktimeUnit: TimeUnit.MONTHS,
  },
};
/** Returns the dropdown label configured for the given period. */
export function getPeriodLabel(period: Period) {
  const { label } = periodMap[period];
  return label;
}
/** Returns the x-axis tick granularity configured for the given period. */
export function getXTickTimeUnitForPeriod(period: Period) {
  const { xTicktimeUnit } = periodMap[period];
  return xTicktimeUnit;
}
/** Returns the labels of every selectable period, in display order. */
export function getAllPeriodLabels() {
  return EXPLORE_PERIODS.map(period => getPeriodLabel(period));
}
/**
 * Returns the [from, to] date range covered by a period, ending now.
 * Period.ALL starts at a fixed 2020-03-01 instead of using its increment.
 */
export function getDateRange(period: Period): Date[] {
  const dateTo = new Date();
  if (period === Period.ALL) {
    return [new Date('2020-03-01'), dateTo];
  }
  const { increment } = periodMap[period];
  const dateFrom = subtractTime(dateTo, increment, TimeUnit.DAYS);
  return [dateFrom, dateTo];
}
// All metrics available in the Explore tab, in display order.
export const EXPLORE_METRICS = [
  ExploreMetric.CASES,
  ExploreMetric.DEATHS,
  ExploreMetric.HOSPITALIZATIONS,
  ExploreMetric.ICU_HOSPITALIZATIONS,
  ExploreMetric.VACCINATIONS_FIRST_DOSE,
  ExploreMetric.VACCINATIONS_COMPLETED,
  ExploreMetric.ICU_USED,
  ExploreMetric.POSITIVITY_RATE,
];
// Note that these specifically are counts, not percentages, and can be normalized
// to per 100K when multiple locations are displayed, in which case we manipulate
// the labels to indicate the 'per 100K'.
// Therefore, don't add a 'percentage' type metric to this without updating
// that code accordingly.
export const ORIGINAL_EXPLORE_METRICS = [
  ExploreMetric.CASES,
  ExploreMetric.DEATHS,
  ExploreMetric.HOSPITALIZATIONS,
  ExploreMetric.ICU_HOSPITALIZATIONS,
];
/**
 * Maps a chart id (as used in URLs / share links) back to its ExploreMetric.
 *
 * Accepts both the canonical chart ids defined in `exploreMetricData`
 * (so `getChartIdByMetric()` round-trips) and the legacy ids
 * ('vaccinations', 'vaccinationsCompleted', 'icuUtilization',
 * 'testPositiveRate') so existing links keep working.
 *
 * @param chartId The chart identifier to look up.
 * @returns The matching metric, or undefined for unknown ids.
 */
export function getMetricByChartId(chartId: string): ExploreMetric | undefined {
  switch (chartId) {
    case 'cases':
      return ExploreMetric.CASES;
    case 'deaths':
      return ExploreMetric.DEATHS;
    case 'hospitalizations':
      return ExploreMetric.HOSPITALIZATIONS;
    case 'icu-hospitalizations':
      return ExploreMetric.ICU_HOSPITALIZATIONS;
    // Legacy alias first, then the canonical id from exploreMetricData.
    case 'vaccinations':
    case 'vaccinations_first_dose':
      return ExploreMetric.VACCINATIONS_FIRST_DOSE;
    case 'vaccinationsCompleted':
    case 'vaccinations_completed':
      return ExploreMetric.VACCINATIONS_COMPLETED;
    case 'icuUtilization':
    case 'icu_capacity_used':
      return ExploreMetric.ICU_USED;
    case 'testPositiveRate':
    case 'positivity_rate':
      return ExploreMetric.POSITIVITY_RATE;
  }
}
/** Maps a metric to the (smoothed) projection dataset that backs its line series. */
function getDatasetIdByMetric(metric: ExploreMetric): DatasetId {
  // Exhaustive lookup: the mapped type forces an entry for every metric.
  const datasetIdByMetric: { [m in ExploreMetric]: DatasetId } = {
    [ExploreMetric.CASES]: 'smoothedDailyCases',
    [ExploreMetric.DEATHS]: 'smoothedDailyDeaths',
    [ExploreMetric.HOSPITALIZATIONS]: 'smoothedHospitalizations',
    [ExploreMetric.ICU_HOSPITALIZATIONS]: 'smoothedICUHospitalizations',
    [ExploreMetric.VACCINATIONS_FIRST_DOSE]: 'vaccinations',
    [ExploreMetric.VACCINATIONS_COMPLETED]: 'vaccinationsCompleted',
    [ExploreMetric.ICU_USED]: 'icuUtilization',
    [ExploreMetric.POSITIVITY_RATE]: 'testPositiveRate',
  };
  return datasetIdByMetric[metric];
}
/**
 * Returns a y-axis tick formatter for the given measure: percent formatting
 * for DataMeasure.PERCENT, plain decimal otherwise, with `places` digits.
 */
export const getYFormat = (dataMeasure: DataMeasure, places: number) => {
  return (value: number) => {
    if (dataMeasure === DataMeasure.PERCENT) {
      return formatPercent(value, places);
    }
    return formatDecimal(value, places);
  };
};
// Describes one series (bar or line) within a metric's chart.
interface SerieDescription {
  label: string; // legend label
  tooltipLabel: string; // label shown in the hover tooltip
  datasetId: DatasetId; // projection dataset backing this series
  type: SeriesType; // BAR for raw data, LINE for smoothed data
}
// Full chart configuration for one Explore metric.
interface ExploreMetricDescription {
  title: string; // chart title
  name: string; // human-readable metric name
  chartId: string; // id used in URLs and export filenames
  seriesList: SerieDescription[];
  dataMeasure: DataMeasure; // percent vs. integer formatting
  yAxisDecimalPlaces: number;
  maxY?: number; // optional fixed y-axis maximum (e.g. 1 for percentages)
}
/**
 * Static configuration for every Explore metric: display names, chart id,
 * y-axis formatting and the dataset-backed series to plot. Count metrics get
 * a raw bar series plus a smoothed line; percent metrics get a single line.
 */
export const exploreMetricData: {
  [metric in ExploreMetric]: ExploreMetricDescription;
} = {
  [ExploreMetric.CASES]: {
    title: 'Cases',
    name: 'Cases',
    chartId: 'cases',
    dataMeasure: DataMeasure.INTEGER,
    yAxisDecimalPlaces: 0,
    seriesList: [
      {
        label: 'Cases',
        tooltipLabel: 'cases',
        datasetId: 'rawDailyCases',
        type: SeriesType.BAR,
      },
      {
        label: '7 Day Average',
        tooltipLabel: 'cases',
        datasetId: 'smoothedDailyCases',
        type: SeriesType.LINE,
      },
    ],
  },
  [ExploreMetric.DEATHS]: {
    title: 'Deaths',
    name: 'Deaths',
    chartId: 'deaths',
    dataMeasure: DataMeasure.INTEGER,
    yAxisDecimalPlaces: 1,
    seriesList: [
      {
        label: 'Deaths',
        tooltipLabel: 'Deaths',
        datasetId: 'rawDailyDeaths',
        type: SeriesType.BAR,
      },
      {
        label: '7 Day Average',
        tooltipLabel: 'Deaths',
        datasetId: 'smoothedDailyDeaths',
        type: SeriesType.LINE,
      },
    ],
  },
  [ExploreMetric.HOSPITALIZATIONS]: {
    title: 'Hospitalizations',
    name: 'Current COVID Hospitalizations',
    chartId: 'hospitalizations',
    dataMeasure: DataMeasure.INTEGER,
    yAxisDecimalPlaces: 0,
    seriesList: [
      {
        label: 'Current COVID Hospitalizations',
        tooltipLabel: 'COVID Hospitalizations',
        datasetId: 'rawHospitalizations',
        type: SeriesType.BAR,
      },
      {
        label: '7 Day Average',
        tooltipLabel: 'COVID Hospitalizations',
        datasetId: 'smoothedHospitalizations',
        type: SeriesType.LINE,
      },
    ],
  },
  [ExploreMetric.ICU_HOSPITALIZATIONS]: {
    title: 'ICU hospitalizations',
    name: 'Current COVID ICU Hospitalizations',
    chartId: 'icu-hospitalizations',
    dataMeasure: DataMeasure.INTEGER,
    yAxisDecimalPlaces: 1,
    seriesList: [
      {
        label: 'Current COVID ICU Hospitalizations',
        tooltipLabel: 'COVID ICU Hospitalizations',
        datasetId: 'rawICUHospitalizations',
        type: SeriesType.BAR,
      },
      {
        label: '7 Day Average',
        tooltipLabel: 'COVID ICU Hospitalizations',
        datasetId: 'smoothedICUHospitalizations',
        type: SeriesType.LINE,
      },
    ],
  },
  // Percent metrics below: single line series, y capped at 100% where maxY: 1.
  [ExploreMetric.VACCINATIONS_FIRST_DOSE]: {
    title: 'Percent vaccinated (1+ dose)',
    name: 'Percent vaccinated (1+ dose)',
    chartId: 'vaccinations_first_dose',
    dataMeasure: DataMeasure.PERCENT,
    yAxisDecimalPlaces: 0,
    maxY: 1,
    seriesList: [
      {
        label: 'Percent Vaccinated (1+ dose)',
        tooltipLabel: 'Percent Vaccinated (1+ dose)',
        datasetId: 'vaccinations',
        type: SeriesType.LINE,
      },
    ],
  },
  [ExploreMetric.VACCINATIONS_COMPLETED]: {
    title: 'Percent vaccinated (fully)',
    name: 'Percent vaccinated (fully)',
    chartId: 'vaccinations_completed',
    dataMeasure: DataMeasure.PERCENT,
    yAxisDecimalPlaces: 0,
    maxY: 1,
    seriesList: [
      {
        label: 'Percent Vaccinated (fully)',
        tooltipLabel: 'Percent Vaccinated (fully)',
        datasetId: 'vaccinationsCompleted',
        type: SeriesType.LINE,
      },
    ],
  },
  [ExploreMetric.ICU_USED]: {
    title: 'ICU capacity used',
    name: 'ICU capacity used',
    chartId: 'icu_capacity_used',
    dataMeasure: DataMeasure.PERCENT,
    yAxisDecimalPlaces: 0,
    seriesList: [
      {
        label: 'ICU capacity used',
        tooltipLabel: 'ICU capacity used',
        datasetId: 'icuUtilization',
        type: SeriesType.LINE,
      },
    ],
  },
  [ExploreMetric.POSITIVITY_RATE]: {
    title: 'Positive test rate',
    name: 'Positive test rate',
    chartId: 'positivity_rate',
    dataMeasure: DataMeasure.PERCENT,
    yAxisDecimalPlaces: 1,
    seriesList: [
      {
        label: 'Positive test rate',
        tooltipLabel: 'Positive test rate',
        datasetId: 'testPositiveRate',
        type: SeriesType.LINE,
      },
    ],
  },
};
// Every chart id known to the Explore tab, derived from exploreMetricData.
export const EXPLORE_CHART_IDS = Object.values(exploreMetricData).map(
  metric => metric.chartId,
);
/**
 * Drops points that have no numeric y value so the chart only renders real data.
 *
 * TODO(pablo): Ideally, we would like to remove segments that are missing
 * values in the middle of the series too, but that will require changing
 * the implementation of the charts to handle segments instead of a continuous
 * series.
 */
export function cleanSeries(data: Column[]) {
  const hasNumericValue = (point: Column) => isNumber(point.y);
  return data.filter(hasNumericValue);
}
/**
 * Builds both the raw (bar) and smoothed (line) series for a metric from a
 * single projection, as used by the single-location Explore chart.
 */
export function getAllSeriesForMetric(
  metric: ExploreMetric,
  projection: Projection,
): Series[] {
  const { seriesList } = exploreMetricData[metric];
  return seriesList.map(({ datasetId, type, label, tooltipLabel }) => ({
    data: cleanSeries(projection.getDataset(datasetId)),
    type,
    label,
    shortLabel: label,
    tooltipLabel,
  }));
}
// Rescales y values to a per-100k-population basis (x values are untouched).
function scalePer100k(data: Column[], population: number) {
  const per100kDivisor = population / 100000;
  return data.map(point => ({ x: point.x, y: point.y / per100kDivisor }));
}
/**
 * Builds the smoothed line series for a metric and projection, colored so
 * multiple locations can be told apart in the multi-location Explore chart.
 * When `normalizeData` is set, values are scaled to per-100k population and
 * the tooltip label says so.
 */
export function getAveragedSeriesForMetric(
  metric: ExploreMetric,
  projection: Projection,
  color: string,
  normalizeData: boolean,
): Series {
  const { fips, totalPopulation } = projection;
  const location = regions.findByFipsCode(fips)!;
  const rawData = cleanSeries(
    projection.getDataset(getDatasetIdByMetric(metric)),
  );
  const metricName = exploreMetricData[metric].seriesList[0].tooltipLabel;
  const tooltipLabel = normalizeData
    ? `${metricName} per 100k population`
    : metricName;
  return {
    data: normalizeData ? scalePer100k(rawData, totalPopulation) : rawData,
    type: SeriesType.LINE,
    params: {
      stroke: color,
      fill: color,
    },
    label: getLocationLabel(location),
    shortLabel: getShortLocationLabel(location),
    tooltipLabel,
  };
}
/** Returns the chart title for the metric. */
export function getTitle(metric: ExploreMetric) {
  return exploreMetricData[metric].title;
}
/** Returns whether the metric is formatted as a percent or an integer count. */
export function getMetricDataMeasure(metric: ExploreMetric) {
  return exploreMetricData[metric].dataMeasure;
}
/** Returns how many decimal places the metric's y-axis labels should show. */
export function getYAxisDecimalPlaces(metric: ExploreMetric) {
  return exploreMetricData[metric].yAxisDecimalPlaces;
}
/**
 * Returns the fixed y-axis maximum configured for the metric, or null when
 * the axis should auto-scale (no maxY configured).
 */
export function getMaxYFromDefinition(metric: ExploreMetric): number | null {
  const maxY = exploreMetricData[metric].maxY;
  // Explicit undefined check (rather than falsiness) so a configured maxY of 0
  // would not silently be treated as "auto-scale".
  return maxY === undefined ? null : maxY;
}
/** Returns the human-readable name of the metric. */
export function getMetricName(metric: ExploreMetric) {
  return exploreMetricData[metric].name;
}
/** Returns the chart id used in URLs and export filenames for the metric. */
export function getChartIdByMetric(metric: ExploreMetric) {
  return exploreMetricData[metric].chartId;
}
/**
 * Returns the display labels for every Explore metric. In multi-location mode
 * the count metrics are normalized per 100K, so their labels say so.
 */
export function getMetricLabels(multiLocation: boolean): string[] {
  return EXPLORE_METRICS.map((metric: ExploreMetric) => {
    const title = getTitle(metric);
    const isNormalized =
      multiLocation && ORIGINAL_EXPLORE_METRICS.includes(metric);
    return isNormalized ? `${title} per 100K` : title;
  });
}
/**
 * Finds the point whose x value falls on the same calendar day as `date`
 * (compared via Date#toDateString, i.e. in local time).
 *
 * @param data Series points; x is interpreted as a Date-compatible value.
 * @param date The day to look for.
 * @returns The first matching point, or null if no point is on that day.
 */
export function findPointByDate(data: Column[], date: Date): Column | null {
  const targetDay = date.toDateString();
  // Array.find is the direct idiom for findIndex + manual index check.
  const point = data.find(p => new Date(p.x).toDateString() === targetDay);
  return point !== undefined ? point : null;
}
// Normalizes a location name for filenames: strips diacritics (deburr), splits
// into words and joins them with dashes, lower-cased.
function sanitizeLocationName(name: string) {
  return words(deburr(name)).join('-').toLowerCase();
}
// Filename-safe form of a region's full name.
function getLocationFileName(region: Region) {
  return sanitizeLocationName(region.fullName);
}
/**
 * Builds the download filename for an exported Explore chart image.
 * One or two locations are named in the file; three or more collapse to
 * "multiple-locations".
 *
 * @param locations Locations shown in the chart.
 * @param metric Metric shown in the chart (its chart id goes into the name).
 */
export function getImageFilename(locations: Region[], metric: ExploreMetric) {
  const downloadDate = formatDateTime(new Date(), DateFormat.YYYY_MM_DD);
  const chartId = getChartIdByMetric(metric);
  const fileNameSuffix = `${chartId}-${downloadDate}`;
  switch (locations.length) {
    case 0:
      return `${fileNameSuffix}.png`;
    case 1:
    case 2: {
      // Braces scope the lexical declaration to this case (no-case-declarations).
      const locationNames = locations.map(getLocationFileName).join('-');
      return `${locationNames}-${fileNameSuffix}.png`;
    }
    default:
      return `multiple-locations-${fileNameSuffix}.png`;
  }
}
/**
 * Generates the URL of the export image for the given shared component id.
 * It needs to be consistent with the share image routing in
 * src/screens/internal/ShareImage/ShareImage.tsx.
 */
export function getExportImageUrl(sharedComponentId: string) {
  const exportPath = `share/${sharedComponentId}/export.png`;
  return urlJoin(share_image_url, exportPath);
}
/** Builds the public share URL for a chart, embedding the post-share redirect target. */
export function getChartUrl(sharedComponentId: string, region: Region | null) {
  const basePath = region ? region.relativeUrl : '/';
  const redirectTo = urlJoin(basePath, 'explore', sharedComponentId);
  const shareUrl = urlJoin(window.location.origin, 'share', sharedComponentId);
  // NOTE: Trailing '/' is significant so we hit the index.html page with correct meta tags and
  // so we don't get redirected and lose the query params.
  return `${shareUrl}/?redirectTo=${encodeURIComponent(redirectTo)}`;
}
/**
 * Returns the social-media quote for a chart of the given metric and locations.
 * At most 5 locations are named; the rest collapse to "and more".
 *
 * @param locations Locations shown in the chart. (Renamed from `regions`,
 *   which shadowed the module-level `regions` import.)
 * @param metric Metric shown in the chart.
 */
export function getSocialQuote(locations: Region[], metric: ExploreMetric) {
  const locationName = getLocationNames(locations, /*limit=*/ 5);
  switch (metric) {
    case ExploreMetric.CASES:
      return `Daily cases in ${locationName}, according to @CovidActNow. See the chart: `;
    case ExploreMetric.DEATHS:
      return `Daily deaths in ${locationName}, according to @CovidActNow. See the chart: `;
    case ExploreMetric.HOSPITALIZATIONS:
      return `Hospitalizations in ${locationName}, according to @CovidActNow. See the chart: `;
    case ExploreMetric.ICU_HOSPITALIZATIONS:
      return `ICU hospitalizations in ${locationName}, according to @CovidActNow. See the chart: `;
    case ExploreMetric.VACCINATIONS_FIRST_DOSE:
      return `Percent vaccinated (1+ dose) in ${locationName}, according to @CovidActNow. See the chart: `;
    case ExploreMetric.VACCINATIONS_COMPLETED:
      return `Percent vaccinated (fully) in ${locationName}, according to @CovidActNow. See the chart: `;
    case ExploreMetric.ICU_USED:
      return `ICU capacity used in ${locationName}, according to @CovidActNow. See the chart: `;
    case ExploreMetric.POSITIVITY_RATE:
      return `Positive test rate in ${locationName}, according to @CovidActNow. See the chart: `;
  }
  // Fallback for any metric value not covered above.
  return '';
}
// "1 week" / "2 weeks"
const pluralizeWeeks = (num: number) => pluralize(num, 'week', 'weeks');
// "1 day" / "2 days"
const pluralizeDays = (num: number) => pluralize(num, 'day', 'days');
/**
 * Returns the relative time between two dates in days and weeks, for example:
 * today, 1 day ago, 5 days ago, 3 weeks and 2 days ago, 5 weeks ago, etc.
 *
 * @param dateFrom The earlier date.
 * @param dateTo The later date.
 */
export function weeksAgo(dateFrom: Date, dateTo: Date) {
  const totalDays = Math.floor(getTimeDiff(dateTo, dateFrom, TimeUnit.DAYS));
  if (totalDays < 7) {
    return totalDays === 0
      ? 'today'
      : `${formatDecimal(totalDays, 0)} ${pluralizeDays(totalDays)} ago`;
  }
  const totalWeeks = Math.floor(totalDays / 7);
  const numDaysWithinPastWeek = Math.floor(totalDays % 7);
  const weeksPart = `${formatDecimal(totalWeeks, 0)} ${pluralizeWeeks(
    totalWeeks,
  )}`;
  // Join the parts explicitly: the previous interpolation produced a stray
  // space before the comma ("3 weeks , 2 days ago") and a double space when
  // there was no day remainder ("5 weeks  ago").
  if (numDaysWithinPastWeek > 0) {
    const daysPart = `${formatDecimal(numDaysWithinPastWeek, 0)} ${pluralizeDays(
      numDaysWithinPastWeek,
    )}`;
    return `${weeksPart} and ${daysPart} ago`;
  }
  return `${weeksPart} ago`;
}
/**
 * Human-readable label for a region: "<abbr> <state>" for counties, full name
 * for states and metro areas, short name for the USA. Fails for anything else.
 */
export function getLocationLabel(location: Region) {
  if (location instanceof County) {
    // instanceof already narrows `location` to County here.
    return `${location.abbreviation} ${location.stateCode}`;
  }
  if (location instanceof State || location instanceof MetroArea) {
    return location.fullName;
  }
  if (location instanceof USA) {
    return location.shortName;
  }
  fail('unsupported region');
}
// Truncates long county names to 11 characters plus an ellipsis, trimming any
// trailing whitespace left by the cut.
function truncateCountyName(countyName: string) {
  const MAX_LENGTH = 11;
  if (countyName.length <= MAX_LENGTH) {
    return countyName;
  }
  return `${countyName.slice(0, MAX_LENGTH).trim()}…`;
}
// Compact label for chart legends: counties get truncated, others use their abbreviation.
function getShortLocationLabel(location: Region) {
  const isCounty = location.regionType === RegionType.COUNTY;
  return isCounty
    ? truncateCountyName(location.abbreviation)
    : location.abbreviation;
}
export function getLocationNames(
locations: Region[],
limit = Number.POSITIVE_INFINITY,
) {
if (locations.length === 1) {
return getLocationLabel(locations[0]);
}
const labels = locations.map(getLocationLabel);
const truncate = labels.length > limit;
const [firstLabels, lastLabel] = truncate
? [labels.slice(0, limit), 'more']
: [labels.slice(0, labels.length - 1), labels[labels.length - 1]];
return `${firstLabels.join(', ')} and ${lastLabel}`;
}
/** Returns the USA plus the autocomplete regions relevant to the given fips code. */
export function getExploreAutocompleteLocations(locationFips: string) {
  const currentLocation = regions.findByFipsCode(locationFips)!;
  return [regions.usa, ...getAutocompleteRegions(currentLocation)];
}
/**
 * An array of 10 colors designed for categorical data. See
 * https://github.com/d3/d3-scale-chromatic for additional options
 * getChartSeries cycles through these with a modulo when plotting more
 * than 10 locations.
 */
const SERIES_COLORS = schemeCategory10;
// Fetches the region's projections and returns only the primary one.
async function getProjectionForRegion(region: Region): Promise<Projection> {
  const { primary } = await fetchProjectionsRegion(region);
  return primary;
}
/**
 * Fetches the chart series to display for a metric and set of locations.
 * A single location gets its raw + smoothed series; multiple locations get
 * one smoothed line per location, each with its own categorical color.
 *
 * @param metric Metric to chart.
 * @param regions Locations to chart.
 * @param normalizeData Whether multi-location values are scaled per 100k.
 */
export function getChartSeries(
  metric: ExploreMetric,
  regions: Region[],
  normalizeData: boolean,
): Promise<Series[]> {
  if (regions.length === 1) {
    return getProjectionForRegion(regions[0]).then(projection =>
      getAllSeriesForMetric(metric, projection),
    );
  }
  // Promise.all already yields Series[] here (one Series per region), so the
  // previous `.then(flatten)` was a no-op and has been removed.
  return Promise.all(
    regions.map(async (region, i) => {
      const projection = await getProjectionForRegion(region);
      return getAveragedSeriesForMetric(
        metric,
        projection,
        SERIES_COLORS[i % SERIES_COLORS.length],
        normalizeData,
      );
    }),
  );
}
/** Chooses the compact label on mobile and the full label otherwise. */
export function getSeriesLabel(series: Series, isMobile: boolean) {
  if (isMobile) {
    return series.shortLabel;
  }
  return series.label;
}
/**
 * The resulting color depends on the original color and the `amount` number,
 * if the `amount` number is too high the resulting color will be white.
 * Falls back to the input string when d3-color cannot parse it.
 * https://github.com/d3/d3-color#color_brighter
 */
export function brightenColor(colorCode: string, amount = 1): string {
  const parsed = color(colorCode);
  if (!parsed) {
    return colorCode;
  }
  return parsed.brighter(amount).hex();
}
// Accessor for a point's y value (used with lodash `max`).
const getY = (d: Column) => d.y;
// True when `date` lies within [dateFrom, dateTo], bounds included.
function isInDateRange(date: Date, dateFrom: Date, dateTo: Date) {
  return date >= dateFrom && date <= dateTo;
}
/* Gets overall maxY value to adjust y-axis when a new date range is selected from dropdown menu */
export function getMaxY(seriesList: Series[], dateFrom: Date, dateTo: Date) {
const maxPerSeries = seriesList.map(series => {
const dataInRange = series.data.filter(d =>
isInDateRange(new Date(d.x), dateFrom, dateTo),
);
const seriesMax = max(dataInRange.map(getY)) || 1;
return seriesMax;
});
const overallMaxY = max(maxPerSeries);
return overallMaxY;
} | the_stack |
import * as Debug from 'debug';
import { EventEmitter } from 'events';
import * as redis from 'redis';
import { LinkError } from './errors/LinkError';
import { ValidationError } from './errors/ValidationError';
import { getPrefix, INohmPrefixes } from './helpers';
import { IMiddlewareOptions, middleware, TRequestHandler } from './middleware';
import {
IDictionary,
ILinkOptions,
IModelOptions,
IModelPropertyDefinition,
IModelPropertyDefinitions,
ISortOptions,
NohmModel,
TLinkCallback,
} from './model';
import {
boolProperty,
dateProperty,
floatProperty,
IChangeEventPayload,
IDefaultEventPayload,
integerProperty,
IRelationChangeEventPayload,
ISearchOption,
IStaticMethods,
jsonProperty,
numberProperty,
stringProperty,
timeProperty,
timestampProperty,
TTypedDefinitions,
} from './model.header';
import { psubscribe, punsubscribe, keys, del } from './typed-redis-helper';
// Public re-exports so consumers can import property helpers, typings and
// errors directly from the package root.
export {
  boolProperty,
  dateProperty,
  floatProperty,
  IChangeEventPayload,
  IDefaultEventPayload,
  IDictionary,
  ILinkOptions,
  IModelOptions,
  IModelPropertyDefinition,
  IModelPropertyDefinitions,
  INohmPrefixes,
  integerProperty,
  IRelationChangeEventPayload,
  ISortOptions,
  IStaticMethods,
  jsonProperty,
  LinkError,
  NohmModelExtendable as NohmModel,
  numberProperty,
  stringProperty,
  timeProperty,
  timestampProperty,
  TLinkCallback,
  TTypedDefinitions,
  ValidationError,
};
// Both casings are supported for the default instance.
export { nohm, nohm as Nohm };
// Debug channels (enable via the DEBUG environment variable, e.g. DEBUG=nohm:*).
const debug = Debug('nohm:index');
const debugPubSub = Debug('nohm:pubSub');
// Pattern matching every nohm pub/sub channel ("<modelName>:<action>").
const PUBSUB_ALL_PATTERN = '*:*';
// this is the exported extendable version - still needs to be registered to receive proper methods
/**
 * Abstract base that is exported as `NohmModel`. The protected methods below
 * are stubs that throw until NohmClass.model()/register() replaces them —
 * this guards against instantiating a model class that was never registered.
 */
abstract class NohmModelExtendable<TProps = any> extends NohmModel<TProps> {
  public client: redis.RedisClient;
  protected nohmClass: NohmClass;
  /**
   * DO NOT OVERWRITE THIS; USED INTERNALLY
   *
   * @protected
   */
  protected _initOptions() {
    // overwritten in NohmClass.model/register
    throw new Error(
      'Class is not extended properly. Use the return Nohm.register() instead of your class directly.',
    );
  }
  /**
   * DO NOT OVERWRITE THIS; USED INTERNALLY
   *
   * @protected
   */
  protected prefix(_prefix: keyof INohmPrefixes): string {
    // overwritten in NohmClass.model/register
    throw new Error(
      'Class is not extended properly. Use the return Nohm.register() instead of your class directly.',
    );
  }
  /**
   * DO NOT OVERWRITE THIS; USED INTERNALLY
   *
   * @protected
   */
  protected rawPrefix(): INohmPrefixes {
    // overwritten in NohmClass.model/register
    throw new Error(
      'Class is not extended properly. Use the return Nohm.register() instead of your class directly.',
    );
  }
}
/** Options accepted by the NohmClass constructor. */
export interface INohmOptions {
  prefix?: string; // redis key prefix (defaults to 'nohm')
  client?: redis.RedisClient; // client used for all normal commands
  meta?: boolean; // whether to store model meta information
  publish?: boolean | redis.RedisClient; // enable pub/sub, optionally with a dedicated client
}
// Generic constructor signature used for registered model classes.
export type Constructor<T> = new (...args: Array<any>) => T;
// Compile-time check that a class satisfies the static interface T.
function staticImplements<T>() {
  return (_constructor: T) => {
    // no op decorator
  };
}
/**
* Some generic definitions for Nohm
*
* @namespace Nohm
*/
/**
* Nohm specific Errors
*
* @namespace NohmErrors
*/
/**
* Main Nohm class. Holds models, generic configuration and can generate the middleware for client validations.
*
* Can be instantiated multiple times if you want different configurations, but usually you only need the default
* that is exported as `require('nohm').nohm`.
*
* @example
* // To instantiate another you can do this:
* const NohmClass = require('nohm').NohmClass;
* const myNohm = new NohmClass({ prefix: 'SomePrefix' });
*
* @class NohmClass
*/
export class NohmClass {
  /**
   * The redis prefixed key object.
   * Defaults to prefixing with 'nohm' which then creates keys like 'nohm:idsets:someModel'.
   */
  public prefix: INohmPrefixes;
  /**
   * The current global nohm redis client
   */
  public client: redis.RedisClient;
  // Error classes exposed on the instance for convenient instanceof checks.
  public readonly LinkError = LinkError;
  public readonly ValidationError = ValidationError;
  /**
   * Whether to store the meta values about models.
   * This is used for example by the admin app.
   * Defaults to true.
   */
  private meta: boolean;
  // Whether model change events are published over redis pub/sub.
  private publish: boolean = false;
  // Dedicated pub/sub connection (a subscribed client cannot run normal commands).
  private publishClient: redis.RedisClient;
  // True once the pattern subscription for PUBSUB_ALL_PATTERN is active.
  private isPublishSubscribed: boolean;
  private publishEventEmitter: EventEmitter;
  // Registered model constructors by model name.
  private modelCache: {
    [name: string]: Constructor<NohmModel<any>>;
  };
  // Names of custom validators registered beyond the built-in ones.
  private extraValidators: Array<string>;
/**
 * @param prefix Redis key prefix (defaults to 'nohm').
 * @param client Redis client used for all normal commands.
 * @param meta Whether to store model meta information (defaults to true).
 * @param publish Enables pub/sub; pass a redis client to use a dedicated connection.
 */
constructor({ prefix, client, meta, publish }: INohmOptions) {
  debug('Creating NohmClass.', arguments);
  this.setPrefix(prefix);
  if (client) {
    this.setClient(client);
  }
  this.modelCache = {};
  this.extraValidators = [];
  // Default to true, but honor an explicit `meta: false`.
  // (The previous `meta || true` coerced `false` back to `true`, making it
  // impossible to disable meta storage.)
  this.meta = typeof meta === 'boolean' ? meta : true;
  this.isPublishSubscribed = false;
  if (typeof publish !== 'undefined') {
    if (typeof publish !== 'boolean') {
      // A client was passed: enable publishing and use it for pub/sub.
      this.setPublish(true);
      this.setPubSubClient(publish);
    } else {
      this.setPublish(publish);
    }
  }
}
/**
 * Set the global nohm redis key prefix (defaults to 'nohm') and regenerate
 * the prefix object used to build all redis keys.
 * Note: the previous doc comment here was a copy of setClient's and incorrect.
 */
public setPrefix(prefix = 'nohm') {
  debug('Setting new prefix.', prefix);
  this.prefix = getPrefix(prefix);
}
/**
 * Set the Nohm global redis client.
 * Note: this will not affect models that have a client set on their own.
 * Logs a warning when the given client is not yet connected; creates a new
 * (not yet connected) node_redis client when none is given.
 */
public setClient(client?: redis.RedisClient) {
  debug(
    'Setting new redis client. Connected: %s; Address: %s.',
    client && (client.connected || (client as any).status === 'ready'),
    client && (client as any).address,
  );
  // ioredis uses .status string instead of .connected boolean
  if (client && !(client.connected || (client as any).status === 'ready')) {
    this
      .logError(`WARNING: setClient() received a redis client that is not connected yet.
Consider waiting for an established connection before setting it. Status (if ioredis): ${
      (client as any).status
    }
, connected (if node_redis): ${client.connected}`);
  } else if (!client) {
    // TODO: maybe remove this, since it is also creating an unconnected client and is the only reason
    // why we have the hard dependency on the "redis" package.
    client = redis.createClient();
  }
  this.client = client;
}
/** Logs a nohm error to the console; a falsy argument is silently ignored. */
public logError(err: string | Error | null) {
  if (!err) {
    return;
  }
  console.error({
    // TODO: Make this a wrapped NohmError if not already an error
    message: err,
    name: 'Nohm Error',
  });
}
/**
* Creates and returns a new model class with the given name and options.
* If you're using Typescript it is strongly advised to use Nohm.register() instead.
*
* @param {string} modelName Name of the model. This needs to be unique and is used in data storage.
* Thus <b>changing this will invalidate existing data</b>!
* @param {IModelDefinitions} options This is an object containing the actual model definitions.
* These are: properties, methods (optional) and the client (optional) to be used.
* @param {boolean} temp When true, this model is not added to the internal model cache,
* meaning methods like factory() and getModels() cannot access them.
* This is mostly useful for meta things like migrations.
* @returns {NohmStaticModel}
*/
public model<TAdditionalMethods>(
modelName: string,
options: IModelOptions & { properties: IModelPropertyDefinitions },
temp = false,
): Constructor<NohmModelExtendable<IDictionary> & TAdditionalMethods> &
IStaticMethods<NohmModel> {
if (!modelName) {
this.logError('When creating a new model you have to provide a name!');
}
debug('Registering new model using model().', modelName, options, temp);
// tslint:disable-next-line:no-this-assignment
const self = this; // well then...
let metaVersion = '';
/**
* The static Model Class, used to get instances or operate on multiple records.
*
* @class NohmStaticModel
*/
@staticImplements<IStaticMethods<CreatedClass>>()
// tslint:disable-next-line:max-classes-per-file
class CreatedClass extends NohmModelExtendable {
/**
* Redis client that is set for this Model.
* Defaults to the NohmClass client it was registered in.
*
* @memberof NohmStaticModel
* @type {RedisClient}
*/
public client = self.client;
protected nohmClass = self;
protected options = options;
/**
* Name of the model, used for database keys and relation values
*
* @memberof NohmStaticModel
* @type {string}
*/
public readonly modelName = modelName;
/**
* Creates an instance of CreatedClass.
*
* @ignore
* @memberof NohmStaticModel
*/
constructor() {
super();
if (self.meta) {
if (!metaVersion) {
// cache it to prevent constant regeneration
metaVersion = this.generateMetaVersion();
}
this.meta = {
inDb: false,
properties: this.options.properties,
version: metaVersion,
};
}
}
/* This (and .register()) is the only place where this method should exist.
An alternative would be to pass the options as a special argument to super, but that would have the downside
of making subclasses of subclasses impossible and restricting constructor argument freedom. */
protected _initOptions() {
this.options = options || { properties: {} };
Object.getPrototypeOf(this).definitions = this.options.properties;
this.meta = {
inDb: false,
properties: {},
version: '',
};
if (!this.client) {
this.client = self.client;
}
}
protected prefix(prefix: keyof INohmPrefixes): string {
return self.prefix[prefix] + modelName;
}
protected rawPrefix(): INohmPrefixes {
return self.prefix;
}
/**
* Creates a new model instance and loads it with the given id.
*
* @static
* @param {*} id ID of the model to be loaded
* @throws {Error('not found')} If no record exists of the given id,
* an error is thrown with the message 'not found'
* @alias load
* @memberof! NohmStaticModel
* @returns {Promise<NohmModel>}
*/
public static async load<P extends NohmModel>(id: any): Promise<P> {
const model = await self.factory<P>(modelName);
await model.load(id);
return model;
}
/**
* Loads an Array of NohmModels via the given ids. Any ids that do not exist will just be ignored.
* If any of the ids do not exist in the database, they are left out instead of throwing an error.
* Thus if no ids exist an empty error is returned.
*
* @static
* @param {Array<string>} ids Array of IDs of the models to be loaded
* @alias loadMany
* @memberof! NohmStaticModel
* @returns {Promise<NohmModel>}
*/
public static async loadMany<P extends NohmModel>(
ids: Array<string>,
): Promise<Array<P>> {
if (!Array.isArray(ids) || ids.length === 0) {
return [];
}
const loadPromises = ids.map(async (id) => {
try {
return await self.factory(modelName, id);
} catch (err) {
if (err && err.message === 'not found') {
return;
} else {
throw err;
}
}
});
const loadedModels = await Promise.all(loadPromises);
return loadedModels.filter<P>(
(model): model is P => typeof model !== 'undefined',
);
}
/**
* Finds ids of objects and loads them into full NohmModels.
*
* @static
* @param {ISearchOptions} searches
* @alias findAndLoad
* @memberof! NohmStaticModel
* @returns {Promise<Array<NohmModel>>}
*/
public static async findAndLoad<
P extends NohmModel,
TProps extends IDictionary
>(
searches?: Partial<
{
[key in keyof TProps]:
| string
| number
| boolean
| Partial<ISearchOption>;
}
>,
): Promise<Array<P>> {
const dummy = await self.factory<P>(modelName);
const ids = await dummy.find(searches);
if (ids.length === 0) {
return [];
}
const loadPromises = ids.map((id) => {
return self.factory<P>(modelName, id);
});
return Promise.all(loadPromises);
}
/**
 * Sort the given ids or all stored ids by their SortOptions
 *
 * @static
 * @see NohmModel.sort
 * @param {ISortOptions<IDictionary>} [sortOptions={}] Sort options
 * @param {(Array<string | number> | false)} [ids=false] Optional subset of ids to sort
 * @alias sort
 * @memberof! NohmStaticModel
 * @returns {Promise<Array<string>>} Array of ids
 */
public static async sort(
  sortOptions: ISortOptions<IDictionary> = {},
  ids: Array<string | number> | false = false,
): Promise<Array<string>> {
  // Sorting is implemented on NohmModel, so delegate via a throwaway instance.
  const instance = await self.factory(modelName);
  return instance.sort(sortOptions, ids);
}
/**
 * Search for ids matching the given search options.
 *
 * @static
 * @see NohmModel.find
 * @param {ISearchOptions} [searches={}] Search options
 * @alias find
 * @memberof NohmStaticModel
 * @returns {Promise<Array<string>>} Array of ids
 */
public static async find<TProps extends IDictionary>(
  searches: Partial<
    {
      [key in keyof TProps]:
        | string
        | number
        | boolean
        | Partial<ISearchOption>;
    }
  > = {},
): Promise<Array<string>> {
  // find() is an instance method; delegate via a throwaway instance.
  const instance = await self.factory(modelName);
  return instance.find(searches);
}
/**
 * Removes the record with the given id from the database without loading it first.
 *
 * @static
 * @param {*} id ID of the record to remove
 * @param {boolean} [silent] Passed through to NohmModel.remove — presumably suppresses
 * change events; confirm against NohmModel.remove.
 * @alias remove
 * @memberof NohmStaticModel
 * @returns {Promise<void>}
 */
public static async remove(id: any, silent?: boolean): Promise<void> {
  const model = await self.factory(modelName);
  // Assigning the id directly skips loading the record before removal.
  model.id = id;
  await model.remove(silent);
}
}
if (!temp) {
this.modelCache[modelName] = CreatedClass;
}
return CreatedClass as any;
}
/**
 * Creates, registers and returns a new model class from a given class.
 * When using Typescript this is the preferred method of creating new models over using Nohm.model().
 *
 * @param {NohmModel} subClass Complete model class, needs to extend NohmModel.
 * @param {boolean} temp When true, this model is not added to the internal model cache,
 * meaning methods like factory() and getModels() cannot access them.
 * This is mostly useful for meta things like migrations.
 * @returns {NohmStaticModel}
 *
 * @example
 * // Typescript
 * import { Nohm, NohmModel, TTypedDefinitions } from 'nohm';
 *
 * // this interface is useful for having typings in .property() and .allProperties() etc. but is optional
 * interface IUserModelProps {
 *   name: string;
 * }
 *
 * class UserModelClass extends NohmModel<IUserModelProps> {
 *   protected static modelName = 'user'; // used in redis to store the keys
 *
 *   // the TTypedDefinitions generic makes sure that our definitions have the same keys as
 *   // defined in our property interface.
 *   // If you don't want to use the generic, you have to use the exported {type}Property types
 *   // to get around the tsc throwing an error.
 *   // TODO: look into the error thrown by tsc when leaving out TTypedDefinitions and using 'sometype' as type
 *   protected static definitions: TTypedDefinitions<IUserModelProps> = {
 *     name: {
 *       defaultValue: 'testName',
 *       type: 'string', // you have to manually make sure this matches the IUserModelProps type!
 *       validations: [
 *         'notEmpty',
 *       ],
 *     },
 *   };
 *   public async foo() {
 *     const test = bar.property('name'); // no error and test typed to string
 *
 *     await bar.validate();
 *     bar.errors.name; // no error and typed
 *
 *     // accessing unknown props does not work,
 *     // because we specified that UserModel only has properties of IUserModelProps
 *     bar.property('foo'); // typescript errors
 *     bar.errors.foo; // typescript error
 *   };
 * }
 * const userModel = Nohm.register(UserModelClass);
 * // typescript now knows about bar.foo() and all the standard nohm methods like bar.property();
 * const bar = new userModel();
 * bar.foo(); // no error
 * bar.allProperties().name === 'testName'; // no error
 */
public register<T extends Constructor<NohmModelExtendable<IDictionary>>>(
  subClass: T,
  temp = false,
): T & IStaticMethods<NohmModel> {
  // tslint:disable-next-line:no-this-assignment
  const self = this; // well then...
  const modelName = (subClass as any).modelName;
  if (!modelName) {
    throw new Error(
      'A class passed to nohm.register() did not have static a modelName property.',
    );
  }
  if (!(subClass as any).definitions) {
    throw new Error(
      'A class passed to nohm.register() did not have static property definitions.',
    );
  }
  debug(
    'Registering new model using register().',
    modelName,
    (subClass as any).definitions,
    temp,
  );
  let metaVersion = '';
  // tslint:disable-next-line:max-classes-per-file
  @staticImplements<IStaticMethods<CreatedClass>>()
  class CreatedClass extends subClass {
    protected nohmClass = self;
    public readonly modelName = modelName;
    constructor(...args: Array<any>) {
      super(...args);
      if (self.meta) {
        if (!metaVersion) {
          // cache it to prevent constant regeneration
          metaVersion = this.generateMetaVersion();
        }
        this.meta = {
          inDb: false,
          properties: this.options.properties,
          version: metaVersion,
        };
      }
    }
    /* This (and .model()) is the only place where this method should exist.
    An alternative would be to pass the options as a special argument to super, but that would have the downside
    of making subclasses of subclasses impossible. */
    protected _initOptions() {
      this.options = {
        idGenerator: (subClass as any).idGenerator,
        properties: {},
      };
      if (!this.client) {
        this.client = self.client;
      }
      this.meta = {
        inDb: false,
        properties: {},
        version: '',
      };
      if (!this.options.idGenerator) {
        this.options.idGenerator = 'default';
      }
    }
    protected prefix(prefix: keyof INohmPrefixes): string {
      return self.prefix[prefix] + this.modelName;
    }
    protected rawPrefix(): INohmPrefixes {
      return self.prefix;
    }
    public getDefinitions(): {
      [key: string]: IModelPropertyDefinition;
    } {
      const definitions = (CreatedClass as any).definitions;
      if (!definitions) {
        throw new Error(
          `Model was not defined with proper static definitions: '${modelName}'`,
        );
      }
      return definitions;
    }
    /**
     * Loads a NohmModel via the given id.
     *
     * @param {*} id ID of the model to be loaded
     * @returns {Promise<NohmModel|void>}
     */
    public static async load<P extends NohmModel>(id: any): Promise<P> {
      const model = await self.factory<P>(modelName);
      await model.load(id);
      return model;
    }
    /**
     * Loads an Array of NohmModels via the given ids. Any ids that do not exist will just be ignored.
     *
     * @param {Array<string>} ids Array of IDs of the models to be loaded
     * @returns {Promise<Array<NohmModel>>}
     */
    public static async loadMany<P extends NohmModel>(
      ids: Array<string>,
    ): Promise<Array<P>> {
      if (!Array.isArray(ids) || ids.length === 0) {
        return [];
      }
      const loadPromises = ids.map(async (id) => {
        try {
          return await self.factory(modelName, id);
        } catch (err) {
          // 'not found' is expected for missing ids and is silently skipped.
          if (err && err.message === 'not found') {
            return;
          } else {
            throw err;
          }
        }
      });
      const loadedModels = await Promise.all(loadPromises);
      // Drop undefined entries produced by missing ids.
      return loadedModels.filter<P>(
        (model): model is P => typeof model !== 'undefined',
      );
    }
    /**
     * Finds ids of objects and loads them into full NohmModels.
     *
     * @param {ISearchOptions} searches
     * @returns {Promise<Array<NohmModel>>}
     */
    public static async findAndLoad<
      P extends NohmModel,
      TProps extends IDictionary
    >(
      searches?: Partial<
        {
          [key in keyof TProps]:
            | string
            | number
            | boolean
            | Partial<ISearchOption>;
        }
      >,
    ): Promise<Array<P>> {
      const dummy = await self.factory<P>(modelName);
      const ids = await dummy.find(searches);
      if (ids.length === 0) {
        return [];
      }
      const loadPromises = ids.map((id) => {
        return self.factory<P>(dummy.modelName, id);
      });
      return Promise.all(loadPromises);
    }
    /**
     * Sort the given ids or all stored ids by their SortOptions
     *
     * @see NohmModel.sort
     * @static
     * @param {ISortOptions<IDictionary>} [sortOptions={}] Sort options
     * @returns {Promise<Array<string>>} Array of ids
     */
    public static async sort(
      options: ISortOptions<IDictionary> = {},
      ids: Array<string | number> | false = false,
    ): Promise<Array<string>> {
      const dummy = await self.factory(modelName);
      return dummy.sort(options, ids);
    }
    /**
     * Search for ids
     *
     * @see NohmModel.find
     * @static
     * @param {ISearchOptions} [searches={}] Search options
     * @returns {Promise<Array<string>>} Array of ids
     */
    public static async find<TProps extends IDictionary>(
      searches: Partial<
        {
          [key in keyof TProps]:
            | string
            | number
            | boolean
            | Partial<ISearchOption>;
        }
      > = {},
    ): Promise<Array<string>> {
      const dummy = await self.factory(modelName);
      return dummy.find(searches);
    }
    /**
     * Removes the record with the given id from the database without loading it first.
     *
     * @param {*} id ID of the record to remove
     * @returns {Promise<void>}
     */
    public static async remove(id: any, silent?: boolean): Promise<void> {
      const model = await self.factory(modelName);
      // Assigning the id directly skips loading the record before removal.
      model.id = id;
      await model.remove(silent);
    }
  }
  if (!temp) {
    this.modelCache[modelName] = CreatedClass;
  }
  return CreatedClass;
}
/**
 * Get all model classes that were registered via .register() or .model().
 *
 * @returns {Array<NohmModelStatic>} The internal model cache, keyed by modelName
 */
public getModels() {
  const { modelCache } = this;
  return modelCache;
}
/**
 * Creates a new instance of the model with the given modelName.
 * When given an id as second parameter it also loads it.
 *
 * @param {string} name Name of the model, must match the modelName of one of your defined models.
 * @param {*} [id] ID of a record you want to load.
 * @returns {Promise<NohmModel>}
 * @throws {Error('Model %name not found.')} Rejects when there is no registered model with the given modelName.
 * @throws {Error('not found')} If no record exists of the given id,
 * an error is thrown with the message 'not found'
 * @memberof NohmClass
 */
public async factory<T extends NohmModel<any>>(
  name: string,
  id?: any,
): Promise<T> {
  // The old callback-style API is gone; fail loudly if a callback is passed.
  if (
    typeof arguments[1] === 'function' ||
    typeof arguments[2] === 'function'
  ) {
    throw new Error(
      'Not implemented: factory does not support callback method anymore.',
    );
  }
  debug(`Factory is creating a new instance of '%s' with id %s.`, name, id);
  const modelClass = this.modelCache[name];
  if (!modelClass) {
    throw new Error(`Model '${name}' not found.`);
  }
  const instance = new modelClass() as T;
  if (id) {
    await instance.load(id);
  }
  return instance;
}
/**
* DO NOT USE THIS UNLESS YOU ARE ABSOLUTELY SURE ABOUT IT!
*
* Deletes any keys from the db that start with the set nohm prefixes.
*
* DO NOT USE THIS UNLESS YOU ARE ABSOLUTELY SURE ABOUT IT!
*
* @param {Object} [client] You can specify the redis client to use. Default: Nohm.client
*/
public async purgeDb(client: redis.RedisClient = this.client): Promise<void> {
async function delKeys(prefix: string) {
const foundKeys = await keys(client, prefix + '*');
if (foundKeys.length === 0) {
return;
}
await del(client, foundKeys);
}
const deletes: Array<Promise<void>> = [];
debug(
`PURGING DATABASE!`,
client && client.connected,
client && (client as any).address,
this.prefix,
);
Object.keys(this.prefix).forEach((key) => {
const prefix = (this.prefix as any)[key];
if (typeof prefix === 'object') {
Object.keys(prefix).forEach((innerKey) => {
const innerPrefix = prefix[innerKey];
deletes.push(delKeys(innerPrefix));
});
} else {
deletes.push(delKeys(prefix));
}
});
await Promise.all(deletes);
}
/**
 * Registers one or more files containing extra validators.
 * Files already registered are skipped; each new file is require()d.
 *
 * @param {(string | Array<string>)} files Path or paths of validator files
 */
public setExtraValidations(files: string | Array<string>) {
  debug(`Setting extra validation files`, files);
  const fileList = Array.isArray(files) ? files : [files];
  for (const path of fileList) {
    if (this.extraValidators.indexOf(path) === -1) {
      this.extraValidators.push(path);
      const validators = require(path);
      Object.keys(validators).forEach((_name) => {
        // TODO for v1: check if this needs to be implemented
        // this.__validators[name] = validators[name];
      });
    }
  }
}
/**
 * Returns the file paths previously registered via setExtraValidations().
 *
 * @returns {Array<string>} Registered validator file paths
 */
public getExtraValidatorFileNames(): Array<string> {
  return this.extraValidators;
}
/**
 * Creates a request handler by delegating to the middleware module,
 * binding it to this NohmClass instance.
 *
 * @param {IMiddlewareOptions} options Options forwarded to the middleware
 * @returns {TRequestHandler} The request handler
 */
public middleware(options: IMiddlewareOptions): TRequestHandler {
  return middleware(options, this);
}
/** Whether publishing of events is currently enabled on this instance. */
public getPublish(): boolean {
  return this.publish;
}
/** Enables or disables event publishing; the value is coerced to a boolean. */
public setPublish(publish: boolean) {
  const enabled = !!publish;
  debug(`Setting publish mode to '%o'.`, enabled);
  this.publish = enabled;
}
/** Returns the redis client dedicated to pub/sub (set via setPubSubClient). */
public getPubSubClient(): redis.RedisClient {
  return this.publishClient;
}
/**
 * Sets the redis client used for pub/sub and initializes the subscriptions.
 *
 * @param {redis.RedisClient} client A second redis client, dedicated to pub/sub
 * @returns {Promise<void>} Resolves once the pattern subscription is set up
 */
public setPubSubClient(client: redis.RedisClient): Promise<void> {
  debugPubSub(
    `Setting pubSub client. Connected: '%s'; Address: '%s'.`,
    client && client.connected,
    client && (client as any).address,
  );
  this.publishClient = client;
  return this.initPubSub();
}
/**
 * Lazily subscribes the pub/sub client to all nohm channels and re-emits
 * incoming messages on the internal EventEmitter as '<modelName>:<action>'.
 * Idempotent: returns immediately if already subscribed.
 *
 * @throws {Error} When no pub/sub client has been set via setPubSubClient().
 */
private async initPubSub(): Promise<void> {
  if (!this.getPubSubClient()) {
    throw new Error(
      'A second redis client must set via nohm.setPubSubClient before using pub/sub methods.',
    );
  } else if (this.isPublishSubscribed === true) {
    // already in pubsub mode, don't need to initialize it again.
    return;
  }
  this.publishEventEmitter = new EventEmitter();
  this.publishEventEmitter.setMaxListeners(0); // TODO: check if this is sensible
  this.isPublishSubscribed = true;
  await psubscribe(
    this.publishClient,
    this.prefix.channel + PUBSUB_ALL_PATTERN,
  );
  debugPubSub(
    `Redis PSUBSCRIBE for '%s'.`,
    this.prefix.channel + PUBSUB_ALL_PATTERN,
  );
  this.publishClient.on('pmessage', (_pattern, channel, message) => {
    // Strip the configured channel prefix, leaving 'modelname:action'.
    const suffix = channel.slice(this.prefix.channel.length);
    const parts = suffix.match(/([^:]+)/g); // Pattern = _prefix_:channel:_modelname_:_action_
    if (!parts) {
      this.logError(`An erroneous channel has been captured: ${channel}.`);
      return;
    }
    const modelName = parts[0];
    const action = parts[1];
    let payload = {};
    try {
      // An empty message is treated as an empty payload.
      payload = message ? JSON.parse(message) : {};
      debugPubSub(
        `Redis published message for model '%s' with action '%s' and message: '%j'.`,
        modelName,
        action,
        payload,
      );
    } catch (e) {
      this.logError(
        `A published message is not valid JSON. Was : "${message}"`,
      );
      return;
    }
    // Re-emit so subscribeEvent()/subscribeEventOnce() listeners receive it.
    this.publishEventEmitter.emit(`${modelName}:${action}`, payload);
  });
}
/**
 * Subscribes a callback to the given event ('<modelName>:<action>'),
 * initializing pub/sub first if necessary.
 *
 * @param {string} eventName Event to listen for
 * @param {(payload: any) => void} callback Invoked with the published payload
 */
public async subscribeEvent(
  eventName: string,
  callback: (payload: any) => void,
): Promise<void> {
  debugPubSub(`Redis subscribing to event '%s'.`, eventName);
  await this.initPubSub();
  this.publishEventEmitter.on(eventName, callback);
}
/**
 * Subscribes a callback to the given event for a single invocation,
 * initializing pub/sub first if necessary.
 *
 * @param {string} eventName Event to listen for
 * @param {(payload: any) => void} callback Invoked once with the published payload
 */
public async subscribeEventOnce(
  eventName: string,
  callback: (payload: any) => void,
): Promise<void> {
  debugPubSub(`Redis subscribing once to event '%s'.`, eventName);
  await this.initPubSub();
  this.publishEventEmitter.once(eventName, callback);
}
public unsubscribeEvent(eventName: string, fn?: any): void {
if (this.publishEventEmitter) {
debugPubSub(
`Redis unsubscribing from event '%s' with fn?: %s.`,
eventName,
fn,
);
if (!fn) {
this.publishEventEmitter.removeAllListeners(eventName);
} else {
this.publishEventEmitter.removeListener(eventName, fn);
}
}
}
/**
 * Unsubscribes from all nohm channels if currently subscribed and
 * resolves with the pub/sub client so callers can close it themselves.
 *
 * @returns {Promise<redis.RedisClient>} The pub/sub client
 */
public async closePubSub(): Promise<redis.RedisClient> {
  if (this.isPublishSubscribed !== true) {
    return this.publishClient;
  }
  const pattern = this.prefix.channel + PUBSUB_ALL_PATTERN;
  debugPubSub(`Redis PUNSUBSCRIBE for '%s'.`, pattern);
  this.isPublishSubscribed = false;
  await punsubscribe(this.publishClient, pattern);
  return this.publishClient;
}
}
// Shared default NohmClass instance, constructed with default options.
const nohm = new NohmClass({});
export default nohm;
import * as ieee754 from './ieee754'
import * as base64 from './base64'
// Based on http://stackoverflow.com/a/22747272/680742, the browser with the
// lowest limit is Chrome, with 0x10000 args. We go 1 magnitude less, for
// safety
const MAX_ARGUMENTS_LENGTH = 0x1000
// Matches any character outside the base64 / base64url alphabets.
const INVALID_BASE64_RE = /[^+/0-9A-Za-z-_]/g
// Maximum number of bytes rendered by inspect() before truncating with ' ... '.
const INSPECT_MAX_BYTES = 50
// Upper bound for buffer lengths: 2^31 - 1.
const K_MAX_LENGTH = 0x7fffffff
/** Buffer encoding options. Hyphenated forms are aliases (e.g. 'utf-8' === 'utf8'). */
export type Encoding =
  | 'hex'
  | 'utf8'
  | 'utf-8'
  | 'ascii'
  | 'latin1'
  | 'binary'
  | 'base64'
  | 'ucs2'
  | 'ucs-2'
  | 'utf16le'
  | 'utf-16le'
/**
* A Buffer implementation mirrors the Buffer type from NodeJS. Provides buffer
* read/write functionality for native numeric types, slicing and concat as
* well as text encoding functionality. Ported from to TypeScript from
* https://github.com/feross/buffer.
*/
export class Buffer extends Uint8Array {
// #region swap
/** Exchanges the bytes at positions n and m of buf, in place. */
private static swap(buf: Buffer, n: number, m: number) {
  const tmp = buf[n]
  buf[n] = buf[m]
  buf[m] = tmp
}
/**
 * Reverses the byte order of every 16-bit unit in place.
 *
 * @returns {Buffer} this, for chaining
 * @throws {RangeError} when the length is not a multiple of 2
 */
public swap16(): Buffer {
  const total = this.length
  if (total % 2 !== 0) {
    throw new RangeError('Buffer size must be a multiple of 16-bits')
  }
  let i = 0
  while (i < total) {
    Buffer.swap(this, i, i + 1)
    i += 2
  }
  return this
}
/**
 * Reverses the byte order of every 32-bit unit in place.
 *
 * @returns {Buffer} this, for chaining
 * @throws {RangeError} when the length is not a multiple of 4
 */
public swap32(): Buffer {
  const total = this.length
  if (total % 4 !== 0) {
    throw new RangeError('Buffer size must be a multiple of 32-bits')
  }
  let i = 0
  while (i < total) {
    Buffer.swap(this, i, i + 3)
    Buffer.swap(this, i + 1, i + 2)
    i += 4
  }
  return this
}
public swap64() {
const len = this.length
if (len % 8 !== 0) {
throw new RangeError('Buffer size must be a multiple of 64-bits')
}
for (let i = 0; i < len; i += 8) {
Buffer.swap(this, i, i + 7)
Buffer.swap(this, i + 1, i + 6)
Buffer.swap(this, i + 2, i + 5)
Buffer.swap(this, i + 3, i + 4)
}
return this
}
// #region toString
/**
 * Slow path for toString(): normalizes encoding/start/end and dispatches to
 * the per-encoding slice decoder. The arguments are kept in the `args` array
 * because they are coerced/mutated in place:
 * args[0] = encoding, args[1] = start, args[2] = end.
 */
private slowToString(...args: any[]): string {
  let loweredCase = false
  // No need to verify that "this.length <= MAX_UINT32" since it's a read-only
  // property of a typed array.
  // This behaves neither like String nor Uint8Array in that we set start/end
  // to their upper/lower bounds if the value passed is out of range.
  // undefined is handled specially as per ECMA-262 6th Edition,
  // Section 13.3.3.7 Runtime Semantics: KeyedBindingInitialization.
  if (args[1] === undefined || args[1] < 0) {
    args[1] = 0
  }
  // Return early if start > this.length. Done here to prevent potential uint32
  // coercion fail below.
  if (args[1] > this.length) {
    return ''
  }
  if (args[2] === undefined || args[2] > this.length) {
    args[2] = this.length
  }
  if (args[2] <= 0) {
    return ''
  }
  // Force coersion to uint32. This will also coerce falsey/NaN values to 0.
  args[2] >>>= 0
  args[1] >>>= 0
  if (args[2] <= args[1]) {
    return ''
  }
  if (!args[0]) {
    args[0] = 'utf8'
  }
  // Dispatch on the encoding. On an unknown value, lower-case it once and
  // retry before giving up with a TypeError.
  while (true) {
    switch (args[0]) {
      case 'hex':
        return Buffer.hexSlice(this, args[1], args[2])
      case 'utf8':
      case 'utf-8':
        return Buffer.utf8Slice(this, args[1], args[2])
      case 'ascii':
        return Buffer.asciiSlice(this, args[1], args[2])
      case 'latin1':
      case 'binary':
        return Buffer.latin1Slice(this, args[1], args[2])
      case 'base64':
        return Buffer.base64Slice(this, args[1], args[2])
      case 'ucs2':
      case 'ucs-2':
      case 'utf16le':
      case 'utf-16le':
        return Buffer.utf16leSlice(this, args[1], args[2])
      default:
        if (loweredCase) {
          throw new TypeError('Unknown encoding: ' + args[0])
        }
        args[0] = (args[0] + '').toLowerCase()
        loweredCase = true
    }
  }
}
/**
 * Decodes the buffer contents to a string. With no arguments the whole
 * buffer is decoded as UTF-8 via the fast path; otherwise slowToString()
 * normalizes encoding/start/end and dispatches.
 */
public toString(encoding?: string, start?: number, end?: number): string
public toString(...args: any[]): string {
  const length = this.length
  if (length === 0) {
    return ''
  }
  return args.length === 0
    ? Buffer.utf8Slice(this, 0, length)
    : this.slowToString(...args)
}
public toLocaleString(encoding?: string, start?: number, end?: number): string
// Buffers have no locale-aware representation; this simply aliases toString().
public toLocaleString(...args: any[]): string {
  return this.toString(...args)
}
// #region equals
/**
 * Returns true when buf contains exactly the same bytes as this buffer.
 *
 * @throws {TypeError} when buf is not a Buffer
 */
public equals(buf: Buffer): boolean {
  if (!Buffer.isBuffer(buf)) {
    throw new TypeError('Argument must be a Buffer')
  }
  // Identity short-circuits the byte-wise comparison.
  return this === buf || Buffer.compare(this, buf) === 0
}
// #region inspect
/**
 * Returns a debug representation like '<Buffer de ad be ef>',
 * truncated with ' ... ' after INSPECT_MAX_BYTES bytes.
 */
public inspect(): string {
  const max = INSPECT_MAX_BYTES
  let str = this.toString('hex', 0, max)
    .replace(/(.{2})/g, '$1 ')
    .trim()
  if (this.length > max) {
    str += ' ... '
  }
  return '<Buffer ' + str + '>'
}
// #region compare
/**
 * Lexicographically compares a range of this buffer against a range of
 * another buffer. Returns -1, 0 or 1 (this range before / equal / after).
 * The arguments are kept in the `args` array because they are coerced in
 * place: args[0] = target, args[1] = targetStart, args[2] = targetEnd,
 * args[3] = sourceStart, args[4] = sourceEnd.
 *
 * @throws {TypeError} when target is not a Buffer/Uint8Array
 * @throws {RangeError} when any range index is out of bounds
 */
public compare(
  otherBuffer: Buffer,
  targetStart?: number,
  targetEnd?: number,
  sourceStart?: number,
  sourceEnd?: number
): number
public compare(...args: any[]): number {
  // Wrap a plain Uint8Array so it gains Buffer semantics for the comparison.
  if (Buffer.isInstance(args[0], Uint8Array)) {
    args[0] = Buffer.from(args[0], args[0].offset, args[0].byteLength)
  }
  if (!Buffer.isBuffer(args[0])) {
    throw new TypeError(
      'The "target" argument must be one of type Buffer or Uint8Array. ' +
        'Received type ' +
        typeof args[0]
    )
  }
  // Default the four range arguments: whole target, whole source.
  if (args[1] === undefined) {
    args[1] = 0
  }
  if (args[2] === undefined) {
    args[2] = args[0] ? args[0].length : 0
  }
  if (args[3] === undefined) {
    args[3] = 0
  }
  if (args[4] === undefined) {
    args[4] = this.length
  }
  if (
    args[1] < 0 ||
    args[2] > args[0].length ||
    args[3] < 0 ||
    args[4] > this.length
  ) {
    throw new RangeError('out of range index')
  }
  // Degenerate (empty) ranges decide the result without comparing bytes.
  if (args[3] >= args[4] && args[1] >= args[2]) {
    return 0
  }
  if (args[3] >= args[4]) {
    return -1
  }
  if (args[1] >= args[2]) {
    return 1
  }
  args[1] >>>= 0
  args[2] >>>= 0
  args[3] >>>= 0
  args[4] >>>= 0
  if (this === args[0]) {
    return 0
  }
  // Compare byte-by-byte over the shorter range; on the first difference,
  // x/y become the differing byte values, otherwise they stay the lengths.
  let x = args[4] - args[3]
  let y = args[2] - args[1]
  const len = Math.min(x, y)
  const thisCopy = this.slice(args[3], args[4])
  const targetCopy = args[0].slice(args[1], args[2])
  for (let i = 0; i < len; ++i) {
    if (thisCopy[i] !== targetCopy[i]) {
      x = thisCopy[i]
      y = targetCopy[i]
      break
    }
  }
  if (x < y) return -1
  if (y < x) return 1
  return 0
}
// #region indexOf
/**
 * Core search loop shared by indexOf/lastIndexOf.
 * For UTF-16 encodings the search operates on 16-bit units (indexSize 2);
 * otherwise it compares individual bytes.
 *
 * @param arr haystack buffer
 * @param val needle (buffer-like, or an array with a single byte value)
 * @param byteOffset byte offset to start searching from
 * @param encoding needle encoding (only the UTF-16 aliases change behavior)
 * @param dir true = forward search (indexOf), false = backward (lastIndexOf)
 * @returns byte index of the match, or -1
 */
private static arrayIndexOf(
  arr: Buffer,
  val: any,
  byteOffset: number,
  encoding: Encoding,
  dir: boolean
) {
  let indexSize = 1
  let arrLength = arr.length
  let valLength = val.length
  if (encoding !== undefined) {
    encoding = String(encoding).toLowerCase() as Encoding
    if (
      encoding === 'ucs2' ||
      encoding === 'ucs-2' ||
      encoding === 'utf16le' ||
      encoding === 'utf-16le'
    ) {
      // UTF-16: a match can only start on a 2-byte boundary, so halve all
      // lengths/offsets and read 16-bit units instead of bytes.
      if (arr.length < 2 || val.length < 2) {
        return -1
      }
      indexSize = 2
      arrLength /= 2
      valLength /= 2
      byteOffset /= 2
    }
  }
  // Reads one search unit (byte or 16-bit BE unit) at logical index i.
  function read(buf: Buffer, i: number) {
    if (indexSize === 1) {
      return buf[i]
    } else {
      return buf.readUInt16BE(i * indexSize)
    }
  }
  let i
  if (dir) {
    // Forward scan: extend a partial match; on mismatch rewind to just after
    // where the partial match started.
    let foundIndex = -1
    for (i = byteOffset; i < arrLength; i++) {
      if (
        read(arr, i) === read(val, foundIndex === -1 ? 0 : i - foundIndex)
      ) {
        if (foundIndex === -1) foundIndex = i
        if (i - foundIndex + 1 === valLength) return foundIndex * indexSize
      } else {
        if (foundIndex !== -1) i -= i - foundIndex
        foundIndex = -1
      }
    }
  } else {
    // Backward scan: try a full needle comparison at each candidate start.
    if (byteOffset + valLength > arrLength) byteOffset = arrLength - valLength
    for (i = byteOffset; i >= 0; i--) {
      let found = true
      for (let j = 0; j < valLength; j++) {
        if (read(arr, i + j) !== read(val, j)) {
          found = false
          break
        }
      }
      if (found) return i
    }
  }
  return -1
}
/**
 * Shared normalization front-end for indexOf/lastIndexOf.
 * The arguments are kept in the `args` array because they are coerced in
 * place: args[0] = buffer, args[1] = val, args[2] = byteOffset,
 * args[3] = encoding, args[4] = dir (true = forward).
 *
 * @throws {TypeError} when val is not a string, number or Buffer
 */
private static bidirectionalIndexOf(
  buffer: Buffer,
  val: any,
  byteOffset?: number,
  encoding?: Encoding,
  dir?: boolean
): number
private static bidirectionalIndexOf(...args: any[]): number {
  // Empty buffer means no match
  if (args[0].length === 0) return -1
  // Normalize byteOffset
  if (typeof args[2] === 'string') {
    // byteOffset omitted and encoding passed in its place.
    args[3] = args[2]
    args[2] = 0
  } else if (args[2] > 0x7fffffff) {
    args[2] = 0x7fffffff
  } else if (args[2] < -0x80000000) {
    args[2] = -0x80000000
  }
  args[2] = +args[2] // Coerce to Number.
  if (Buffer.numberIsNaN(args[2])) {
    // byteOffset: if it's undefined, null, NaN, "foo", etc, search whole buffer
    args[2] = args[4] ? 0 : args[0].length - 1
  }
  // Normalize byteOffset: negative offsets start from the end of the buffer
  if (args[2] < 0) args[2] = args[0].length + args[2]
  if (args[2] >= args[0].length) {
    if (args[4]) return -1
    else args[2] = args[0].length - 1
  } else if (args[2] < 0) {
    if (args[4]) args[2] = 0
    else return -1
  }
  // Normalize val
  if (typeof args[1] === 'string') {
    args[1] = Buffer.from(args[1], args[3])
  }
  // Finally, search either indexOf (if dir is true) or lastIndexOf
  if (Buffer.isBuffer(args[1])) {
    // Special case: looking for empty string/buffer always fails
    if (args[1].length === 0) {
      return -1
    }
    return Buffer.arrayIndexOf(args[0], args[1], args[2], args[3], args[4])
  } else if (typeof args[1] === 'number') {
    args[1] = args[1] & 0xff // Search for a byte value [0-255]
    // Prefer the native typed-array search when the engine provides it.
    if (typeof Uint8Array.prototype.indexOf === 'function') {
      if (args[4]) {
        return Uint8Array.prototype.indexOf.call(args[0], args[1], args[2])
      } else {
        return Uint8Array.prototype.lastIndexOf.call(
          args[0],
          args[1],
          args[2]
        )
      }
    }
    return Buffer.arrayIndexOf(args[0], [args[1]], args[2], args[3], args[4])
  }
  throw new TypeError('val must be string, number or Buffer')
}
/** Index of the first occurrence of val at/after byteOffset, or -1. */
public indexOf(val: any, byteOffset?: number, encoding?: Encoding): number {
  // dir = true selects the forward search.
  return Buffer.bidirectionalIndexOf(this, val, byteOffset, encoding, true)
}
/** Index of the last occurrence of val at/before byteOffset, or -1. */
public lastIndexOf(
  val: any,
  byteOffset?: number,
  encoding?: Encoding
): number {
  // dir = false selects the backward search.
  return Buffer.bidirectionalIndexOf(this, val, byteOffset, encoding, false)
}
// #region includes
/** True when val occurs in the buffer (same argument semantics as indexOf). */
public includes(val: any, byteOffset?: number, encoding?: Encoding): boolean {
  return this.indexOf(val, byteOffset, encoding) !== -1
}
// #region toJSON
/** Serializes to the Node-compatible { type: 'Buffer', data: [...] } shape. */
public toJSON() {
  const facade = this as any
  // _arr is used when present; otherwise the buffer itself is sliced.
  const data = Array.prototype.slice.call(facade._arr || facade, 0)
  return {
    type: 'Buffer',
    data
  }
}
// #region slice
/**
 * Encodes [start, end) as base64. When the whole buffer is requested,
 * the intermediate slice is skipped.
 */
private static base64Slice(buf: Buffer, start: number, end: number): string {
  const wholeBuffer = start === 0 && end === buf.length
  return base64.fromByteArray(wholeBuffer ? buf : buf.slice(start, end))
}
/**
 * Decodes [start, end) as UTF-8 into a JS (UTF-16) string.
 * Invalid or truncated sequences decode to U+FFFD and advance one byte,
 * code points above U+FFFF are emitted as surrogate pairs.
 */
private static utf8Slice(buf: Buffer, start: number, end: number): string {
  end = Math.min(buf.length, end)
  const res = []
  let i = start
  while (i < end) {
    const firstByte = buf[i]
    let codePoint = null
    // Sequence length is derived from the lead byte's range.
    let bytesPerSequence =
      firstByte > 0xef ? 4 : firstByte > 0xdf ? 3 : firstByte > 0xbf ? 2 : 1
    // Only attempt a multi-byte decode when the full sequence fits in range.
    if (i + bytesPerSequence <= end) {
      let secondByte, thirdByte, fourthByte, tempCodePoint
      switch (bytesPerSequence) {
        case 1:
          if (firstByte < 0x80) {
            codePoint = firstByte
          }
          break
        case 2:
          secondByte = buf[i + 1]
          if ((secondByte & 0xc0) === 0x80) {
            tempCodePoint = ((firstByte & 0x1f) << 0x6) | (secondByte & 0x3f)
            // Reject overlong encodings (must not fit in 7 bits).
            if (tempCodePoint > 0x7f) {
              codePoint = tempCodePoint
            }
          }
          break
        case 3:
          secondByte = buf[i + 1]
          thirdByte = buf[i + 2]
          if ((secondByte & 0xc0) === 0x80 && (thirdByte & 0xc0) === 0x80) {
            tempCodePoint =
              ((firstByte & 0xf) << 0xc) |
              ((secondByte & 0x3f) << 0x6) |
              (thirdByte & 0x3f)
            // Reject overlong encodings and the surrogate range.
            if (
              tempCodePoint > 0x7ff &&
              (tempCodePoint < 0xd800 || tempCodePoint > 0xdfff)
            ) {
              codePoint = tempCodePoint
            }
          }
          break
        case 4:
          secondByte = buf[i + 1]
          thirdByte = buf[i + 2]
          fourthByte = buf[i + 3]
          if (
            (secondByte & 0xc0) === 0x80 &&
            (thirdByte & 0xc0) === 0x80 &&
            (fourthByte & 0xc0) === 0x80
          ) {
            tempCodePoint =
              ((firstByte & 0xf) << 0x12) |
              ((secondByte & 0x3f) << 0xc) |
              ((thirdByte & 0x3f) << 0x6) |
              (fourthByte & 0x3f)
            // Reject overlong encodings and values beyond U+10FFFF.
            if (tempCodePoint > 0xffff && tempCodePoint < 0x110000) {
              codePoint = tempCodePoint
            }
          }
      }
    }
    if (codePoint === null) {
      // we did not generate a valid codePoint so insert a
      // replacement char (U+FFFD) and advance only 1 byte
      codePoint = 0xfffd
      bytesPerSequence = 1
    } else if (codePoint > 0xffff) {
      // encode to utf16 (surrogate pair dance)
      codePoint -= 0x10000
      res.push(((codePoint >>> 10) & 0x3ff) | 0xd800)
      codePoint = 0xdc00 | (codePoint & 0x3ff)
    }
    res.push(codePoint)
    i += bytesPerSequence
  }
  return Buffer.decodeCodePointsArray(res)
}
/**
 * Converts an array of UTF-16 code units to a string, chunking the
 * String.fromCharCode.apply calls to stay under engine argument limits.
 */
private static decodeCodePointsArray(codePoints: number[]): string {
  const total = codePoints.length
  if (total <= MAX_ARGUMENTS_LENGTH) {
    return String.fromCharCode.apply(String, codePoints) // avoid extra slice()
  }
  // Decode in chunks to avoid "call stack size exceeded".
  let result = ''
  for (let offset = 0; offset < total; offset += MAX_ARGUMENTS_LENGTH) {
    result += String.fromCharCode.apply(
      String,
      codePoints.slice(offset, offset + MAX_ARGUMENTS_LENGTH)
    )
  }
  return result
}
/** Decodes [start, end) as ASCII, masking each byte to 7 bits. */
private static asciiSlice(buf: Buffer, start: number, end: number): string {
  const stop = Math.min(buf.length, end)
  const chars: string[] = []
  for (let i = start; i < stop; ++i) {
    chars.push(String.fromCharCode(buf[i] & 0x7f))
  }
  return chars.join('')
}
/** Decodes [start, end) as Latin-1, mapping each byte straight to a code unit. */
private static latin1Slice(buf: Buffer, start: number, end: number): string {
  const stop = Math.min(buf.length, end)
  const chars: string[] = []
  for (let i = start; i < stop; ++i) {
    chars.push(String.fromCharCode(buf[i]))
  }
  return chars.join('')
}
/** Renders [start, end) as lower-level hex pairs; the range is clamped first. */
private static hexSlice(buf: Buffer, start: number, end: number): string {
  const len = buf.length
  if (!start || start < 0) start = 0
  if (!end || end < 0 || end > len) end = len
  const parts: string[] = []
  for (let i = start; i < end; ++i) {
    parts.push(Buffer.toHex(buf[i]))
  }
  return parts.join('')
}
/**
 * Decodes [start, end) as UTF-16LE (two bytes per code unit, low byte first).
 *
 * Fix: loop to `length - 1` so an odd-length range ignores the trailing lone
 * byte instead of reading `bytes[i + 1]` past the end (undefined), which made
 * `bytes[i] + undefined * 256` NaN and appended a spurious NUL code unit.
 * This matches the upstream feross/buffer implementation.
 */
private static utf16leSlice(buf: Buffer, start: number, end: number): string {
  const bytes = buf.slice(start, end)
  let res = ''
  for (let i = 0; i < bytes.length - 1; i += 2) {
    res += String.fromCharCode(bytes[i] + bytes[i + 1] * 256)
  }
  return res
}
/**
 * Returns a Buffer view (no copy) over [start, end), with negative indices
 * counted from the end and all bounds clamped, like Array#slice.
 */
public slice(start: number, end?: number): Buffer {
  const len = this.length
  // ~~ truncates to an integer; NaN/undefined become 0.
  start = ~~start
  end = end === undefined ? len : ~~end
  if (start < 0) {
    start += len
    if (start < 0) start = 0
  } else if (start > len) {
    start = len
  }
  if (end < 0) {
    end += len
    if (end < 0) end = 0
  } else if (end > len) {
    end = len
  }
  if (end < start) end = start
  // Return 'Uint8Array' augmented with 'Buffer' prototype.
  let facade = this.subarray(start, end) as any
  facade.__proto__ = Buffer.prototype
  return facade
}
// #region copy
/**
 * Copies [start, end) of this buffer into target at targetStart.
 * Ranges are clamped to the available space; overlapping self-copies are
 * handled (via copyWithin when available, or a descending manual copy).
 *
 * @param target destination buffer
 * @param targetStart write offset in target
 * @param start source start (default 0)
 * @param end source end, exclusive (default this.length)
 * @returns number of bytes copied
 * @throws {TypeError} when target is not a Buffer
 * @throws {RangeError} when an offset is out of bounds
 */
public copy(
  target: Buffer,
  targetStart: number,
  start?: number,
  end?: number
): number {
  if (!Buffer.isBuffer(target))
    throw new TypeError('argument should be a Buffer')
  if (!start) start = 0
  if (!end && end !== 0) end = this.length
  if (targetStart >= target.length) targetStart = target.length
  if (!targetStart) targetStart = 0
  if (end > 0 && end < start) end = start
  // Copy 0 bytes; we're done
  if (end === start) return 0
  if (target.length === 0 || this.length === 0) return 0
  // Fatal error conditions
  if (targetStart < 0) {
    throw new RangeError('targetStart out of bounds')
  }
  if (start < 0 || start >= this.length)
    throw new RangeError('Index out of range')
  if (end < 0) throw new RangeError('sourceEnd out of bounds')
  // Are we oob?
  if (end > this.length) end = this.length
  // Shrink the source range to what fits into target.
  if (target.length - targetStart < end - start) {
    end = target.length - targetStart + start
  }
  const len = end - start
  if (
    this === target &&
    typeof Uint8Array.prototype.copyWithin === 'function'
  ) {
    // Use built-in when available, missing from IE11
    this.copyWithin(targetStart, start, end)
  } else if (this === target && start < targetStart && targetStart < end) {
    // descending copy from end
    for (let i = len - 1; i >= 0; --i) {
      target[i + targetStart] = this[i + start]
    }
  } else {
    Uint8Array.prototype.set.call(
      target,
      this.subarray(start, end),
      targetStart
    )
  }
  return len
}
// #region fill
/**
 * Fills [offset, end) with a value, repeating it as needed. The value may be
 * a number (used as a byte), a string (encoded, then tiled) or a Buffer.
 * The arguments are kept in the `args` array because they are normalized in
 * place: args[0] = value, args[1] = offset, args[2] = end, args[3] = encoding.
 *
 * @returns this, for chaining
 * @throws {TypeError} on an unknown encoding or an empty string/buffer value
 * @throws {RangeError} on an out-of-range offset/end
 */
public fill(value: number, encoding: string): this // questionable call in Buffer.alloc()
public fill(value: any, offset?: number, end?: number): this
public fill(...args: any[]): this {
  // Handle string cases:
  if (typeof args[0] === 'string') {
    // The encoding may appear in the offset or end position.
    if (typeof args[1] === 'string') {
      args[3] = args[1]
      args[1] = 0
      args[2] = this.length
    } else if (typeof args[2] === 'string') {
      args[3] = args[2]
      args[2] = this.length
    }
    if (args[3] !== undefined && typeof args[3] !== 'string') {
      throw new TypeError('encoding must be a string')
    }
    if (typeof args[3] === 'string' && !Buffer.isEncoding(args[3] as Encoding)) {
      throw new TypeError('Unknown encoding: ' + args[3])
    }
    if (args[0].length === 1) {
      const code = args[0].charCodeAt(0)
      if ((args[3] === 'utf8' && code < 128) || args[3] === 'latin1') {
        // Fast path: If `val` fits into a single byte, use that numeric value.
        args[0] = code
      }
    }
  } else if (typeof args[0] === 'number') {
    args[0] = args[0] & 255
  }
  // Invalid ranges are not set to a default, so can range check early.
  if (args[1] < 0 || this.length < args[1] || this.length < args[2]) {
    throw new RangeError('Out of range index')
  }
  if (args[2] <= args[1]) {
    return this
  }
  args[1] = args[1] >>> 0
  args[2] = args[2] === undefined ? this.length : args[2] >>> 0
  if (!args[0]) args[0] = 0
  let i
  if (typeof args[0] === 'number') {
    // Byte fill.
    for (i = args[1]; i < args[2]; ++i) {
      this[i] = args[0]
    }
  } else {
    // String/Buffer fill: tile the encoded bytes across the range.
    const bytes = Buffer.isBuffer(args[0])
      ? args[0]
      : Buffer.from(args[0], args[3])
    const len = bytes.length
    if (len === 0) {
      throw new TypeError(
        'The value "' + args[0] + '" is invalid for argument "value"'
      )
    }
    for (i = 0; i < args[2] - args[1]; ++i) {
      this[i + args[1]] = bytes[i % len]
    }
  }
  return this
}
// #region read: numerics
/**
 * Validates that reading `ext` bytes at `offset` stays within `length`.
 *
 * @throws {RangeError} when offset is negative/non-integer or the read overruns
 */
private static checkOffset(offset: number, ext: number, length: number) {
  if (offset % 1 !== 0 || offset < 0) {
    throw new RangeError('offset is not uint')
  }
  if (offset + ext > length) {
    throw new RangeError('Trying to access beyond Buffer length')
  }
}
/**
 * Reads an unsigned little-endian integer of byteLength bytes at offset.
 *
 * @param noAssert skip the bounds check when true
 */
public readUIntLE(offset: number, byteLength: number, noAssert?: boolean) {
  offset = offset >>> 0
  byteLength = byteLength >>> 0
  if (!noAssert) {
    Buffer.checkOffset(offset, byteLength, this.length)
  }
  // Little endian: least significant byte first; weight grows by 0x100.
  let val = this[offset]
  let mul = 1
  for (let i = 1; i < byteLength; i++) {
    mul *= 0x100
    val += this[offset + i] * mul
  }
  return val
}
/**
 * Reads an unsigned big-endian integer of byteLength bytes at offset.
 *
 * @param noAssert skip the bounds check when true
 */
public readUIntBE(offset: number, byteLength: number, noAssert?: boolean) {
  offset = offset >>> 0
  byteLength = byteLength >>> 0
  if (!noAssert) {
    Buffer.checkOffset(offset, byteLength, this.length)
  }
  // Big endian: walk from the last (least significant) byte backwards.
  let val = this[offset + byteLength - 1]
  let mul = 1
  for (let i = byteLength - 2; i >= 0; i--) {
    mul *= 0x100
    val += this[offset + i] * mul
  }
  return val
}
/** Reads a single unsigned byte at offset. */
public readUInt8(offset: number, noAssert?: boolean) {
  offset = offset >>> 0
  if (!noAssert) Buffer.checkOffset(offset, 1, this.length)
  return this[offset]
}
/** Reads an unsigned 16-bit little-endian integer at offset. */
public readUInt16LE(offset: number, noAssert?: boolean) {
  offset = offset >>> 0
  if (!noAssert) Buffer.checkOffset(offset, 2, this.length)
  const lo = this[offset]
  const hi = this[offset + 1]
  return lo | (hi << 8)
}
/** Reads an unsigned 16-bit big-endian integer at offset. */
public readUInt16BE(offset: number, noAssert?: boolean) {
  offset = offset >>> 0
  if (!noAssert) Buffer.checkOffset(offset, 2, this.length)
  const hi = this[offset]
  const lo = this[offset + 1]
  return (hi << 8) | lo
}
/** Read an unsigned little-endian 32-bit integer at `offset`. */
public readUInt32LE(offset: number, noAssert?: boolean) {
offset = offset >>> 0
if (!noAssert) {
Buffer.checkOffset(offset, 4, this.length)
}
// The top byte is combined with `* 0x1000000` instead of `<< 24` so the
// result stays unsigned (a shift into bit 31 would flip the sign).
return (
(this[offset] | (this[offset + 1] << 8) | (this[offset + 2] << 16)) +
this[offset + 3] * 0x1000000
)
}
/** Read an unsigned big-endian 32-bit integer at `offset`. */
public readUInt32BE(offset: number, noAssert?: boolean) {
offset = offset >>> 0
if (!noAssert) {
Buffer.checkOffset(offset, 4, this.length)
}
// The top byte is scaled with `* 0x1000000` instead of `<< 24` so the
// result stays unsigned (a shift into bit 31 would flip the sign).
return (
this[offset] * 0x1000000 +
((this[offset + 1] << 16) | (this[offset + 2] << 8) | this[offset + 3])
)
}
/**
 * Read `byteLength` bytes at `offset` as a signed little-endian integer
 * (two's complement).
 */
public readIntLE(offset: number, byteLength: number, noAssert?: boolean) {
offset = offset >>> 0
byteLength = byteLength >>> 0
if (!noAssert) {
Buffer.checkOffset(offset, byteLength, this.length)
}
// First accumulate as an unsigned value…
let val = this[offset]
let mul = 1
let i = 0
while (++i < byteLength && (mul *= 0x100)) {
val += this[offset + i] * mul
}
// …then, if the sign bit (2^(8*byteLength-1)) is set, shift down into
// the negative two's-complement range.
mul *= 0x80
if (val >= mul) {
val -= Math.pow(2, 8 * byteLength)
}
return val
}
/**
 * Read `byteLength` bytes at `offset` as a signed big-endian integer
 * (two's complement).
 */
public readIntBE(offset: number, byteLength: number, noAssert?: boolean) {
offset = offset >>> 0
byteLength = byteLength >>> 0
if (!noAssert) {
Buffer.checkOffset(offset, byteLength, this.length)
}
// Accumulate unsigned, starting at the least significant (last) byte.
let i = byteLength
let mul = 1
let val = this[offset + --i]
while (i > 0 && (mul *= 0x100)) {
val += this[offset + --i] * mul
}
// If the sign bit is set, shift into the negative range.
mul *= 0x80
if (val >= mul) {
val -= Math.pow(2, 8 * byteLength)
}
return val
}
/** Read one byte at `offset` as a signed 8-bit integer. */
public readInt8(offset: number, noAssert?: boolean) {
  offset = offset >>> 0
  if (!noAssert) Buffer.checkOffset(offset, 1, this.length)
  const byte = this[offset]
  // Values with the high bit set are negative in two's complement.
  return byte & 0x80 ? byte - 0x100 : byte
}
/** Read a signed little-endian 16-bit integer at `offset`. */
public readInt16LE(offset: number, noAssert?: boolean) {
  offset = offset >>> 0
  if (!noAssert) Buffer.checkOffset(offset, 2, this.length)
  const raw = this[offset] | (this[offset + 1] << 8)
  // Sign-extend from 16 to 32 bits.
  return (raw << 16) >> 16
}
/** Read a signed big-endian 16-bit integer at `offset`. */
public readInt16BE(offset: number, noAssert?: boolean) {
  offset = offset >>> 0
  if (!noAssert) Buffer.checkOffset(offset, 2, this.length)
  const raw = (this[offset] << 8) | this[offset + 1]
  // Sign-extend from 16 to 32 bits.
  return (raw << 16) >> 16
}
/** Read a signed little-endian 32-bit integer at `offset`. */
public readInt32LE(offset: number, noAssert?: boolean) {
offset = offset >>> 0
if (!noAssert) {
Buffer.checkOffset(offset, 4, this.length)
}
// Unlike the unsigned variant, `<< 24` on the top byte is intentional:
// it yields a signed 32-bit result directly.
return (
this[offset] |
(this[offset + 1] << 8) |
(this[offset + 2] << 16) |
(this[offset + 3] << 24)
)
}
/** Read a signed big-endian 32-bit integer at `offset`. */
public readInt32BE(offset: number, noAssert?: boolean) {
offset = offset >>> 0
if (!noAssert) {
Buffer.checkOffset(offset, 4, this.length)
}
// `<< 24` on the top byte intentionally produces a signed 32-bit result.
return (
(this[offset] << 24) |
(this[offset + 1] << 16) |
(this[offset + 2] << 8) |
this[offset + 3]
)
}
/** Read a little-endian IEEE 754 32-bit float at `offset`. */
public readFloatLE(offset: number, noAssert?: boolean) {
  offset = offset >>> 0
  if (!noAssert) Buffer.checkOffset(offset, 4, this.length)
  // ieee754 only needs an indexable byte source; cast away the class type.
  return ieee754.read(this as any, offset, true, 23, 4)
}
/** Read a big-endian IEEE 754 32-bit float at `offset`. */
public readFloatBE(offset: number, noAssert?: boolean) {
  offset = offset >>> 0
  if (!noAssert) Buffer.checkOffset(offset, 4, this.length)
  // ieee754 only needs an indexable byte source; cast away the class type.
  return ieee754.read(this as any, offset, false, 23, 4)
}
/** Read a little-endian IEEE 754 64-bit double at `offset`. */
public readDoubleLE(offset: number, noAssert?: boolean) {
  offset = offset >>> 0
  if (!noAssert) Buffer.checkOffset(offset, 8, this.length)
  // ieee754 only needs an indexable byte source; cast away the class type.
  return ieee754.read(this as any, offset, true, 52, 8)
}
/** Read a big-endian IEEE 754 64-bit double at `offset`. */
public readDoubleBE(offset: number, noAssert?: boolean) {
  offset = offset >>> 0
  if (!noAssert) Buffer.checkOffset(offset, 8, this.length)
  // ieee754 only needs an indexable byte source; cast away the class type.
  return ieee754.read(this as any, offset, false, 52, 8)
}
// #region write: string
/**
 * Decode `string` as hex pairs into `buf` starting at `offset`.
 * Returns the number of bytes written; stops early at the first
 * non-hex pair (Node truncates at the first invalid character).
 * NOTE(review): for an odd-length string `length` may become fractional
 * (strLen / 2 without floor), letting a final lone hex digit be written
 * as its own byte — confirm against Node semantics.
 */
private static hexWrite(
  buf: Buffer,
  string: string,
  offset: number,
  length: number
): number {
  offset = Number(offset) || 0
  const remaining = buf.length - offset
  if (!length) {
    length = remaining
  } else {
    length = Number(length)
    if (length > remaining) {
      length = remaining
    }
  }
  const strLen = string.length
  if (length > strLen / 2) {
    length = strLen / 2
  }
  // `i` is declared outside the loop because it is also the return value
  // (the original relied on `var` hoisting out of the `for` head).
  let i = 0
  for (; i < length; ++i) {
    const parsed = parseInt(string.substr(i * 2, 2), 16)
    if (Buffer.numberIsNaN(parsed)) return i
    buf[offset + i] = parsed
  }
  return i
}
/** Encode `str` as UTF-8 and copy the bytes into `buf` at `offset`. */
private static utf8Write(
  buf: Buffer,
  str: string,
  offset: number,
  length: number
) {
  // Encode at most the space remaining after `offset`, then copy.
  const bytes = Buffer.utf8ToBytes(str, buf.length - offset)
  return Buffer.blitBuffer(bytes, buf, offset, length)
}
/** Encode `str` one byte per char (code & 0xff) into `buf` at `offset`. */
private static asciiWrite(
  buf: Buffer,
  str: string,
  offset: number,
  length: number
) {
  const bytes = Buffer.asciiToBytes(str)
  return Buffer.blitBuffer(bytes, buf, offset, length)
}
/**
 * Write `str` into `buf` as latin1. Delegates to asciiWrite, whose
 * asciiToBytes already masks each char code to one byte (& 0xff), which
 * matches latin1 for code points <= 0xff.
 */
private static latin1Write(
buf: Buffer,
str: string,
offset: number,
length: number
) {
return Buffer.asciiWrite(buf, str, offset, length)
}
/** Decode `str` as base64 and copy the bytes into `buf` at `offset`. */
private static base64Write(
  buf: Buffer,
  str: string,
  offset: number,
  length: number
) {
  // Decode the whole payload; blitBuffer clamps the copy to `length`.
  const bytes = Buffer.base64ToBytes(str)
  return Buffer.blitBuffer(bytes, buf, offset, length)
}
/** Encode `str` as UTF-16LE and copy the bytes into `buf` at `offset`. */
private static ucs2Write(
  buf: Buffer,
  str: string,
  offset: number,
  length: number
) {
  // Encode at most the space remaining after `offset`, then copy.
  const bytes = Buffer.utf16leToBytes(str, buf.length - offset)
  return Buffer.blitBuffer(bytes, buf, offset, length)
}
public write(str: string, encoding: Encoding): number
public write(
string: string,
offset?: number,
length?: number,
encoding?: Encoding
): number
/**
 * Write `string` into the buffer, decoded per `encoding`.
 * Accepts the legacy call shapes write(str), write(str, encoding) and
 * write(str, offset[, length][, encoding]); args are normalised to
 * [string, offset, length, encoding] before dispatch.
 * Returns the number of bytes written.
 */
public write(...args: any[]): number {
// Buffer#write(string)
if (args[1] === undefined) {
args[3] = 'utf8'
args[2] = this.length
args[1] = 0
// Buffer#write(string, encoding)
} else if (args[2] === undefined && typeof args[1] === 'string') {
args[3] = args[1]
args[2] = this.length
args[1] = 0
// Buffer#write(string, offset[, length][, encoding])
} else if (isFinite(args[1])) {
args[1] = args[1] >>> 0
if (isFinite(args[2])) {
args[2] = args[2] >>> 0
if (args[3] === undefined) args[3] = 'utf8'
} else {
// Third argument is actually the encoding: write(str, offset, encoding).
args[3] = args[2]
args[2] = undefined
}
} else {
// The pre-v0.10 (string, encoding, offset) order is rejected outright.
throw new Error(
'Buffer.write(string, encoding, offset[, length]) is no longer supported'
)
}
// Clamp the length to the space remaining after offset.
const remaining = this.length - args[1]
if (args[2] === undefined || args[2] > remaining) args[2] = remaining
if (
(args[0].length > 0 && (args[2] < 0 || args[1] < 0)) ||
args[1] > this.length
) {
throw new RangeError('Attempt to write outside Buffer bounds')
}
if (!args[3]) {
args[3] = 'utf8'
}
// Dispatch on encoding; on an unknown name, lowercase once and retry
// (encoding names are case-insensitive, as in Node).
let loweredCase = false
for (;;) {
switch (args[3]) {
case 'hex':
return Buffer.hexWrite(this, args[0], args[1], args[2])
case 'utf8':
case 'utf-8':
return Buffer.utf8Write(this, args[0], args[1], args[2])
case 'ascii':
return Buffer.asciiWrite(this, args[0], args[1], args[2])
case 'latin1':
case 'binary':
return Buffer.latin1Write(this, args[0], args[1], args[2])
case 'base64':
// Warning: maxLength not taken into account in base64Write
return Buffer.base64Write(this, args[0], args[1], args[2])
case 'ucs2':
case 'ucs-2':
case 'utf16le':
case 'utf-16le':
return Buffer.ucs2Write(this, args[0], args[1], args[2])
default:
if (loweredCase) {
throw new TypeError('Unknown encoding: ' + args[3])
}
args[3] = ('' + args[3]).toLowerCase()
loweredCase = true
}
}
}
// #region write: numerics
/**
 * Validate a numeric write: `buf` must be a Buffer, `value` must lie in
 * [min, max], and `ext` bytes at `offset` must fit inside the buffer.
 */
private static checkInt(
  buf: Buffer,
  value: number,
  offset: number,
  ext: number,
  max: number,
  min: number
) {
  if (!Buffer.isBuffer(buf)) {
    throw new TypeError('"Buffer" argument must be a Buffer instance')
  }
  if (value > max || value < min) {
    throw new RangeError('"value" argument is out of bounds')
  }
  if (offset + ext > buf.length) {
    throw new RangeError('Index out of range')
  }
}
/**
 * Write `value` as an unsigned little-endian integer of `byteLength`
 * bytes. Returns offset + byteLength.
 * NOTE(review): `noAssert` is typed `number` here but `boolean` on the
 * sibling read/write methods — only truthiness is used; confirm and align.
 */
public writeUIntLE(
value: number,
offset: number,
byteLength: number,
noAssert?: number
) {
value = +value
offset = offset >>> 0
byteLength = byteLength >>> 0
if (!noAssert) {
const maxBytes = Math.pow(2, 8 * byteLength) - 1
Buffer.checkInt(this, value, offset, byteLength, maxBytes, 0)
}
// Emit the least significant byte first; `mul` is 256^i, so division
// shifts the next byte down into the low 8 bits.
let mul = 1
let i = 0
this[offset] = value & 0xff
while (++i < byteLength && (mul *= 0x100)) {
this[offset + i] = (value / mul) & 0xff
}
return offset + byteLength
}
/**
 * Write `value` as an unsigned big-endian integer of `byteLength` bytes.
 * Returns offset + byteLength.
 * NOTE(review): `noAssert` is typed `number` here but `boolean` on the
 * sibling read/write methods — only truthiness is used; confirm and align.
 */
public writeUIntBE(
value: number,
offset: number,
byteLength: number,
noAssert?: number
) {
value = +value
offset = offset >>> 0
byteLength = byteLength >>> 0
if (!noAssert) {
const maxBytes = Math.pow(2, 8 * byteLength) - 1
Buffer.checkInt(this, value, offset, byteLength, maxBytes, 0)
}
// Emit the least significant byte at the highest index and walk back;
// `mul` is 256^n, so division shifts each byte into the low 8 bits.
let i = byteLength - 1
let mul = 1
this[offset + i] = value & 0xff
while (--i >= 0 && (mul *= 0x100)) {
this[offset + i] = (value / mul) & 0xff
}
return offset + byteLength
}
/** Write `value` as one unsigned byte at `offset`; returns offset + 1. */
public writeUInt8(value: number, offset: number, noAssert?: boolean) {
  value = +value
  offset = offset >>> 0
  if (!noAssert) Buffer.checkInt(this, value, offset, 1, 0xff, 0)
  this[offset] = value & 0xff
  return offset + 1
}
/** Write `value` as an unsigned little-endian 16-bit integer. */
public writeUInt16LE(value: number, offset: number, noAssert?: boolean) {
  value = +value
  offset = offset >>> 0
  if (!noAssert) Buffer.checkInt(this, value, offset, 2, 0xffff, 0)
  // High byte second, low byte first (the stores are independent).
  this[offset + 1] = (value >>> 8) & 0xff
  this[offset] = value & 0xff
  return offset + 2
}
/** Write `value` as an unsigned big-endian 16-bit integer. */
public writeUInt16BE(value: number, offset: number, noAssert?: boolean) {
  value = +value
  offset = offset >>> 0
  if (!noAssert) Buffer.checkInt(this, value, offset, 2, 0xffff, 0)
  // High byte first, then low byte.
  this[offset + 1] = value & 0xff
  this[offset] = (value >>> 8) & 0xff
  return offset + 2
}
/** Write `value` as an unsigned little-endian 32-bit integer. */
public writeUInt32LE(value: number, offset: number, noAssert?: boolean) {
  value = +value
  offset = offset >>> 0
  if (!noAssert) Buffer.checkInt(this, value, offset, 4, 0xffffffff, 0)
  // Least significant byte first.
  for (let i = 0; i < 4; ++i) {
    this[offset + i] = (value >>> (i * 8)) & 0xff
  }
  return offset + 4
}
/** Write `value` as an unsigned big-endian 32-bit integer. */
public writeUInt32BE(value: number, offset: number, noAssert?: boolean) {
  value = +value
  offset = offset >>> 0
  if (!noAssert) Buffer.checkInt(this, value, offset, 4, 0xffffffff, 0)
  // Most significant byte first.
  for (let i = 0; i < 4; ++i) {
    this[offset + 3 - i] = (value >>> (i * 8)) & 0xff
  }
  return offset + 4
}
/**
 * Write `value` as a signed little-endian integer of `byteLength` bytes
 * (two's complement). Returns offset + byteLength.
 */
public writeIntLE(
value: number,
offset: number,
byteLength: number,
noAssert?: boolean
) {
value = +value
offset = offset >>> 0
if (!noAssert) {
const limit = Math.pow(2, 8 * byteLength - 1)
Buffer.checkInt(this, value, offset, byteLength, limit - 1, -limit)
}
let i = 0
let mul = 1
let sub = 0
this[offset] = value & 0xff
while (++i < byteLength && (mul *= 0x100)) {
// For negative values, start borrowing (sub = 1) once the first
// non-zero byte has been emitted — this realises two's complement
// while dividing by successive powers of 256.
if (value < 0 && sub === 0 && this[offset + i - 1] !== 0) {
sub = 1
}
this[offset + i] = (((value / mul) >> 0) - sub) & 0xff
}
return offset + byteLength
}
/**
 * Write `value` as a signed big-endian integer of `byteLength` bytes
 * (two's complement). Returns offset + byteLength.
 */
public writeIntBE(
value: number,
offset: number,
byteLength: number,
noAssert?: boolean
) {
value = +value
offset = offset >>> 0
if (!noAssert) {
const limit = Math.pow(2, 8 * byteLength - 1)
Buffer.checkInt(this, value, offset, byteLength, limit - 1, -limit)
}
// Least significant byte goes at the highest index; walk backwards.
let i = byteLength - 1
let mul = 1
let sub = 0
this[offset + i] = value & 0xff
while (--i >= 0 && (mul *= 0x100)) {
// For negative values, start borrowing (sub = 1) once the first
// non-zero byte has been emitted — two's complement via division.
if (value < 0 && sub === 0 && this[offset + i + 1] !== 0) {
sub = 1
}
this[offset + i] = (((value / mul) >> 0) - sub) & 0xff
}
return offset + byteLength
}
/** Write `value` as one signed byte at `offset`; returns offset + 1. */
public writeInt8(value: number, offset: number, noAssert?: boolean): number {
  value = +value
  offset = offset >>> 0
  if (!noAssert) Buffer.checkInt(this, value, offset, 1, 0x7f, -0x80)
  // Map negatives onto their two's-complement byte representation.
  const byte = value < 0 ? 0xff + value + 1 : value
  this[offset] = byte & 0xff
  return offset + 1
}
/** Write `value` as a signed little-endian 16-bit integer. */
public writeInt16LE(value: number, offset: number, noAssert?: boolean): number {
  value = +value
  offset = offset >>> 0
  if (!noAssert) Buffer.checkInt(this, value, offset, 2, 0x7fff, -0x8000)
  // Low byte, then high byte; shifts handle negatives via two's complement.
  this[offset + 1] = (value >>> 8) & 0xff
  this[offset] = value & 0xff
  return offset + 2
}
/** Write `value` as a signed big-endian 16-bit integer. */
public writeInt16BE(value: number, offset: number, noAssert?: boolean): number {
  value = +value
  offset = offset >>> 0
  if (!noAssert) Buffer.checkInt(this, value, offset, 2, 0x7fff, -0x8000)
  // High byte, then low byte; shifts handle negatives via two's complement.
  this[offset + 1] = value & 0xff
  this[offset] = (value >>> 8) & 0xff
  return offset + 2
}
/** Write `value` as a signed little-endian 32-bit integer. */
public writeInt32LE(value: number, offset: number, noAssert?: boolean): number {
  value = +value
  offset = offset >>> 0
  if (!noAssert) {
    Buffer.checkInt(this, value, offset, 4, 0x7fffffff, -0x80000000)
  }
  // Least significant byte first; >>> yields two's-complement bytes.
  for (let i = 0; i < 4; ++i) {
    this[offset + i] = (value >>> (i * 8)) & 0xff
  }
  return offset + 4
}
/** Write `value` as a signed big-endian 32-bit integer. */
public writeInt32BE(value: number, offset: number, noAssert?: boolean): number {
  value = +value
  offset = offset >>> 0
  if (!noAssert) {
    Buffer.checkInt(this, value, offset, 4, 0x7fffffff, -0x80000000)
  }
  // Map negatives onto their unsigned two's-complement representation.
  if (value < 0) value = 0xffffffff + value + 1
  // Most significant byte first.
  for (let i = 0; i < 4; ++i) {
    this[offset + 3 - i] = (value >>> (i * 8)) & 0xff
  }
  return offset + 4
}
/**
 * Bounds-check a float/double write: `ext` bytes at `offset` must fit.
 * NOTE: value/max/min are accepted for signature symmetry with checkInt
 * but are not validated here.
 */
private static checkIEEE754(
  buf: Buffer,
  value: number,
  offset: number,
  ext: number,
  max: number,
  min: number
) {
  if (offset < 0 || offset + ext > buf.length) {
    throw new RangeError('Index out of range')
  }
}
/** Shared implementation for writeFloatLE/BE; returns offset + 4. */
private static writeFloat(
  buf: Buffer,
  value: number,
  offset: number,
  littleEndian: boolean,
  noAssert?: boolean
): number {
  value = +value
  offset = offset >>> 0
  if (!noAssert) {
    // Largest finite single-precision value.
    const limit = 3.4028234663852886e38
    Buffer.checkIEEE754(buf, value, offset, 4, limit, -limit)
  }
  ieee754.write(buf, value, offset, littleEndian, 23, 4)
  return offset + 4
}
/** Write `value` as a little-endian 32-bit float; returns offset + 4. */
public writeFloatLE(value: number, offset: number, noAssert?: boolean): number {
  return Buffer.writeFloat(this, value, offset, true, noAssert)
}
/** Write `value` as a big-endian 32-bit float; returns offset + 4. */
public writeFloatBE(value: number, offset: number, noAssert?: boolean): number {
  return Buffer.writeFloat(this, value, offset, false, noAssert)
}
/** Shared implementation for writeDoubleLE/BE; returns offset + 8. */
private static writeDouble(
  buf: Buffer,
  value: number,
  offset: number,
  littleEndian: boolean,
  noAssert?: boolean
): number {
  value = +value
  offset = offset >>> 0
  if (!noAssert) {
    // Largest finite double-precision value (Number.MAX_VALUE).
    const limit = 1.7976931348623157e308
    Buffer.checkIEEE754(buf, value, offset, 8, limit, -limit)
  }
  ieee754.write(buf, value, offset, littleEndian, 52, 8)
  return offset + 8
}
/** Write `value` as a little-endian 64-bit double; returns offset + 8. */
public writeDoubleLE(value: number, offset: number, noAssert?: boolean): number {
  return Buffer.writeDouble(this, value, offset, true, noAssert)
}
/** Write `value` as a big-endian 64-bit double; returns offset + 8. */
public writeDoubleBE(value: number, offset: number, noAssert?: boolean): number {
  return Buffer.writeDouble(this, value, offset, false, noAssert)
}
// #region static: alloc
/** Allocate an uninitialised (zero-filled here) Buffer of `size` bytes. */
public static allocUnsafe(size: number): Buffer {
  Buffer.assertSize(size)
  // assertSize rejects negatives, so this branch is defensive only.
  if (size < 0) return Buffer.createBuffer(0)
  return Buffer.createBuffer(Buffer.checked(size) | 0)
}
/**
 * Allocate a Buffer of `size` bytes, optionally filled with `fill`
 * (interpreted per `encoding` when `encoding` is a string).
 */
public static alloc(
  size: number,
  fill?: number,
  encoding?: Encoding
): Buffer {
  Buffer.assertSize(size)
  if (size <= 0) return Buffer.createBuffer(size)
  const buf = Buffer.createBuffer(size)
  if (fill === undefined) return buf
  // Only honour `encoding` when it is a string, so a numeric third
  // argument is not mistaken for a start offset.
  return typeof encoding === 'string' ? buf.fill(fill, encoding) : buf.fill(fill)
}
// #region static: from
/** Build a Buffer by decoding `str` per `encoding` (defaults to utf8). */
private static fromString(str: string, encoding: Encoding | '') {
  const enc: Encoding =
    typeof encoding === 'string' && encoding !== '' ? encoding : 'utf8'
  if (!Buffer.isEncoding(enc)) {
    throw new TypeError('Unknown encoding: ' + enc)
  }
  const length = Buffer.byteLength(str, enc) | 0
  const buf = Buffer.createBuffer(length)
  const actual = buf.write(str, enc)
  // A partially-invalid payload (e.g. bad hex such as 'abxxcd') writes
  // fewer bytes than predicted; trim to what was actually decoded.
  return actual === length ? buf : buf.slice(0, actual)
}
/** Copy an array-like of numbers into a new Buffer, masking to bytes. */
private static fromArrayLike(array: ArrayLike<number>): Buffer {
  const length = array.length < 0 ? 0 : Buffer.checked(array.length) | 0
  const buf = Buffer.createBuffer(length)
  for (let i = 0; i < length; ++i) {
    buf[i] = array[i] & 0xff
  }
  return buf
}
/**
 * Create a Buffer *view* over `array` (no copy), honouring optional
 * `byteOffset` and `length`. Throws RangeError when either is outside
 * the ArrayBuffer's bounds.
 */
private static fromArrayBuffer(
  array: ArrayBuffer,
  byteOffset: number,
  length: number
): Buffer {
  if (byteOffset < 0 || array.byteLength < byteOffset) {
    throw new RangeError('"offset" is outside of buffer bounds')
  }
  if (array.byteLength < byteOffset + (length || 0)) {
    throw new RangeError('"length" is outside of buffer bounds')
  }
  let buf: Uint8Array
  if (byteOffset === undefined && length === undefined) {
    buf = new Uint8Array(array)
  } else if (length === undefined) {
    buf = new Uint8Array(array, byteOffset)
  } else {
    buf = new Uint8Array(array, byteOffset, length)
  }
  // Augment the view in place; Object.setPrototypeOf replaces the
  // deprecated `__proto__` assignment with the standard API.
  Object.setPrototypeOf(buf, Buffer.prototype)
  return buf as Buffer
}
/**
 * Create a Buffer from another Buffer, an array-like, or a JSON-style
 * `{ type: 'Buffer', data: [...] }` object. Throws TypeError otherwise.
 */
private static fromObject(obj: any): Buffer {
  if (Buffer.isBuffer(obj)) {
    // Copy the source Buffer's bytes into a fresh allocation.
    const len = Buffer.checked(obj.length) | 0
    const buf = Buffer.createBuffer(len)
    if (buf.length === 0) {
      return buf
    }
    obj.copy(buf, 0, 0, len)
    return buf
  }
  if (obj.length !== undefined) {
    // Array-like; a bogus length yields an empty Buffer (Node behaviour).
    if (typeof obj.length !== 'number' || Buffer.numberIsNaN(obj.length)) {
      return Buffer.createBuffer(0)
    }
    return Buffer.fromArrayLike(obj)
  }
  if (obj.type === 'Buffer' && Array.isArray(obj.data)) {
    return Buffer.fromArrayLike(obj.data)
  }
  // `new` added for consistency with every other throw site in this class.
  throw new TypeError('Unable create buffer from given object.')
}
// uint8 signatures
// uint8 signatures
public static from(
  iterable: Iterable<number>,
  mapfn?: (v: number, k: number) => number,
  thisArg?: any
): Buffer
public static from(array: ArrayLike<number>): Buffer
public static from(
  data: any[] | string | Buffer | ArrayBuffer /*| TypedArray*/
): Buffer
// node.d.ts signatures
public static from(
  arrayBuffer: ArrayBuffer,
  byteOffset?: number,
  length?: number
): Buffer
public static from(
  data: any[] | string | Buffer | ArrayBuffer /*| TypedArray*/
): Buffer
public static from(str: string, encoding?: string): Buffer
/**
 * Polymorphic Buffer.from: dispatches on the first argument's type to
 * string decoding, typed-array copy, ArrayBuffer view, valueOf /
 * Symbol.toPrimitive unwrapping, or generic object conversion.
 */
public static from(...args: any[]): Buffer {
  if (typeof args[0] === 'string') {
    return Buffer.fromString(args[0], args[1])
  }
  // Typed-array views are copied whole (byteOffset/length args ignored,
  // matching Node's Buffer.from(typedArray)).
  if (ArrayBuffer.isView(args[0])) {
    return Buffer.fromArrayLike(args[0] as Uint8Array)
  }
  if (args[0] == null) {
    // `new` added for consistency with the other throw sites here.
    throw new TypeError(
      'The first argument must be one of type string, Buffer, ArrayBuffer, Array, ' +
        'or Array-like Object. Received type ' +
        typeof args[0]
    )
  }
  if (
    Buffer.isInstance(args[0], ArrayBuffer) ||
    (args[0] && Buffer.isInstance(args[0].buffer, ArrayBuffer))
  ) {
    return Buffer.fromArrayBuffer(
      args[0],
      args[1] as number,
      args[2] as number
    )
  }
  if (typeof args[0] === 'number') {
    throw new TypeError(
      'The "value" argument must not be of type number. Received type number'
    )
  }
  // Unwrap objects whose valueOf() yields a convertible primitive/object.
  const valueOf = args[0].valueOf && args[0].valueOf()
  if (valueOf != null && valueOf !== args[0]) {
    return Buffer.from(args[0], args[1], args[2])
  }
  const b = Buffer.fromObject(args[0])
  if (b) return b
  // Last resort: objects implementing Symbol.toPrimitive.
  if (
    typeof Symbol !== 'undefined' &&
    Symbol.toPrimitive != null &&
    typeof args[0][Symbol.toPrimitive] === 'function'
  ) {
    return Buffer.from(
      args[0][Symbol.toPrimitive]('string'),
      args[1],
      args[2]
    )
  }
  throw new TypeError(
    'The first argument must be one of type string, Buffer, ArrayBuffer, Array, ' +
      'or Array-like Object. Received type ' +
      typeof args[0]
  )
}
public static isBuffer(b: Buffer): boolean {
return b instanceof Buffer
}
// #region static: byteLength
/**
 * Byte length of `string` when encoded per `encoding` (default utf8).
 * Buffers/views report their byteLength directly.
 */
public static byteLength(
string: string | Buffer | DataView | ArrayBuffer,
encoding?: string
): number
public static byteLength(...args: any[]): number {
if (Buffer.isBuffer(args[0])) {
return args[0].length
}
if (
ArrayBuffer.isView(args[0]) ||
Buffer.isInstance(args[0], ArrayBuffer)
) {
return args[0].byteLength
}
if (typeof args[0] !== 'string') {
throw new TypeError(
'The "string" argument must be one of type string, Buffer, or ArrayBuffer. ' +
'Received type ' +
typeof args[0]
)
}
const len = args[0].length
// Undocumented third argument: when true, an unknown encoding yields -1
// instead of the utf8 fallback.
const mustMatch = args.length > 2 && args[2] === true
if (!mustMatch && len === 0) return 0
// Use a for loop to avoid recursion: on an unknown encoding name,
// lowercase it once and retry the switch.
let loweredCase = false
for (;;) {
switch (args[1]) {
case 'ascii':
case 'latin1':
case 'binary':
return len
case 'utf8':
case 'utf-8':
return Buffer.utf8ToBytes(args[0]).length
case 'ucs2':
case 'ucs-2':
case 'utf16le':
case 'utf-16le':
return len * 2
case 'hex':
return len >>> 1
case 'base64':
return Buffer.base64ToBytes(args[0]).length
default:
if (loweredCase) {
return mustMatch ? -1 : Buffer.utf8ToBytes(args[0]).length // assume utf8
}
args[1] = ('' + args[1]).toLowerCase()
loweredCase = true
}
}
}
// #region static: compare
/**
 * Lexicographically compare two Buffers (or Uint8Arrays): returns -1, 0
 * or 1, with length breaking ties between equal prefixes.
 */
public static compare(a: Buffer, b: Buffer) {
if (Buffer.isInstance(a, Uint8Array)) {
a = Buffer.from(a, a.byteOffset, a.byteLength)
}
if (Buffer.isInstance(b, Uint8Array)) {
b = Buffer.from(b, b.byteOffset, b.byteLength)
}
if (!Buffer.isBuffer(a) || !Buffer.isBuffer(b)) {
throw new TypeError(
'The "buf1", "buf2" arguments must be one of type Buffer or Uint8Array'
)
}
if (a === b) return 0
// x/y start as the lengths (so a shared prefix is decided by length)
// and are overwritten with the first differing byte pair, if any.
let x = a.length
let y = b.length
for (let i = 0, len = Math.min(x, y); i < len; ++i) {
if (a[i] !== b[i]) {
x = a[i]
y = b[i]
break
}
}
if (x < y) return -1
if (y < x) return 1
return 0
}
// #region static: isEncoding
/** Whether `encoding` names a supported encoding (case-insensitive). */
public static isEncoding(encoding: Encoding) {
  const known = [
    'hex',
    'utf8',
    'utf-8',
    'ascii',
    'latin1',
    'binary',
    'base64',
    'ucs2',
    'ucs-2',
    'utf16le',
    'utf-16le',
  ]
  // indexOf (not includes) keeps this ES5-compatible like the rest of
  // the class.
  return known.indexOf(String(encoding).toLowerCase()) !== -1
}
// #region static: concat
/**
 * Concatenate a list of Buffers (or Uint8Arrays) into one Buffer.
 * When `length` is omitted it is computed from the list.
 */
public static concat(list: Buffer[], length?: number) {
  if (!Array.isArray(list)) {
    throw new TypeError('"list" argument must be an Array of Buffers')
  }
  if (list.length === 0) {
    return Buffer.alloc(0)
  }
  let total = length
  if (total === undefined) {
    total = 0
    for (const item of list) {
      total += item.length
    }
  }
  const buffer = Buffer.allocUnsafe(total)
  let pos = 0
  for (let buf of list) {
    // Promote cross-realm/plain Uint8Arrays so .copy is available.
    if (Buffer.isInstance(buf, Uint8Array)) {
      buf = Buffer.from(buf)
    }
    if (!Buffer.isBuffer(buf)) {
      throw new TypeError('"list" argument must be an Array of Buffers')
    }
    buf.copy(buffer, pos)
    pos += buf.length
  }
  return buffer
}
// #region private-static: helpers
/** Reject non-numeric or negative allocation sizes. */
private static assertSize(size: number) {
  if (typeof size !== 'number') {
    throw new TypeError(`'size' argument must be of type number`)
  }
  if (size < 0) {
    throw new RangeError(`The value '${size}' is invalid for option "size"`)
  }
}
/** Allocate the underlying storage, capped at K_MAX_LENGTH. */
private static createBuffer(length: number): Buffer {
  if (length > K_MAX_LENGTH) {
    throw new RangeError(`The value "${length}" is invalid for option "size"`)
  }
  // Return an augmented `Uint8Array` instance.
  return new Buffer(length)
}
/** Validate `length` against K_MAX_LENGTH and truncate it to an int. */
private static checked(length: number): number {
  // The guard is `>=` rather than an inverted `<` on purpose: a NaN
  // length fails every comparison, skips the throw, and is coerced to 0
  // by `| 0` below.
  if (length >= K_MAX_LENGTH) {
    throw new RangeError(
      `Attempt to allocate Buffer larger than maximum size: 0x${K_MAX_LENGTH.toString(
        16
      )} bytes`
    )
  }
  return length | 0
}
/** Normalise a base64 string the way Node does before handing it to base64-js. */
private static base64clean(str: string): string {
  // Data ends at the first '='; strip whitespace and other invalid
  // characters (base64-js is stricter than Node here).
  let cleaned = str.split('=')[0].trim().replace(INVALID_BASE64_RE, '')
  // Node converts strings shorter than 2 chars to ''.
  if (cleaned.length < 2) return ''
  // Re-pad to a multiple of 4 so non-padded input is accepted.
  while (cleaned.length % 4 !== 0) {
    cleaned += '='
  }
  return cleaned
}
/** Hex-format `n`, zero-padding single-digit values to two characters. */
private static toHex(n: number): string {
  return (n < 16 ? '0' : '') + n.toString(16)
}
/**
 * Encode `str` as UTF-8 bytes, emitting at most `units` bytes.
 * Unpaired or malformed surrogates are replaced with U+FFFD (EF BF BD),
 * matching Node's lenient encoder.
 */
private static utf8ToBytes(str: string, units?: number) {
units = units || Infinity
let codePoint
const length = str.length
// Holds a pending high (lead) surrogate until its trail arrives.
let leadSurrogate = null
const bytes = []
for (let i = 0; i < length; ++i) {
codePoint = str.charCodeAt(i)
// is surrogate component
if (codePoint > 0xd7ff && codePoint < 0xe000) {
// last char was a lead
if (!leadSurrogate) {
// no lead yet
if (codePoint > 0xdbff) {
// unexpected trail: emit U+FFFD if the byte budget allows
if ((units -= 3) > -1) bytes.push(0xef, 0xbf, 0xbd)
continue
} else if (i + 1 === length) {
// unpaired lead at end of string: emit U+FFFD
if ((units -= 3) > -1) bytes.push(0xef, 0xbf, 0xbd)
continue
}
// valid lead: remember it and wait for the trail
leadSurrogate = codePoint
continue
}
// 2 leads in a row: replace the first, keep the new one pending
if (codePoint < 0xdc00) {
if ((units -= 3) > -1) bytes.push(0xef, 0xbf, 0xbd)
leadSurrogate = codePoint
continue
}
// valid surrogate pair: combine into a single astral code point
codePoint =
(((leadSurrogate - 0xd800) << 10) | (codePoint - 0xdc00)) + 0x10000
} else if (leadSurrogate) {
// valid bmp char, but last char was a lead: replace the orphan lead
if ((units -= 3) > -1) bytes.push(0xef, 0xbf, 0xbd)
}
leadSurrogate = null
// encode utf8: 1-4 bytes depending on the code point's magnitude;
// each branch first checks the remaining byte budget.
if (codePoint < 0x80) {
if ((units -= 1) < 0) break
bytes.push(codePoint)
} else if (codePoint < 0x800) {
if ((units -= 2) < 0) break
bytes.push((codePoint >> 0x6) | 0xc0, (codePoint & 0x3f) | 0x80)
} else if (codePoint < 0x10000) {
if ((units -= 3) < 0) break
bytes.push(
(codePoint >> 0xc) | 0xe0,
((codePoint >> 0x6) & 0x3f) | 0x80,
(codePoint & 0x3f) | 0x80
)
} else if (codePoint < 0x110000) {
if ((units -= 4) < 0) break
bytes.push(
(codePoint >> 0x12) | 0xf0,
((codePoint >> 0xc) & 0x3f) | 0x80,
((codePoint >> 0x6) & 0x3f) | 0x80,
(codePoint & 0x3f) | 0x80
)
} else {
throw new Error('Invalid code point')
}
}
return bytes
}
/** One byte per UTF-16 code unit, masked to 0xff (Node-compatible). */
private static asciiToBytes(str: string) {
  // split('') iterates code units (not code points), matching the
  // original charCodeAt loop; Node masks with & 0xff rather than & 0x7f.
  return str.split('').map((ch) => ch.charCodeAt(0) & 0xff)
}
private static utf16leToBytes(str: string, units: number): ArrayLike<number> {
let c, hi, lo
const byteArray = []
for (let i = 0; i < str.length; ++i) {
if ((units -= 2) < 0) {
break
}
c = str.charCodeAt(i)
hi = c >> 8
lo = c % 256
byteArray.push(lo)
byteArray.push(hi)
}
return byteArray
}
/** Decode `str` to bytes after normalising it for base64-js. */
private static base64ToBytes(str: string) {
  const cleaned = Buffer.base64clean(str)
  return base64.toByteArray(cleaned)
}
/**
 * Copy up to `length` bytes from `src` into `dst` starting at `offset`,
 * stopping early at either bound. Returns the number of bytes copied.
 */
private static blitBuffer(
  src: ArrayLike<number>,
  dst: Array<number> | Buffer,
  offset: number,
  length: number
): number {
  // `i` is declared outside the loop because it is also the return value
  // (the original used `var` inside the `for` head and relied on hoisting,
  // which fails under block-scoped lint rules).
  let i = 0
  for (; i < length; ++i) {
    if (i + offset >= dst.length || i >= src.length) break
    dst[i + offset] = src[i]
  }
  return i
}
// ArrayBuffer or Uint8Array objects from other contexts (i.e. iframes) do not pass
// the `instanceof` check but they should be treated as of that type.
// See: https://github.com/feross/Buffer/issues/166
/**
 * `instanceof` with a cross-realm fallback: objects from other contexts
 * (e.g. iframes) fail `instanceof`, so fall back to matching the
 * constructor name. See https://github.com/feross/Buffer/issues/166.
 */
private static isInstance(obj: any, type: any) {
  if (obj instanceof type) return true
  return (
    obj != null &&
    obj.constructor != null &&
    obj.constructor.name != null &&
    obj.constructor.name === type.name
  )
}
/**
 * Number.isNaN substitute kept for IE11 support: NaN is the only value
 * that is not equal to itself, so the self-comparison is deliberate.
 */
private static numberIsNaN(obj: number) {
return obj !== obj // eslint-disable-line no-self-compare
}
} | the_stack |
import { RuleOptions } from './types';
import { alpha, lighten } from '@material-ui/core';
/**
 * Produces the CSS-variable definitions that adapt MkDocs (Material theme)
 * output rendered in a shadow DOM to the active Material UI theme.
 *
 * Variables are declared on `:host` (and `:host > *`) instead of `:root`
 * because root-level declarations from the served main*.css do not apply
 * inside the shadow root. Colors and shadows are derived from `theme`; the
 * SVG data URIs are copied verbatim from the upstream stylesheet.
 */
export default ({ theme }: RuleOptions) => `
/*================== Variables ==================*/
/*
  As the MkDocs output is rendered in shadow DOM, the CSS variable definitions on the root selector are not applied. Instead, they have to be applied on :host.
  As there is no way to transform the served main*.css yet (for example in the backend), we have to copy from main*.css and modify them.
*/
:host {
  /* FONT */
  --md-default-fg-color: ${theme.palette.text.primary};
  --md-default-fg-color--light: ${theme.palette.text.secondary};
  --md-default-fg-color--lighter: ${lighten(theme.palette.text.secondary, 0.7)};
  --md-default-fg-color--lightest: ${lighten(
    theme.palette.text.secondary,
    0.3,
  )};
  /* BACKGROUND */
  --md-default-bg-color:${theme.palette.background.default};
  --md-default-bg-color--light: ${theme.palette.background.paper};
  --md-default-bg-color--lighter: ${lighten(
    theme.palette.background.paper,
    0.7,
  )};
  --md-default-bg-color--lightest: ${lighten(
    theme.palette.background.paper,
    0.3,
  )};
  /* PRIMARY */
  --md-primary-fg-color: ${theme.palette.primary.main};
  --md-primary-fg-color--light: ${theme.palette.primary.light};
  --md-primary-fg-color--dark: ${theme.palette.primary.dark};
  --md-primary-bg-color: ${theme.palette.primary.contrastText};
  --md-primary-bg-color--light: ${lighten(
    theme.palette.primary.contrastText,
    0.7,
  )};
  /* ACCENT */
  --md-accent-fg-color: var(--md-primary-fg-color);
  /* SHADOW */
  --md-shadow-z1: ${theme.shadows[1]};
  --md-shadow-z2: ${theme.shadows[2]};
  --md-shadow-z3: ${theme.shadows[3]};
  /* EXTENSIONS */
  --md-admonition-fg-color: var(--md-default-fg-color);
  --md-admonition-bg-color: var(--md-default-bg-color);
  /* Admonitions and others are using SVG masks to define icons. These masks are defined as CSS variables. */
  --md-admonition-icon--note: url('data:image/svg+xml;charset=utf-8,<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24"><path d="M20.71 7.04c.39-.39.39-1.04 0-1.41l-2.34-2.34c-.37-.39-1.02-.39-1.41 0l-1.84 1.83 3.75 3.75M3 17.25V21h3.75L17.81 9.93l-3.75-3.75L3 17.25z"/></svg>');
  --md-admonition-icon--abstract: url('data:image/svg+xml;charset=utf-8,<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24"><path d="M4 5h16v2H4V5m0 4h16v2H4V9m0 4h16v2H4v-2m0 4h10v2H4v-2z"/></svg>');
  --md-admonition-icon--info: url('data:image/svg+xml;charset=utf-8,<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24"><path d="M13 9h-2V7h2m0 10h-2v-6h2m-1-9A10 10 0 002 12a10 10 0 0010 10 10 10 0 0010-10A10 10 0 0012 2z"/></svg>');
  --md-admonition-icon--tip: url('data:image/svg+xml;charset=utf-8,<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24"><path d="M17.55 11.2c-.23-.3-.5-.56-.76-.82-.65-.6-1.4-1.03-2.03-1.66C13.3 7.26 13 4.85 13.91 3c-.91.23-1.75.75-2.45 1.32-2.54 2.08-3.54 5.75-2.34 8.9.04.1.08.2.08.33 0 .22-.15.42-.35.5-.22.1-.46.04-.64-.12a.83.83 0 01-.15-.17c-1.1-1.43-1.28-3.48-.53-5.12C5.89 10 5 12.3 5.14 14.47c.04.5.1 1 .27 1.5.14.6.4 1.2.72 1.73 1.04 1.73 2.87 2.97 4.84 3.22 2.1.27 4.35-.12 5.96-1.6 1.8-1.66 2.45-4.32 1.5-6.6l-.13-.26c-.2-.46-.47-.87-.8-1.25l.05-.01m-3.1 6.3c-.28.24-.73.5-1.08.6-1.1.4-2.2-.16-2.87-.82 1.19-.28 1.89-1.16 2.09-2.05.17-.8-.14-1.46-.27-2.23-.12-.74-.1-1.37.18-2.06.17.38.37.76.6 1.06.76 1 1.95 1.44 2.2 2.8.04.14.06.28.06.43.03.82-.32 1.72-.92 2.27h.01z"/></svg>');
  --md-admonition-icon--success: url('data:image/svg+xml;charset=utf-8,<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24"><path d="M12 2C6.5 2 2 6.5 2 12s4.5 10 10 10 10-4.5 10-10S17.5 2 12 2m-2 15l-5-5 1.41-1.41L10 14.17l7.59-7.59L19 8l-9 9z"/></svg>');
  --md-admonition-icon--question: url('data:image/svg+xml;charset=utf-8,<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24"><path d="M15.07 11.25l-.9.92C13.45 12.89 13 13.5 13 15h-2v-.5c0-1.11.45-2.11 1.17-2.83l1.24-1.26c.37-.36.59-.86.59-1.41a2 2 0 00-2-2 2 2 0 00-2 2H8a4 4 0 014-4 4 4 0 014 4 3.2 3.2 0 01-.93 2.25M13 19h-2v-2h2M12 2A10 10 0 002 12a10 10 0 0010 10 10 10 0 0010-10c0-5.53-4.5-10-10-10z"/></svg>');
  --md-admonition-icon--warning: url('data:image/svg+xml;charset=utf-8,<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24"><path d="M13 14h-2v-4h2m0 8h-2v-2h2M1 21h22L12 2 1 21z"/></svg>');
  --md-admonition-icon--failure: url('data:image/svg+xml;charset=utf-8,<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24"><path d="M12 2c5.53 0 10 4.47 10 10s-4.47 10-10 10S2 17.53 2 12 6.47 2 12 2m3.59 5L12 10.59 8.41 7 7 8.41 10.59 12 7 15.59 8.41 17 12 13.41 15.59 17 17 15.59 13.41 12 17 8.41 15.59 7z"/></svg>');
  --md-admonition-icon--danger: url('data:image/svg+xml;charset=utf-8,<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24"><path d="M11.5 20l4.86-9.73H13V4l-5 9.73h3.5V20M12 2c2.75 0 5.1 1 7.05 2.95C21 6.9 22 9.25 22 12s-1 5.1-2.95 7.05C17.1 21 14.75 22 12 22s-5.1-1-7.05-2.95C3 17.1 2 14.75 2 12s1-5.1 2.95-7.05C6.9 3 9.25 2 12 2z"/></svg>');
  --md-admonition-icon--bug: url('data:image/svg+xml;charset=utf-8,<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24"><path d="M14 12h-4v-2h4m0 6h-4v-2h4m6-6h-2.81a5.985 5.985 0 00-1.82-1.96L17 4.41 15.59 3l-2.17 2.17a6.002 6.002 0 00-2.83 0L8.41 3 7 4.41l1.62 1.63C7.88 6.55 7.26 7.22 6.81 8H4v2h2.09c-.05.33-.09.66-.09 1v1H4v2h2v1c0 .34.04.67.09 1H4v2h2.81c1.04 1.79 2.97 3 5.19 3s4.15-1.21 5.19-3H20v-2h-2.09c.05-.33.09-.66.09-1v-1h2v-2h-2v-1c0-.34-.04-.67-.09-1H20V8z"/></svg>');
  --md-admonition-icon--example: url('data:image/svg+xml;charset=utf-8,<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24"><path d="M7 13v-2h14v2H7m0 6v-2h14v2H7M7 7V5h14v2H7M3 8V5H2V4h2v4H3m-1 9v-1h3v4H2v-1h2v-.5H3v-1h1V17H2m2.25-7a.75.75 0 01.75.75c0 .2-.08.39-.21.52L3.12 13H5v1H2v-.92L4 11H2v-1h2.25z"/></svg>');
  --md-admonition-icon--quote: url('data:image/svg+xml;charset=utf-8,<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24"><path d="M14 17h3l2-4V7h-6v6h3M6 17h3l2-4V7H5v6h3l-2 4z"/></svg>');
  --md-footnotes-icon: url('data:image/svg+xml;charset=utf-8,<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24"><path d="M19 7v4H5.83l3.58-3.59L8 6l-6 6 6 6 1.41-1.42L5.83 13H21V7h-2z"/></svg>');
  --md-details-icon: url('data:image/svg+xml;charset=utf-8,<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24"><path d="M8.59 16.58 13.17 12 8.59 7.41 10 6l6 6-6 6-1.41-1.42z"/></svg>');
  --md-tasklist-icon: url('data:image/svg+xml;charset=utf-8,<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24"><path d="M0 0h24v24H0z" fill="none"/><path d="M19 5v14H5V5h14m0-2H5c-1.1 0-2 .9-2 2v14c0 1.1.9 2 2 2h14c1.1 0 2-.9 2-2V5c0-1.1-.9-2-2-2z"/></svg>');
  --md-tasklist-icon--checked: url('data:image/svg+xml;charset=utf-8,<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24"><path d="M0 0h24v24H0z" fill="none"/><path d="M19 3H5c-1.11 0-2 .9-2 2v14c0 1.1.89 2 2 2h14c1.11 0 2-.9 2-2V5c0-1.1-.89-2-2-2zm-9 14l-5-5 1.41-1.41L10 14.17l7.59-7.59L19 8l-9 9z"/></svg>');
  --md-nav-icon--prev: url('data:image/svg+xml;charset=utf-8,<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24"><path d="M20 11v2H8l5.5 5.5-1.42 1.42L4.16 12l7.92-7.92L13.5 5.5 8 11h12z"/></svg>');
  --md-nav-icon--next: url('data:image/svg+xml;charset=utf-8,<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24"><path d="M8.59 16.58 13.17 12 8.59 7.41 10 6l6 6-6 6-1.41-1.42z"/></svg>');
  --md-toc-icon: url('data:image/svg+xml;charset=utf-8,<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24"><path d="M3 9h14V7H3v2m0 4h14v-2H3v2m0 4h14v-2H3v2m16 0h2v-2h-2v2m0-10v2h2V7h-2m0 6h2v-2h-2v2z"/></svg>');
  --md-clipboard-icon: url('data:image/svg+xml;charset=utf-8,<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24"><path d="M19 21H8V7h11m0-2H8a2 2 0 0 0-2 2v14a2 2 0 0 0 2 2h11a2 2 0 0 0 2-2V7a2 2 0 0 0-2-2m-3-4H4a2 2 0 0 0-2 2v14h2V3h12V1z"/></svg>');
  --md-search-result-icon: url('data:image/svg+xml;charset=utf-8,<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24"><path d="M14 2H6a2 2 0 0 0-2 2v16a2 2 0 0 0 2 2h7c-.41-.25-.8-.56-1.14-.9-.33-.33-.61-.7-.86-1.1H6V4h7v5h5v1.18c.71.16 1.39.43 2 .82V8l-6-6m6.31 16.9c1.33-2.11.69-4.9-1.4-6.22-2.11-1.33-4.91-.68-6.22 1.4-1.34 2.11-.69 4.89 1.4 6.22 1.46.93 3.32.93 4.79.02L22 23.39 23.39 22l-3.08-3.1m-3.81.1a2.5 2.5 0 0 1-2.5-2.5 2.5 2.5 0 0 1 2.5-2.5 2.5 2.5 0 0 1 2.5 2.5 2.5 2.5 0 0 1-2.5 2.5z"/></svg>');
  --md-source-forks-icon: url('data:image/svg+xml;charset=utf-8,<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 16 16"><path fill-rule="evenodd" d="M5 3.25a.75.75 0 1 1-1.5 0 .75.75 0 0 1 1.5 0zm0 2.122a2.25 2.25 0 1 0-1.5 0v.878A2.25 2.25 0 0 0 5.75 8.5h1.5v2.128a2.251 2.251 0 1 0 1.5 0V8.5h1.5a2.25 2.25 0 0 0 2.25-2.25v-.878a2.25 2.25 0 1 0-1.5 0v.878a.75.75 0 0 1-.75.75h-4.5A.75.75 0 0 1 5 6.25v-.878zm3.75 7.378a.75.75 0 1 1-1.5 0 .75.75 0 0 1 1.5 0zm3-8.75a.75.75 0 1 0 0-1.5.75.75 0 0 0 0 1.5z"/></svg>');
  --md-source-repositories-icon: url('data:image/svg+xml;charset=utf-8,<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 16 16"><path fill-rule="evenodd" d="M2 2.5A2.5 2.5 0 0 1 4.5 0h8.75a.75.75 0 0 1 .75.75v12.5a.75.75 0 0 1-.75.75h-2.5a.75.75 0 1 1 0-1.5h1.75v-2h-8a1 1 0 0 0-.714 1.7.75.75 0 0 1-1.072 1.05A2.495 2.495 0 0 1 2 11.5v-9zm10.5-1V9h-8c-.356 0-.694.074-1 .208V2.5a1 1 0 0 1 1-1h8zM5 12.25v3.25a.25.25 0 0 0 .4.2l1.45-1.087a.25.25 0 0 1 .3 0L8.6 15.7a.25.25 0 0 0 .4-.2v-3.25a.25.25 0 0 0-.25-.25h-3.5a.25.25 0 0 0-.25.25z"/></svg>');
  --md-source-stars-icon: url('data:image/svg+xml;charset=utf-8,<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 16 16"><path fill-rule="evenodd" d="M8 .25a.75.75 0 0 1 .673.418l1.882 3.815 4.21.612a.75.75 0 0 1 .416 1.279l-3.046 2.97.719 4.192a.75.75 0 0 1-1.088.791L8 12.347l-3.766 1.98a.75.75 0 0 1-1.088-.79l.72-4.194L.818 6.374a.75.75 0 0 1 .416-1.28l4.21-.611L7.327.668A.75.75 0 0 1 8 .25zm0 2.445L6.615 5.5a.75.75 0 0 1-.564.41l-3.097.45 2.24 2.184a.75.75 0 0 1 .216.664l-.528 3.084 2.769-1.456a.75.75 0 0 1 .698 0l2.77 1.456-.53-3.084a.75.75 0 0 1 .216-.664l2.24-2.183-3.096-.45a.75.75 0 0 1-.564-.41L8 2.694v.001z"/></svg>');
  --md-source-version-icon: url('data:image/svg+xml;charset=utf-8,<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 16 16"><path fill-rule="evenodd" d="M2.5 7.775V2.75a.25.25 0 0 1 .25-.25h5.025a.25.25 0 0 1 .177.073l6.25 6.25a.25.25 0 0 1 0 .354l-5.025 5.025a.25.25 0 0 1-.354 0l-6.25-6.25a.25.25 0 0 1-.073-.177zm-1.5 0V2.75C1 1.784 1.784 1 2.75 1h5.025c.464 0 .91.184 1.238.513l6.25 6.25a1.75 1.75 0 0 1 0 2.474l-5.026 5.026a1.75 1.75 0 0 1-2.474 0l-6.25-6.25A1.75 1.75 0 0 1 1 7.775zM6 5a1 1 0 1 0 0 2 1 1 0 0 0 0-2z"/></svg>');
  --md-version-icon: url('data:image/svg+xml;charset=utf-8,<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 320 512"><!--! Font Awesome Free 6.0.0 by @fontawesome - https://fontawesome.com License - https://fontawesome.com/license/free (Icons: CC BY 4.0, Fonts: SIL OFL 1.1, Code: MIT License) Copyright 2022 Fonticons, Inc.--><path d="m310.6 246.6-127.1 128c-7.1 6.3-15.3 9.4-23.5 9.4s-16.38-3.125-22.63-9.375l-127.1-128C.224 237.5-2.516 223.7 2.438 211.8S19.07 192 32 192h255.1c12.94 0 24.62 7.781 29.58 19.75s3.12 25.75-6.08 34.85z"/></svg>');
}
:host > * {
  /* CODE */
  --md-code-fg-color: ${theme.palette.text.primary};
  --md-code-bg-color: ${theme.palette.background.paper};
  --md-code-hl-color: ${alpha(theme.palette.warning.main, 0.5)};
  --md-code-hl-keyword-color: ${
    theme.palette.type === 'dark'
      ? theme.palette.primary.light
      : theme.palette.primary.dark
  };
  --md-code-hl-function-color: ${
    theme.palette.type === 'dark'
      ? theme.palette.secondary.light
      : theme.palette.secondary.dark
  };
  --md-code-hl-string-color: ${
    theme.palette.type === 'dark'
      ? theme.palette.success.light
      : theme.palette.success.dark
  };
  --md-code-hl-number-color: ${
    theme.palette.type === 'dark'
      ? theme.palette.error.light
      : theme.palette.error.dark
  };
  --md-code-hl-constant-color: var(--md-code-hl-function-color);
  --md-code-hl-special-color: var(--md-code-hl-function-color);
  --md-code-hl-name-color: var(--md-code-fg-color);
  --md-code-hl-comment-color: var(--md-default-fg-color--light);
  --md-code-hl-generic-color: var(--md-default-fg-color--light);
  --md-code-hl-variable-color: var(--md-default-fg-color--light);
  --md-code-hl-operator-color: var(--md-default-fg-color--light);
  --md-code-hl-punctuation-color: var(--md-default-fg-color--light);
  /* TYPESET */
  --md-typeset-font-size: 1rem;
  --md-typeset-color: var(--md-default-fg-color);
  --md-typeset-a-color: var(--md-accent-fg-color);
  --md-typeset-table-color: ${theme.palette.text.primary};
  --md-typeset-del-color: ${
    theme.palette.type === 'dark'
      ? alpha(theme.palette.error.dark, 0.5)
      : alpha(theme.palette.error.light, 0.5)
  };
  --md-typeset-ins-color: ${
    theme.palette.type === 'dark'
      ? alpha(theme.palette.success.dark, 0.5)
      : alpha(theme.palette.success.light, 0.5)
  };
  --md-typeset-mark-color: ${
    theme.palette.type === 'dark'
      ? alpha(theme.palette.warning.dark, 0.5)
      : alpha(theme.palette.warning.light, 0.5)
  };
}
@media screen and (max-width: 76.1875em) {
  :host > * {
    /* TYPESET */
    --md-typeset-font-size: .9rem;
  }
}
@media screen and (max-width: 600px) {
  :host > * {
    /* TYPESET */
    --md-typeset-font-size: .7rem;
  }
}
`;
import * as tape from 'tape';
import reifyRFO from '@core/reflection/reifyRFO';
tape(
'reflection.reifyRFO',
(t) => {
const specs = [
// named standard function
[
{
__function: {
type: 'FunctionExpression',
name: '_foobar',
params: ['a', 'b', 'c'],
body: ' return 123;',
},
},
'function _foobar(a, b, c) {\n return 123;\n}',
],
[
{
__function: {
type: 'FunctionExpression',
name: '_foobar',
params: ['a', 'b', 'c'],
body: ' return 123;',
},
},
'function _foobar(a, b, c) {\n return 123;\n}',
],
[
{
__function: {
type: 'FunctionExpression',
name: '_foobar',
params: ['a', 'b', 'c'],
body: ' return 123;',
},
},
'function _foobar(a, b, c) {\n return 123;\n}',
],
[
{
__function: {
type: 'FunctionExpression',
name: '_foobar',
params: ['a', 'b', 'c'],
body: ' /** hyasdf\\n yaya\\n */return 123; // thingy',
},
},
'function _foobar(a, b, c) {\n /** hyasdf\\n yaya\\n */return 123; // thingy\n}',
],
// anonymous standard function
[
{
__function: {
type: 'FunctionExpression',
name: null,
params: ['a', 'b', 'c'],
body: ' return 123;',
},
},
'function (a, b, c) {\n return 123;\n}',
],
[
{
__function: {
type: 'FunctionExpression',
name: null,
params: ['a', 'b', 'c'],
body: ' return 123;',
},
},
'function (a, b, c) {\n return 123;\n}',
],
[
{
__function: {
type: 'FunctionExpression',
name: null,
params: ['a', 'b', 'c'],
body: ' return 123;',
},
},
'function (a, b, c) {\n return 123;\n}',
],
[
{
__function: {
type: 'FunctionExpression',
name: null,
params: ['a', 'b', 'c'],
body: ' /** hyasdf\\n yaya\\n */return 123; // thingy',
},
},
'function (a, b, c) {\n /** hyasdf\\n yaya\\n */return 123; // thingy\n}',
],
// anonymous standard function
[
{
__function: {
type: 'FunctionExpression',
params: ['a', 'b', 'c'],
body: ' return 123;',
},
},
'function (a, b, c) {\n return 123;\n}',
],
[
{
__function: {
type: 'FunctionExpression',
params: ['a', 'b', 'c'],
body: ' return 123;',
},
},
'function (a, b, c) {\n return 123;\n}',
],
[
{
__function: {
type: 'FunctionExpression',
params: ['a', 'b', 'c'],
body: ' return 123;',
},
},
'function (a, b, c) {\n return 123;\n}',
],
[
{
__function: {
type: 'FunctionExpression',
params: ['a', 'b', 'c'],
body: ' /** hyasdf\\n yaya\\n */return 123; // thingy',
},
},
'function (a, b, c) {\n /** hyasdf\\n yaya\\n */return 123; // thingy\n}',
],
// anonymous standard function
[
{
__function: {
params: ['a', 'b', 'c'],
body: ' return 123;',
},
},
'function (a, b, c) {\n return 123;\n}',
],
[
{
__function: {
params: ['a', 'b', 'c'],
body: ' return 123;',
},
},
'function (a, b, c) {\n return 123;\n}',
],
[
{
__function: {
params: ['a', 'b', 'c'],
body: ' return 123;',
},
},
'function (a, b, c) {\n return 123;\n}',
],
[
{
__function: {
params: ['a', 'b', 'c'],
body: ' /** hyasdf\\n yaya\\n */return 123; // thingy',
},
},
'function (a, b, c) {\n /** hyasdf\\n yaya\\n */return 123; // thingy\n}',
],
// anonymous arrow function with braced body
[
{
__function: {
type: 'ArrowFunctionExpression',
name: null,
params: ['a', 'b', 'c'],
body: ' return 123;',
},
},
'(a, b, c) => {\n return 123;\n}',
],
[
{
__function: {
type: 'ArrowFunctionExpression',
name: null,
params: ['a', 'b', 'c'],
body: ' return 123;',
},
},
'(a, b, c) => {\n return 123;\n}',
],
[
{
__function: {
type: 'ArrowFunctionExpression',
name: null,
params: ['a', 'b', 'c'],
body: ' return 123;',
},
},
'(a, b, c) => {\n return 123;\n}',
],
[
{
__function: {
type: 'ArrowFunctionExpression',
name: null,
params: ['a', 'b', 'c'],
body: ' /** hyasdf\\n yaya\\n */return 123; // thingy',
},
},
'(a, b, c) => {\n /** hyasdf\\n yaya\\n */return 123; // thingy\n}',
],
// standard with argument object destructuring
[
{
__function: {
type: 'FunctionExpression',
name: null,
params: [
{
a: 'a',
b: 'b',
c: 'c',
},
],
body: ' return 123;',
},
},
'function ({ a, b, c }) {\n return 123;\n}',
],
[
{
__function: {
type: 'FunctionExpression',
name: null,
params: [
{
a: 'a',
b: 'b',
c: {
d: 'd',
e: 'e',
},
},
],
body: ' return 123;',
},
},
'function ({ a, b, c, c: { d, e } }) {\n return 123;\n}',
],
[
{
__function: {
type: 'FunctionExpression',
name: null,
params: [
{
a: 'a',
b: {},
c: ['d', 'e'],
},
],
body: ' return 123;',
},
},
'function ({ a, b, c, c: [ d, e ] }) {\n return 123;\n}',
],
[
{
__function: {
type: 'FunctionExpression',
name: null,
params: [
{
a: 'a',
b: {},
c: ['d', 'e'],
}, {
f: 'f',
z: ['g', 'h'],
i: 'i',
j: 'j',
k: {
l: 'l',
m: {
n: {},
o: [],
},
},
}, {__rest: 'args'},
],
body: ' return 123;',
},
},
// tslint:disable-next-line:max-line-length
'function ({ a, b, c, c: [ d, e ] }, { f, z, z: [ g, h ], i, j, k, k: { l, m, m: { n, o } } }, ...args) {\n return 123;\n}',
],
// // arrow with argument object destructuring
[
{
__function: {
type: 'ArrowFunctionExpression',
name: null,
params: [
{
a: 'a',
b: {},
c: 'c',
},
],
body: ' return 123;',
},
},
'({ a, b, c }) => {\n return 123;\n}',
],
[
{
__function: {
type: 'ArrowFunctionExpression',
name: null,
params: [
{
a: 'a',
b: 'b',
c: {
d: {},
e: 'e',
},
},
],
body: ' return 123;',
},
},
'({ a, b, c, c: { d, e } }) => {\n return 123;\n}',
],
[
{
__function: {
type: 'ArrowFunctionExpression',
name: null,
params: [
{
a: 'a',
b: 'b',
c: ['d', 'e'],
},
],
body: ' return 123;',
},
},
'({ a, b, c, c: [ d, e ] }) => {\n return 123;\n}',
],
[
{
__function: {
type: 'ArrowFunctionExpression',
name: null,
params: [
{
a: 'a',
b: 'b',
c: ['d', 'e'],
}, {
f: 'f',
z: ['g', 'h'],
i: 'i',
j: 'j',
k: {
l: 'l',
m: {
n: 'n',
o: 'o',
},
},
}, {__rest: 'args'},
],
body: ' return 123;',
},
},
// tslint:disable-next-line:max-line-length
'({ a, b, c, c: [ d, e ] }, { f, z, z: [ g, h ], i, j, k, k: { l, m, m: { n, o } } }, ...args) => {\n return 123;\n}',
],
];
t.plan(specs.length);
specs.forEach((spec: any) => {
const object = spec[0].__function;
const expected = spec[1];
const result = reifyRFO(object);
t.equal(
JSON.stringify(result.toString()),
JSON.stringify(expected),
);
});
},
); | the_stack |
import { action } from 'mobx';
import { observer } from 'mobx-react-lite';
import * as c from '@aws-accelerator/config';
import * as t from '@aws-accelerator/common-types';
import { Box, Checkbox, CheckboxProps, Container, FormField, Header, SpaceBetween } from '@awsui/components-react';
import { useI18n } from '@/components/i18n-context';
import { Indent } from '@/components/indent';
import { TypeTreeNode } from '@/types';
import { useEnableNode } from '../util';
import { AcceleratorConfigurationNode } from '../configuration';
import { WizardField, WizardInlineBooleanField } from '../components/fields';
import { getVpcNodes } from '../components/vpc-table';
/**
 * Props for {@link SelectSecurityServicesStep}.
 */
export interface SelectSecurityServicesStepProps {
  // Wizard configuration state; read and mutated through TypeTreeNode get/set.
  configuration: any;
}
// Frequently used nodes of the accelerator configuration tree. Each node is a
// typed pointer into the wizard's configuration state, resolved lazily via
// `.get(state)` / `.set(state, value)` in the components below.
const globalOptionsNode = AcceleratorConfigurationNode.nested('global-options');
const securityServicesNode = globalOptionsNode.nested('central-security-services');
const securityHubNode = securityServicesNode.nested<t.BooleanType>('security-hub');
const logServicesNode = globalOptionsNode.nested('central-log-services');
const operationsServicesNode = globalOptionsNode.nested('central-operations-services');
const awsConfigNode = globalOptionsNode.nested('aws-config');
const standardsNode = globalOptionsNode.nested('security-hub-frameworks').nested('standards');
/**
 * Wizard step for selecting central security services and security logging.
 *
 * Renders two containers: one toggling security services (Security Hub and
 * its framework standards, GuardDuty, Access Analyzer, Macie, AWS Config
 * rules/remediations, CloudWatch Logs access) and one for logging settings
 * (retention periods, VPC flow logs, DNS resolver logging, SSM log targets).
 * All fields read and write `configuration` through the shared tree nodes.
 */
export const SelectSecurityServicesStep = observer(function SelectSecurityServicesStep({
  configuration,
}: SelectSecurityServicesStepProps) {
  const { tr } = useI18n();
  return (
    <SpaceBetween size="xxl" direction="vertical">
      <Container
        header={
          <Header variant="h2" description={tr('wizard.headers.security_services_desc')}>
            {tr('wizard.headers.security_services')}
          </Header>
        }
      >
        <SpaceBetween size="xs" direction="vertical">
          <Box>{tr('wizard.labels.security_services_text')}</Box>
          <SpaceBetween size="xxs">
            <WizardInlineBooleanField state={configuration} node={securityHubNode} />
            <Indent>
              <SpaceBetween size="xxs">
                <SecurityHubFrameworkStandard
                  standardsNode={standardsNode}
                  state={configuration}
                  name="AWS Foundational Security Best Practices v1.0.0"
                  initialControlsToDisable={['IAM.1']}
                />
                <SecurityHubFrameworkStandard
                  standardsNode={standardsNode}
                  state={configuration}
                  name="PCI DSS v3.2.1"
                  initialControlsToDisable={['PCI.IAM.3', 'PCI.KMS.1', 'PCI.S3.3', 'PCI.EC2.3', 'PCI.Lambda.2']}
                />
                <SecurityHubFrameworkStandard
                  standardsNode={standardsNode}
                  state={configuration}
                  name="CIS AWS Foundations Benchmark v1.2.0"
                  initialControlsToDisable={['CIS.1.20', 'CIS.1.22', 'CIS.2.8']}
                />
              </SpaceBetween>
            </Indent>
            <WizardInlineBooleanField state={configuration} node={securityServicesNode.nested('guardduty')} />
            <WizardInlineBooleanField state={configuration} node={securityServicesNode.nested('guardduty-s3')} />
            <WizardInlineBooleanField state={configuration} node={securityServicesNode.nested('access-analyzer')} />
            <WizardInlineBooleanField state={configuration} node={securityServicesNode.nested('macie')} />
            <FormField
              label={tr('wizard.fields.aws_config')}
              description={tr('wizard.fields.aws_config_desc')}
              stretch={true}
            >
              <Indent>
                <SpaceBetween size="xxs">
                  <FrameworkProvidedRules state={configuration} />
                  <FrameworkProvidedRemediations state={configuration} />
                </SpaceBetween>
              </Indent>
            </FormField>
            {/* NOTE(review): this controlId (and the two below) reuse
                securityServicesNode.path for fields that write to other nodes
                — confirm the duplicated controlId is intentional. */}
            <FormField
              controlId={securityServicesNode.path.join('.')}
              label={tr('wizard.fields.cwl_centralized_access')}
              description={tr('wizard.fields.cwl_centralized_access_desc')}
              stretch={true}
            >
              <Indent>
                <SpaceBetween size="xxs">
                  <WizardInlineBooleanField
                    state={configuration}
                    node={securityServicesNode.nested('cwl')}
                    context={{
                      label: tr('wizard.fields.cwl_central_security_services_account'),
                      description: tr('wizard.fields.cwl_central_security_services_account_desc'),
                    }}
                  />
                  <WizardInlineBooleanField
                    state={configuration}
                    node={operationsServicesNode.nested('cwl')}
                    context={{
                      label: tr('wizard.fields.cwl_central_operations_account'),
                      description: tr('wizard.fields.cwl_central_operations_account_desc'),
                    }}
                  />
                </SpaceBetween>
              </Indent>
            </FormField>
          </SpaceBetween>
        </SpaceBetween>
      </Container>
      <Container
        header={
          <Header variant="h2" description={tr('wizard.headers.security_services_logging_desc')}>
            {tr('wizard.headers.security_services_logging')}
          </Header>
        }
      >
        <SpaceBetween size="xs" direction="vertical">
          <Box>{tr('wizard.labels.security_services_logging_text')}</Box>
          <SpaceBetween size="xxs">
            <FormField
              controlId={securityServicesNode.path.join('.')}
              label={tr('wizard.fields.retention_periods_for')}
              description={tr('wizard.fields.retention_periods_for_desc')}
              stretch={true}
            >
              <Indent>
                <SpaceBetween size="xxs">
                  <WizardField state={configuration} node={logServicesNode.nested('s3-retention')} />
                  <WizardField state={configuration} node={globalOptionsNode.nested('default-cwl-retention')} />
                  <WizardField state={configuration} node={globalOptionsNode.nested('default-s3-retention')} />
                </SpaceBetween>
              </Indent>
            </FormField>
            <FormField
              label={tr('wizard.fields.vpc_flow_logs_all_vcps')}
              description={tr('wizard.fields.vpc_flow_logs_all_vcps_desc')}
              stretch={true}
            >
              <Indent>
                <SpaceBetween size="xxs">
                  <FlowLogsComponent state={configuration} />
                </SpaceBetween>
              </Indent>
            </FormField>
            <DnsResolverLoggingComponent state={configuration} />
            <FormField
              controlId={securityServicesNode.path.join('.')}
              label={tr('wizard.fields.ssm_logs_to')}
              description={tr('wizard.fields.ssm_logs_to_desc')}
              stretch={true}
            >
              <Indent>
                <SpaceBetween size="xxs">
                  <WizardInlineBooleanField state={configuration} node={logServicesNode.nested('ssm-to-cwl')} />
                  <WizardInlineBooleanField state={configuration} node={logServicesNode.nested('ssm-to-s3')} />
                </SpaceBetween>
              </Indent>
            </FormField>
          </SpaceBetween>
        </SpaceBetween>
      </Container>
    </SpaceBetween>
  );
});
/**
 * Props for {@link SecurityHubFrameworkStandard}.
 */
export interface SecurityHubFrameworkStandardProps {
  // Wizard configuration state object.
  state: any;
  // Node pointing at the security-hub-frameworks standards array.
  standardsNode: TypeTreeNode;
  // Name of the Security Hub standard; used to locate its entry in the array.
  name: string;
  // Controls written to 'controls-to-disable' when the standard is first enabled.
  initialControlsToDisable: string[];
  // Checkbox label; defaults to `name`.
  label?: string;
}
/**
 * Component that renders a single security hub framework standard checkbox.
 *
 * The checkbox is only interactive while Security Hub itself is enabled.
 * Enabling the standard creates `{ name, 'controls-to-disable' }` at the
 * standard's slot in the standards array (via `useEnableNode`).
 */
export const SecurityHubFrameworkStandard = observer((props: SecurityHubFrameworkStandardProps) => {
  const { initialControlsToDisable, name, label = name, state, standardsNode } = props;
  const securityHubEnabled = securityHubNode.get(state);
  // Find the standard index by its name
  const standards: any[] = standardsNode.get(state) ?? [];
  let standardIndex = standards.findIndex((standard: any) => standard.name === name);
  if (standardIndex < 0) {
    // Not present yet: target the slot one past the end so enabling appends.
    standardIndex = standards.length;
  }
  // Track enabled changes
  const standardNode = standardsNode.nested(standardIndex);
  const [enabled, handleEnableChange] = useEnableNode(standardNode, state, () => ({
    name,
    'controls-to-disable': initialControlsToDisable,
  }));
  return (
    <SpaceBetween size="s" direction="horizontal">
      <Checkbox
        checked={enabled}
        disabled={securityHubEnabled !== true}
        onChange={event => handleEnableChange(event.detail.checked)}
      />
      <label>{label}</label>
    </SpaceBetween>
  );
});
/**
 * Props for {@link FlowLogsComponent}.
 */
interface FlowLogsComponentProps {
  // Wizard configuration state object.
  state: any;
}
// True when a VPC's flow-log setting delivers to S3 ('S3' or 'BOTH').
const hasS3FlowLogsEnabled = (value: string) => ['S3', 'BOTH'].includes(value);
// True when a VPC's flow-log setting delivers to CloudWatch Logs ('CWL' or 'BOTH').
const hasCloudWatchFlowLogsEnabled = (value: string) => ['CWL', 'BOTH'].includes(value);
/**
 * Component that renders checkboxes to manage VPC flow logs for all VPCs.
 *
 * Each VPC's `flow-logs` value is one of 'NONE' | 'S3' | 'CWL' | 'BOTH'.
 * The two checkboxes summarize S3 and CloudWatch delivery across every VPC:
 * checked when at least one VPC delivers to that target, indeterminate when
 * VPCs disagree. Toggling a checkbox rewrites every VPC's value while
 * preserving the other target's setting.
 */
const FlowLogsComponent = observer((props: FlowLogsComponentProps) => {
  const { state } = props;
  const { tr } = useI18n();
  // NOTE(review): "flowFlog" looks like a typo for "flowLog"; rename when convenient.
  const vpcNodes = getVpcNodes(state);
  const flowFlogNodes = vpcNodes.map(vpcNode => vpcNode.nested('flow-logs'));
  const flowFlogValues = flowFlogNodes.map(flowLogNode => flowLogNode.get(state));
  const s3Enabled = flowFlogValues.some(hasS3FlowLogsEnabled);
  const s3Indeterminate = flowFlogValues.some(v => hasS3FlowLogsEnabled(v) !== s3Enabled);
  const cwlEnabled = flowFlogValues.some(hasCloudWatchFlowLogsEnabled);
  const cwlIndeterminate = flowFlogValues.some(v => hasCloudWatchFlowLogsEnabled(v) !== cwlEnabled);
  // Toggle S3 delivery for every VPC while keeping its CWL setting:
  // checking moves NONE -> S3 and CWL -> BOTH; unchecking moves S3 -> NONE
  // and BOTH -> CWL.
  const handleS3Change: CheckboxProps['onChange'] = action(event => {
    for (const flowFlogNode of flowFlogNodes) {
      const flowLogValue = flowFlogNode.get(state);
      if (event.detail.checked) {
        if (flowLogValue === 'CWL') {
          flowFlogNode.set(state, 'BOTH');
        } else if (flowLogValue === 'NONE') {
          flowFlogNode.set(state, 'S3');
        }
      } else {
        if (flowLogValue === 'S3') {
          flowFlogNode.set(state, 'NONE');
        } else if (flowLogValue === 'BOTH') {
          flowFlogNode.set(state, 'CWL');
        }
      }
    }
  });
  // Toggle CloudWatch delivery for every VPC while keeping its S3 setting:
  // checking moves NONE -> CWL and S3 -> BOTH; unchecking moves CWL -> NONE
  // and BOTH -> S3.
  const handleCwlChange: CheckboxProps['onChange'] = action(event => {
    for (const flowFlogNode of flowFlogNodes) {
      const flowLogValue = flowFlogNode.get(state);
      if (event.detail.checked) {
        if (flowLogValue === 'S3') {
          flowFlogNode.set(state, 'BOTH');
        } else if (flowLogValue === 'NONE') {
          flowFlogNode.set(state, 'CWL');
        }
      } else {
        if (flowLogValue === 'CWL') {
          flowFlogNode.set(state, 'NONE');
        } else if (flowLogValue === 'BOTH') {
          flowFlogNode.set(state, 'S3');
        }
      }
    }
  });
  return (
    <>
      <SpaceBetween size="s" direction="horizontal">
        <Checkbox checked={s3Enabled} indeterminate={s3Indeterminate} onChange={handleS3Change} />
        <label>{tr('wizard.fields.vpc_flow_logs_s3')}</label>
      </SpaceBetween>
      <SpaceBetween size="s" direction="horizontal">
        <Checkbox checked={cwlEnabled} indeterminate={cwlIndeterminate} onChange={handleCwlChange} />
        <label>{tr('wizard.fields.vpc_flow_logs_cwl')}</label>
      </SpaceBetween>
    </>
  );
});
/**
 * Props for {@link DnsResolverLoggingComponent}.
 */
interface DnsResolverLoggingComponentProps {
  // Wizard configuration state object.
  state: any;
}
/**
 * Component that renders a checkbox to manage DNS resolver logging for all VPCs.
 *
 * Checked when at least one VPC has `dns-resolver-logging` enabled and
 * indeterminate when VPCs disagree; toggling writes the same boolean to
 * every VPC.
 */
const DnsResolverLoggingComponent = observer((props: DnsResolverLoggingComponentProps) => {
  const { state } = props;
  const { tr } = useI18n();
  // Find all VPC nodes in the state
  const vpcNodes = getVpcNodes(state);
  // Load all logging nodes in all VPCs
  const loggingNodes = vpcNodes.map(vpcNode => vpcNode.nested('dns-resolver-logging'));
  const loggingValues = loggingNodes.map(flowLogNode => flowLogNode.get(state));
  const enabled = loggingValues.some(v => v === true);
  const indeterminate = loggingValues.some(v => v !== enabled);
  // Apply the new checked state uniformly across every VPC.
  const handleChange: CheckboxProps['onChange'] = action(event => {
    for (const loggingNode of loggingNodes) {
      loggingNode.set(state, event.detail.checked);
    }
  });
  return (
    <SpaceBetween size="s" direction="horizontal">
      <Checkbox checked={enabled} indeterminate={indeterminate} onChange={handleChange} />
      <label>{tr('wizard.fields.dns_resolver_logging_all_vpcs')}</label>
    </SpaceBetween>
  );
});
// Node holding the framework-provided AWS Config rules array.
const frameworkProvidedRulesNode = awsConfigNode.nested('rules');
/**
 * Props for {@link FrameworkProvidedRules}.
 */
interface FrameworkProvidedRulesProps {
  // Wizard configuration state object.
  state: any;
}
/**
 * Checkbox summarizing the framework-provided AWS Config rules. Disabled when
 * no rules are configured; the label reports the rule count.
 */
const FrameworkProvidedRules = observer((props: FrameworkProvidedRulesProps) => {
  const { state } = props;
  const { tr } = useI18n();
  const [enabled, setEnabled] = useEnableNode(frameworkProvidedRulesNode, state, () => []);
  const rules = frameworkProvidedRulesNode.get(state) ?? [];
  return (
    <SpaceBetween size="s" direction="horizontal">
      <Checkbox checked={enabled} disabled={rules.length === 0} onChange={e => setEnabled(e.detail.checked)} />
      <label>{tr('wizard.fields.aws_config_rules', { count: rules.length })}</label>
    </SpaceBetween>
  );
});
/**
 * Props for {@link FrameworkProvidedRemediations}.
 */
interface FrameworkProvidedRemediationsProps {
  // Wizard configuration state object.
  state: any;
}
// Node holding the SSM automation sharing configuration.
const ssmAutomationNode = globalOptionsNode.nested('ssm-automation');
/**
 * Checkbox summarizing the framework-provided SSM remediation documents.
 * Disabled when no documents are configured; the label reports the total
 * document count across all automation entries.
 */
const FrameworkProvidedRemediations = observer((props: FrameworkProvidedRemediationsProps) => {
  const { state } = props;
  const { tr } = useI18n();
  const [enabled, setEnabled] = useEnableNode(ssmAutomationNode, state, () => []);
  const ssmAutomation = (ssmAutomationNode.get(state) as c.SsmShareAutomation[]) ?? [];
  // Fix: the reduce accumulator was named `c`, shadowing the
  // `@aws-accelerator/config` namespace import `c` used in the type
  // annotation on the line above; renamed to keep the import unambiguous.
  const ssmAutomationDocumentCount = ssmAutomation.reduce<number>(
    (total, automation) => total + (automation?.documents.length ?? 0),
    0,
  );
  return (
    <SpaceBetween size="s" direction="horizontal">
      <Checkbox
        checked={enabled}
        disabled={ssmAutomationDocumentCount === 0}
        onChange={e => setEnabled(e.detail.checked)}
      />
      <label>{tr('wizard.fields.aws_config_remediations', { count: ssmAutomationDocumentCount })}</label>
    </SpaceBetween>
  );
});
import * as appsync from "@aws-cdk/aws-appsync-alpha";
import { aws_dynamodb } from "aws-cdk-lib";
import { Construct } from "constructs";
import { JsonFormat } from "typesafe-dynamodb";
import {
NativeBinaryAttribute,
ToAttributeMap,
} from "typesafe-dynamodb/lib/attribute-value";
import {
ExpressionAttributeNames,
ExpressionAttributeValues,
} from "typesafe-dynamodb/lib/expression-attributes";
import { TableKey } from "typesafe-dynamodb/lib/key";
import { Narrow } from "typesafe-dynamodb/lib/narrow";
// @ts-expect-error - AppsyncResolver is imported for tsdoc
import { AppsyncResolver, AppSyncVtlIntegration } from "./appsync";
import { assertNodeKind } from "./assert";
import { ObjectLiteralExpr } from "./expression";
import {
IntegrationCall,
IntegrationInput,
makeIntegration,
} from "./integration";
import { AnyFunction } from "./util";
import { VTL } from "./vtl";
export function isTable(a: any): a is AnyTable {
return a?.kind === "Table";
}
export interface TableProps<
PartitionKey extends string,
RangeKey extends string | undefined = undefined
> extends Omit<
aws_dynamodb.TableProps,
"partitionKey" | "sortKey" | "tableName"
> {
/**
* Enforces a particular physical table name.
* @default generated
*
* [Internal Note] this property is copied because CDK tsdocs have a xml like tag
* around `generated` which breaks typedocs.
*/
readonly tableName?: string;
/**
* Partition key attribute definition.
*/
readonly partitionKey: {
name: PartitionKey;
type: aws_dynamodb.AttributeType;
};
/**
* Sort key attribute definition.
*
* @default no sort key
*/
readonly sortKey?: RangeKey extends undefined
? undefined
: { name: Exclude<RangeKey, undefined>; type: aws_dynamodb.AttributeType };
}
export type AnyTable = ITable<Record<string, any>, string, string | undefined>;
/**
* Wraps an {@link aws_dynamodb.Table} with a type-safe interface that can be
* called from within other {@link AppsyncResolver}.
*
* Its interface, e.g. `getItem`, `putItem`, is in 1:1 correspondence with the
* AWS Appsync Resolver API https://docs.aws.amazon.com/appsync/latest/devguide/resolver-mapping-template-reference-dynamodb.html
*
* For example:
* ```ts
* interface Person {
* id: string;
* name: string;
* age: number;
* }
*
* const personTable = new Table<Person, "id">(stack, id, { ... });
*
* const getPerson = new AppsyncResolver<
* (personId: string) => Person | undefined
* >(($context, personId: string) => {
 * const person = personTable.getItem({
* key: {
* id: $util.toDynamoDB(personId)
* }
* });
*
* return person;
* });
* ```
*
* Note the type-signature of `Table<Person, "id">`. This declares a table whose contents
* are of the shape, `Person`, and that the PartitionKey is the `id` field.
*
* You can also specify the RangeKey:
* ```ts
* Table.fromTable<Person, "id", "age">(..)
* ```
* @see https://github.com/sam-goodwin/typesafe-dynamodb - for more information on how to model your DynamoDB table with TypeScript
*/
export interface ITable<
Item extends object,
PartitionKey extends keyof Item,
RangeKey extends keyof Item | undefined = undefined
> {
readonly kind: "Table";
/**
   * This property identifies this class as a Table to the TypeScript plugin.
*/
readonly functionlessKind: typeof Table.FunctionlessType;
/**
* The underlying {@link aws_dynamodb.ITable} Resource.
*/
readonly resource: aws_dynamodb.ITable;
readonly tableName: string;
readonly tableArn: string;
/**
* Brands this type with easy-access to the type parameters, Item, PartitionKey and RangeKey
*
* @note this value will never exist at runtime - it is purely compile-time information
*/
readonly _brand?: {
Item: Item;
PartitionKey: PartitionKey;
RangeKey: RangeKey;
};
/**
* @see https://docs.aws.amazon.com/appsync/latest/devguide/resolver-mapping-template-reference-dynamodb.html#aws-appsync-resolver-mapping-template-reference-dynamodb-getitem
*/
getItem: DynamoIntegrationCall<
"getItem",
<
Key extends TableKey<
Item,
PartitionKey,
RangeKey,
JsonFormat.AttributeValue
>
>(input: {
key: Key;
consistentRead?: boolean;
}) => Narrow<Item, AttributeKeyToObject<Key>, JsonFormat.Document>
>;
/**
* @see https://docs.aws.amazon.com/appsync/latest/devguide/resolver-mapping-template-reference-dynamodb.html#aws-appsync-resolver-mapping-template-reference-dynamodb-putitem
*/
putItem: DynamoIntegrationCall<
"putItem",
<
Key extends TableKey<
Item,
PartitionKey,
RangeKey,
JsonFormat.AttributeValue
>,
ConditionExpression extends string | undefined = undefined
>(input: {
key: Key;
attributeValues: ToAttributeMap<
Omit<
Narrow<Item, AttributeKeyToObject<Key>, JsonFormat.Document>,
Exclude<PartitionKey | RangeKey, undefined>
>
>;
condition?: DynamoExpression<ConditionExpression>;
_version?: number;
}) => Narrow<Item, AttributeKeyToObject<Key>, JsonFormat.Document>
>;
/**
* @see https://docs.aws.amazon.com/appsync/latest/devguide/resolver-mapping-template-reference-dynamodb.html#aws-appsync-resolver-mapping-template-reference-dynamodb-updateitem
*
* @returns the updated the item
*/
updateItem: DynamoIntegrationCall<
"updateItem",
<
Key extends TableKey<
Item,
PartitionKey,
RangeKey,
JsonFormat.AttributeValue
>,
UpdateExpression extends string,
ConditionExpression extends string | undefined
>(input: {
key: Key;
update: DynamoExpression<UpdateExpression>;
condition?: DynamoExpression<ConditionExpression>;
_version?: number;
}) => Narrow<Item, AttributeKeyToObject<Key>, JsonFormat.Document>
>;
/**
* @see https://docs.aws.amazon.com/appsync/latest/devguide/resolver-mapping-template-reference-dynamodb.html#aws-appsync-resolver-mapping-template-reference-dynamodb-deleteitem
*
* @returns the previous item.
*/
deleteItem: DynamoIntegrationCall<
"deleteItem",
<
Key extends TableKey<
Item,
PartitionKey,
RangeKey,
JsonFormat.AttributeValue
>,
ConditionExpression extends string | undefined
>(input: {
key: Key;
condition?: DynamoExpression<ConditionExpression>;
_version?: number;
}) => Narrow<Item, AttributeKeyToObject<Key>, JsonFormat.Document>
>;
/**
* @see https://docs.aws.amazon.com/appsync/latest/devguide/resolver-mapping-template-reference-dynamodb.html#aws-appsync-resolver-mapping-template-reference-dynamodb-query
*/
query: DynamoIntegrationCall<
"query",
<Query extends string, Filter extends string | undefined>(input: {
query: DynamoExpression<Query>;
filter?: DynamoExpression<Filter>;
index?: string;
nextToken?: string;
limit?: number;
scanIndexForward?: boolean;
consistentRead?: boolean;
select?: "ALL_ATTRIBUTES" | "ALL_PROJECTED_ATTRIBUTES";
}) => {
items: Item[];
nextToken: string;
scannedCount: number;
}
>;
}
/**
 * Shared implementation of {@link ITable}.
 *
 * Wraps an {@link aws_dynamodb.ITable} and exposes each supported DynamoDB
 * operation (getItem, putItem, updateItem, deleteItem, query) as an AppSync
 * VTL integration that renders the corresponding resolver request object.
 */
class BaseTable<
  Item extends object,
  PartitionKey extends keyof Item,
  RangeKey extends keyof Item | undefined = undefined
> implements ITable<Item, PartitionKey, RangeKey>
{
  public static readonly FunctionlessType = "Table";
  readonly functionlessKind = "Table";
  readonly kind = "Table";
  readonly tableName: string;
  readonly tableArn: string;
  // Compile-time-only brand; never assigned at runtime.
  readonly _brand?: {
    Item: Item;
    PartitionKey: PartitionKey;
    RangeKey: RangeKey;
  };
  constructor(readonly resource: aws_dynamodb.ITable) {
    this.tableName = resource.tableName;
    this.tableArn = resource.tableArn;
  }
  public getItem = this.makeTableIntegration("getItem", {
    appSyncVtl: {
      // Arrow function for consistency with the sibling integrations below.
      request: (call, vtl) => {
        const input = vtl.eval(
          assertNodeKind<ObjectLiteralExpr>(
            call.getArgument("input")?.expr,
            "ObjectLiteralExpr"
          )
        );
        const request = vtl.var(
          '{"operation": "GetItem", "version": "2018-05-29"}'
        );
        vtl.qr(`${request}.put('key', ${input}.get('key'))`);
        addIfDefined(vtl, input, request, "consistentRead");
        return vtl.json(request);
      },
    },
  });
  public putItem = this.makeTableIntegration("putItem", {
    appSyncVtl: {
      request: (call, vtl) => {
        const input = vtl.eval(
          assertNodeKind<ObjectLiteralExpr>(
            call.getArgument("input")?.expr,
            "ObjectLiteralExpr"
          )
        );
        const request = vtl.var(
          '{"operation": "PutItem", "version": "2018-05-29"}'
        );
        vtl.qr(`${request}.put('key', ${input}.get('key'))`);
        vtl.qr(
          `${request}.put('attributeValues', ${input}.get('attributeValues'))`
        );
        addIfDefined(vtl, input, request, "condition");
        addIfDefined(vtl, input, request, "_version");
        return vtl.json(request);
      },
    },
  });
  public updateItem = this.makeTableIntegration("updateItem", {
    appSyncVtl: {
      request: (call, vtl) => {
        const input = vtl.eval(
          assertNodeKind<ObjectLiteralExpr>(
            call.getArgument("input")?.expr,
            "ObjectLiteralExpr"
          )
        );
        const request = vtl.var(
          '{"operation": "UpdateItem", "version": "2018-05-29"}'
        );
        vtl.qr(`${request}.put('key', ${input}.get('key'))`);
        vtl.qr(`${request}.put('update', ${input}.get('update'))`);
        addIfDefined(vtl, input, request, "condition");
        addIfDefined(vtl, input, request, "_version");
        return vtl.json(request);
      },
    },
  });
  public deleteItem = this.makeTableIntegration("deleteItem", {
    appSyncVtl: {
      request: (call, vtl) => {
        const input = vtl.eval(
          assertNodeKind<ObjectLiteralExpr>(
            call.getArgument("input")?.expr,
            "ObjectLiteralExpr"
          )
        );
        const request = vtl.var(
          '{"operation": "DeleteItem", "version": "2018-05-29"}'
        );
        vtl.qr(`${request}.put('key', ${input}.get('key'))`);
        addIfDefined(vtl, input, request, "condition");
        addIfDefined(vtl, input, request, "_version");
        return vtl.json(request);
      },
    },
  });
  public query = this.makeTableIntegration("query", {
    appSyncVtl: {
      request: (call, vtl) => {
        const input = vtl.eval(
          assertNodeKind<ObjectLiteralExpr>(
            call.getArgument("input")?.expr,
            "ObjectLiteralExpr"
          )
        );
        const request = vtl.var(
          '{"operation": "Query", "version": "2018-05-29"}'
        );
        vtl.qr(`${request}.put('query', ${input}.get('query'))`);
        addIfDefined(vtl, input, request, "index");
        addIfDefined(vtl, input, request, "nextToken");
        addIfDefined(vtl, input, request, "limit");
        addIfDefined(vtl, input, request, "scanIndexForward");
        addIfDefined(vtl, input, request, "consistentRead");
        addIfDefined(vtl, input, request, "select");
        return vtl.json(request);
      },
    },
  });
  /**
   * Builds one table integration: supplies the shared DynamoDB data-source
   * wiring and merges in the operation-specific VTL request renderer.
   *
   * @param methodName the ITable method being implemented, e.g. "getItem".
   * @param integration the operation-specific pieces of the integration.
   */
  private makeTableIntegration<
    K extends Exclude<
      keyof ITable<Item, PartitionKey, RangeKey>,
      | "kind"
      | "resource"
      | "_brand"
      | "tableName"
      | "tableArn"
      | "functionlessKind"
    >
  >(
    methodName: K,
    integration: Omit<
      IntegrationInput<K, ITable<Item, PartitionKey, RangeKey>[K]>,
      "kind" | "appSyncVtl"
    > & {
      appSyncVtl: Omit<AppSyncVtlIntegration, "dataSource" | "dataSourceId">;
    }
  ): DynamoIntegrationCall<K, ITable<Item, PartitionKey, RangeKey>[K]> {
    return makeIntegration<
      `Table.${K}`,
      ITable<Item, PartitionKey, RangeKey>[K]
    >({
      ...integration,
      kind: `Table.${methodName}`,
      appSyncVtl: {
        // The CDK construct address uniquely identifies this table's data source.
        dataSourceId: () => this.resource.node.addr,
        dataSource: (api, dataSourceId) => {
          return new appsync.DynamoDbDataSource(api, dataSourceId, {
            api,
            table: this.resource,
          });
        },
        ...integration.appSyncVtl,
      },
      unhandledContext(kind, contextKind) {
        throw new Error(
          `${kind} is only allowed within a '${VTL.ContextName}' context, but was called within a '${contextKind}' context.`
        );
      },
    });
  }
}
export type DynamoIntegrationCall<
Kind extends string,
F extends AnyFunction
> = IntegrationCall<`Table.${Kind}`, F>;
/**
* Wraps an {@link aws_dynamodb.Table} with a type-safe interface that can be
* called from within an {@link AppsyncResolver}.
*
* Its interface, e.g. `getItem`, `putItem`, is in 1:1 correspondence with the
* AWS Appsync Resolver API https://docs.aws.amazon.com/appsync/latest/devguide/resolver-mapping-template-reference-dynamodb.html
*
* For example:
* ```ts
* interface Person {
* id: string;
* name: string;
* age: number;
* }
*
* const personTable = Table.fromTable<Person, "id">(
* new aws_dynamodb.Table(..)
* );
*
* const getPerson = new AppsyncResolver<
* (personId: string) => Person | undefined
* >(($context, personId: string) => {
 * const person = personTable.getItem({
* key: {
* id: $util.toDynamoDB(personId)
* }
* });
*
* return person;
* });
* ```
*
* Note the type-signature of `Table<Person, "id">`. This declares a table whose contents
* are of the shape, `Person`, and that the PartitionKey is the `id` field.
*
* You can also specify the RangeKey:
* ```ts
* Table.fromTable<Person, "id", "age">(..)
* ```
* @see https://github.com/sam-goodwin/typesafe-dynamodb - for more information on how to model your DynamoDB table with TypeScript
*/
export class Table<
  Item extends object,
  PartitionKey extends keyof Item,
  RangeKey extends keyof Item | undefined = undefined
> extends BaseTable<Item, PartitionKey, RangeKey> {
  /**
   * Wrap a {@link aws_dynamodb.Table} with Functionless.
   *
   * A wrapped {@link Table} provides common integrations like `getItem` and `query`.
   */
  public static fromTable<
    Item extends object,
    PartitionKey extends keyof Item,
    RangeKey extends keyof Item | undefined = undefined
  >(resource: aws_dynamodb.ITable): ITable<Item, PartitionKey, RangeKey> {
    return new BaseTable<Item, PartitionKey, RangeKey>(resource);
  }
  /**
   * Creates a new CDK {@link aws_dynamodb.Table} and wraps it with Functionless.
   *
   * @param scope the CDK construct scope in which to create the table.
   * @param id the logical id of the table within `scope`.
   * @param props table configuration; key attribute names must be string keys
   *   of `Item`, so `number | symbol` keys are excluded from the key name types.
   *   (Uses the `symbol` primitive type rather than the boxed `Symbol` wrapper.)
   */
  constructor(
    scope: Construct,
    id: string,
    props: TableProps<
      Exclude<PartitionKey, number | symbol>,
      Exclude<RangeKey, number | symbol>
    >
  ) {
    super(new aws_dynamodb.Table(scope, id, props as aws_dynamodb.TableProps));
  }
}
/**
 * Maps a DynamoDB AttributeValue-formatted key (e.g. `{ S: "id" }`) back to
 * the plain value type it encodes: `S` yields the string literal, `N` the
 * numeric string literal, and `B` the native binary attribute type.
 */
type AttributeKeyToObject<T> = {
  [k in keyof T]: T[k] extends { S: infer S }
    ? S
    : T[k] extends { N: `${infer N}` }
    ? N
    : T[k] extends { B: any }
    ? NativeBinaryAttribute
    : never;
};
/**
 * Emits VTL that copies `key` from the map variable `from` into the map
 * variable `to`, guarded by a `#if` so the copy only happens when the source
 * map actually contains that key.
 */
function addIfDefined(vtl: VTL, from: string, to: string, key: string) {
  const guard = `#if(${from}.containsKey('${key}'))`;
  const copy = `$util.qr(${to}.put('${key}', ${from}.get('${key}')))`;
  vtl.add(guard, copy, "#end");
}
/**
 * Shape of a DynamoDB expression input as AppSync expects it: the expression
 * string plus `expressionNames`/`expressionValues` maps derived from the
 * `#name` and `:value` placeholders used within the expression.
 */
export type DynamoExpression<Expression extends string | undefined> =
  {} & RenameKeys<
    ExpressionAttributeNames<Expression> &
      ExpressionAttributeValues<Expression, JsonFormat.AttributeValue> & {
        expression?: Expression;
      },
    {
      ExpressionAttributeValues: "expressionValues";
      ExpressionAttributeNames: "expressionNames";
    }
  >;
/**
 * Renames the keys of `T` according to the `Substitutions` map; keys without
 * a substitution entry are left unchanged.
 */
type RenameKeys<
  T extends object,
  Substitutions extends Record<string, string>
> = {
  [k in keyof T as k extends keyof Substitutions ? Substitutions[k] : k]: T[k];
};
import { Trans, t } from "@lingui/macro";
import { withI18n } from "@lingui/react";
import { Modal } from "reactjs-components";
import PropTypes from "prop-types";
import * as React from "react";
import ModalHeading from "#SRC/js/components/modals/ModalHeading";
import TabForm from "#SRC/js/components/TabForm";
import Util from "#SRC/js/utils/Util";
import ACLDirectoriesStore from "../stores/ACLDirectoriesStore";
import FieldDefinitions from "../constants/FieldDefinitions";
const BLACKLISTED_FORM_FIELDS = [
"group-search-base",
"group-search-filter-template",
"ssl-tls-configuration",
"authentication-bind-type",
"template-bind-type",
"user-search-base",
"user-search-filter-template",
"ca-certs",
"client-cert",
];
class DirectoryFormModal extends React.Component {
static defaultProps = {
changeModalOpenState() {},
editMode: true,
model: {},
onFormSubmit() {},
};
static propTypes = {
changeModalOpenState: PropTypes.func,
editMode: PropTypes.bool,
model: PropTypes.object,
modalOpen: PropTypes.bool.isRequired,
onFormSubmit: PropTypes.func,
};
state = {
isCACertRequired: false,
formData: {
"ssl-tls-configuration": null,
"template-bind-type": null,
},
};
triggerTabFormSubmit = null;
handleAddDirectoryClick = () => {
this.triggerTabFormSubmit();
};
handleFormChange = (formData, changeEvent) => {
const { fieldName, fieldValue } = changeEvent;
if (
fieldName === "template-bind-type" ||
fieldName === "authentication-bind-type"
) {
const selectedOption = fieldValue.find(
(radioValue) => radioValue.checked
);
this.setState({
formData: {
...this.state.formData,
[fieldName]: selectedOption.name,
},
});
} else if (fieldName === "client-cert") {
const isCACertRequired =
fieldValue != null && fieldValue !== "" && fieldValue !== "\n";
if (isCACertRequired !== this.state.isCACertRequired) {
this.setState({ isCACertRequired });
}
}
};
handleFormSubmit = (formData) => {
ACLDirectoriesStore.addDirectory(this.processFormData(formData));
this.props.onFormSubmit();
};
getFieldValue(fieldName) {
const defaultValue = "";
const { model } = this.props;
if (fieldName === "ssl-tls-configuration") {
return this.getSSLTLSConfigValue();
}
if (fieldName === "authentication-bind-type") {
return this.getBindTypeValue();
}
if (fieldName === "template-bind-type") {
return this.getTemplateBindTypeValue();
}
// Get a nested value if the requested fieldName contains a period.
if (fieldName.indexOf(".") > 0) {
return Util.findNestedPropertyInObject(model, fieldName) || defaultValue;
}
return model[fieldName] || defaultValue;
}
getModalFooter() {
const { modalDisabled } = this.props;
let actionText = modalDisabled ? (
<Trans render="span">Adding...</Trans>
) : (
<Trans render="span">Add Directory</Trans>
);
if (this.props.editMode) {
actionText = modalDisabled ? (
<Trans render="span">Saving...</Trans>
) : (
<Trans render="span">Save Configuration</Trans>
);
}
return (
<div className="flush-bottom flex flex-direction-top-to-bottom flex-align-items-stretch-screen-small flex-direction-left-to-right-screen-small flex-justify-items-space-between-screen-medium">
<button
className="button button-primary-link flush-left"
onClick={this.props.changeModalOpenState.bind(null, false)}
disabled={modalDisabled}
>
<Trans render="span">Cancel</Trans>
</button>
<button
className="button button-primary"
onClick={this.handleAddDirectoryClick.bind(this)}
disabled={modalDisabled}
>
{actionText}
</button>
</div>
);
}
getModalHeader() {
return this.props.editMode ? (
<Trans render={<ModalHeading />}>Edit Directory</Trans>
) : (
<Trans render={<ModalHeading />}>Add Directory</Trans>
);
}
getSSLTLSConfigValue() {
if (!this.props.editMode) {
return "enforce-starttls";
}
const { model = {} } = this.props;
if (model["use-ldaps"]) {
return "use-ldaps";
}
if (model["enforce-starttls"]) {
return "enforce-starttls";
}
return "ssl-tls-configuration-default-value";
}
getBindTypeValue() {
if (this.state.formData["authentication-bind-type"] != null) {
return this.state.formData["authentication-bind-type"];
}
const { model = {}, editMode } = this.props;
if (!editMode) {
return "anonymous-bind";
}
if (model["lookup-dn"] || model["lookup-password"]) {
return "ldap-credentials";
}
return "anonymous-bind";
}
getTemplateBindTypeValue() {
if (this.state.formData["template-bind-type"] != null) {
return this.state.formData["template-bind-type"];
}
const { model = {}, editMode } = this.props;
if (!editMode) {
return "simple-bind-template";
}
if (model["user-search"]) {
return "search-bind";
}
return "simple-bind-template";
}
getTriggerTabFormSubmit = (submitTrigger) => {
this.triggerTabFormSubmit = submitTrigger;
};
getConnectionDefinition() {
const { i18n } = this.props;
const optionalText = i18n._(t`Optional`);
return [
[
{
fieldType: "text",
name: "host",
placeholder: i18n._(FieldDefinitions.host.fieldPlaceholder),
showLabel: (
<Trans render="label" id={FieldDefinitions.host.displayName} />
),
writeType: "input",
validation() {
return true;
},
value: this.getFieldValue("host"),
},
{
fieldType: "text",
name: "port",
placeholder: i18n._(FieldDefinitions.port.fieldPlaceholder),
showLabel: (
<Trans render="label" id={FieldDefinitions.port.displayName} />
),
writeType: "input",
value: this.getFieldValue("port"),
},
],
{
buttonClassName: "button-outline button-split-content",
fieldType: "select",
formElementClass: "directory-form-encryption-dropdown",
matchButtonWidth: true,
name: "ssl-tls-configuration",
options: [
{
html: (
<div className="button-split-content-wrapper">
<Trans
render="span"
className="button-split-content-item"
id={FieldDefinitions["use-ldaps"].fieldLabel}
/>
</div>
),
id: "use-ldaps",
},
{
html: (
<div className="button-split-content-wrapper">
<Trans
render="span"
className="button-split-content-item"
id={FieldDefinitions["enforce-starttls"].fieldLabel}
/>
</div>
),
id: "enforce-starttls",
},
{
html: (
<div className="button-split-content-wrapper">
<Trans
render="span"
className="button-split-content-item"
id={
FieldDefinitions["ssl-tls-configuration-default-value"]
.fieldLabel
}
/>
</div>
),
id: "ssl-tls-configuration-default-value",
},
],
required: false,
showLabel: (
<Trans
render="label"
id={FieldDefinitions["ssl-tls-configuration"].displayName}
/>
),
validation() {
return true;
},
value: this.getFieldValue("ssl-tls-configuration"),
wrapperClassName: "dropdown dropdown-wide",
},
{
fieldType: "textarea",
name: "client-cert",
placeholder: i18n._(FieldDefinitions["client-cert"].fieldPlaceholder),
showLabel: (
<label>
<Trans
render="span"
id={FieldDefinitions["client-cert"].displayName}
/>{" "}
({optionalText})
</label>
),
writeType: "input",
validation() {
return true;
},
value: this.getFieldValue("client-cert"),
},
{
fieldType: "textarea",
name: "ca-certs",
placeholder: i18n._(FieldDefinitions["ca-certs"].fieldPlaceholder),
required: this.state.isCACertRequired,
showLabel: this.getCACertLabel(
FieldDefinitions["ca-certs"].displayName
),
writeType: "input",
validation() {
return true;
},
value: this.getFieldValue("ca-certs"),
},
];
}
getAuthenticationBindDefinition() {
const { i18n } = this.props;
const conditionalAuthenticationBindFields = {
"anonymous-bind": [],
"ldap-credentials": [
{
fieldType: "text",
name: "lookup-dn",
placeholder: i18n._(FieldDefinitions["lookup-dn"].fieldPlaceholder),
showLabel: (
<Trans
render="label"
id={FieldDefinitions["lookup-dn"].displayName}
/>
),
writeType: "input",
validation() {
return true;
},
value: this.getFieldValue("lookup-dn"),
},
{
fieldType: "password",
name: "lookup-password",
placeholder: i18n._(
FieldDefinitions["lookup-password"].fieldPlaceholder
),
showLabel: (
<Trans
render="label"
id={FieldDefinitions["lookup-password"].displayName}
/>
),
writeType: "input",
validation() {
return true;
},
value: this.getFieldValue("lookup-password"),
},
],
};
return [
{
fieldType: "radioButton",
name: "authentication-bind-type",
labelClass: "form-control-inline flush-bottom",
showLabel: (
<Trans
render="label"
id={FieldDefinitions["authentication-bind-type"].displayName}
/>
),
validation() {
return true;
},
value: [
{
name: "anonymous-bind",
label: i18n._(
FieldDefinitions["authentication-bind-type.anonymous-bind"]
.displayName
),
checked:
this.getFieldValue("authentication-bind-type") ===
"anonymous-bind",
},
{
name: "ldap-credentials",
label: i18n._(
FieldDefinitions["authentication-bind-type.ldap-credentials"]
.displayName
),
checked:
this.getFieldValue("authentication-bind-type") ===
"ldap-credentials",
},
],
},
].concat(
conditionalAuthenticationBindFields[
this.getFieldValue("authentication-bind-type")
] || []
);
}
getTemplateBindDefinition() {
const { i18n } = this.props;
const conditionalTemplateBindFields = {
"simple-bind-template": [
{
fieldType: "text",
name: "dntemplate",
placeholder: i18n._(FieldDefinitions.dntemplate.fieldPlaceholder),
showLabel: (
<Trans
render="label"
id={FieldDefinitions.dntemplate.displayName}
/>
),
writeType: "input",
validation() {
return true;
},
value: this.getFieldValue("dntemplate"),
},
],
"search-bind": [
{
fieldType: "text",
name: "user-search-base",
placeholder: i18n._(
FieldDefinitions["user-search.search-base"].fieldPlaceholder
),
showLabel: (
<Trans
render="label"
id={FieldDefinitions["user-search.search-base"].displayName}
/>
),
writeType: "input",
validation() {
return true;
},
value: this.getFieldValue("user-search.search-base"),
},
{
fieldType: "text",
name: "user-search-filter-template",
placeholder: i18n._(
FieldDefinitions["user-search.search-filter-template"]
.fieldPlaceholder
),
showLabel: (
<Trans
render="label"
id={
FieldDefinitions["user-search.search-filter-template"]
.displayName
}
/>
),
writeType: "input",
validation() {
return true;
},
value: this.getFieldValue("user-search.search-filter-template"),
},
],
};
return [
{
fieldType: "radioButton",
name: "template-bind-type",
labelClass: "form-control-inline flush-bottom",
showLabel: (
<Trans
render="label"
id={FieldDefinitions["template-bind-type"].displayName}
/>
),
validation() {
return true;
},
value: [
{
name: "simple-bind-template",
label: i18n._(
FieldDefinitions["template-bind-type.simple-bind-template"]
.displayName
),
checked:
this.getFieldValue("template-bind-type") ===
"simple-bind-template",
},
{
name: "search-bind",
label: i18n._(
FieldDefinitions["template-bind-type.search-bind"].displayName
),
checked: this.getFieldValue("template-bind-type") === "search-bind",
},
],
},
].concat(
conditionalTemplateBindFields[this.getFieldValue("template-bind-type")] ||
[]
);
}
getAuthenticationMethodDefinition() {
return this.getAuthenticationBindDefinition().concat(
this.getTemplateBindDefinition()
);
}
getCACertLabel(label) {
const { i18n } = this.props;
const optionalText = i18n._(t`Optional`);
if (!this.state.isCACertRequired) {
return (
<label>
<Trans render="span" id={label} /> ({optionalText})
</label>
);
}
return <Trans render="label" id={label} />;
}
getGroupImportDefinition() {
const { i18n } = this.props;
return [
{
fieldType: "text",
name: "group-search-base",
placeholder: i18n._(
FieldDefinitions["group-search.search-base"].fieldPlaceholder
),
showLabel: (
<Trans
render="label"
id={FieldDefinitions["group-search.search-base"].displayName}
/>
),
writeType: "input",
validation() {
return true;
},
value: this.getFieldValue("group-search.search-base"),
},
{
fieldType: "text",
name: "group-search-filter-template",
placeholder: i18n._(
FieldDefinitions["group-search.search-filter-template"]
.fieldPlaceholder
),
showLabel: (
<Trans
render="label"
id={
FieldDefinitions["group-search.search-filter-template"]
.displayName
}
/>
),
writeType: "input",
validation() {
return true;
},
value: this.getFieldValue("group-search.search-filter-template"),
},
];
}
getModalFormDefinition() {
const { i18n } = this.props;
return {
connection: {
title: i18n._("Connection"),
selectValue: "connection",
definition: this.getConnectionDefinition(),
},
"authentication-method": {
title: i18n._("Authentication"),
selectValue: "authentication-method",
definition: this.getAuthenticationMethodDefinition(),
},
"group-import": {
title: i18n._("Group Import (Optional)"),
selectValue: "group-import",
definition: this.getGroupImportDefinition(),
},
};
}
  /**
   * Transforms the nested TabForm submission into the flat(ish) object shape
   * the ACL directories API expects.
   *
   * Starts from the existing model (relevant in edit mode) and overlays the
   * submitted values, translating UI-only fields (the SSL/TLS dropdown, the
   * user/group search fields) into their API equivalents.
   */
  processFormData(formData) {
    const processedData = {
      ...this.props.model,
    };
    // Transform the nested form definition into an object that the API expects.
    Object.keys(formData).forEach((formGroupName) => {
      const fieldGroup = formData[formGroupName];
      Object.keys(fieldGroup).forEach((fieldName) => {
        const fieldValue = fieldGroup[fieldName];
        // This dropdown's value determines two fields, which must be explicitly
        // defined. It is possible for only one to be true, or both to be false.
        if (fieldName === "ssl-tls-configuration") {
          processedData["enforce-starttls"] = fieldValue === "enforce-starttls";
          processedData["use-ldaps"] = fieldValue === "use-ldaps";
        }
        // If either user-search-base or user-search-filter-template are present
        // then we need to explicitly set both fields on a nested object and
        // drop the mutually-exclusive dntemplate field.
        // NOTE(review): an older comment claimed lookup-dn is also set to null
        // here, but the code never touches lookup-dn — confirm intended behavior.
        if (
          fieldName === "user-search-base" ||
          fieldName === "user-search-filter-template"
        ) {
          // Create the nested object user-search if necessary.
          if (processedData["user-search"] == null) {
            processedData["user-search"] = {};
          }
          if (fieldName === "user-search-base") {
            processedData["user-search"]["search-base"] = fieldValue;
          }
          if (fieldName === "user-search-filter-template") {
            processedData["user-search"]["search-filter-template"] = fieldValue;
          }
          // dntemplate and the user-search fields are mutually exclusive. Thus,
          // dntemplate must be undefined if the preceding values are present.
          delete processedData.dntemplate;
        }
        // If either group-search-base or group-search-filter-template are
        // present then we need to explicitly define a nested object including
        // both fields, even if one is null. We don't create the nested object
        // if both fields are empty.
        if (
          fieldName === "group-search-base" ||
          fieldName === "group-search-filter-template"
        ) {
          const shouldIncludeGroupSearch =
            fieldGroup["group-search-base"] != null ||
            fieldGroup["group-search-filter-template"] != null;
          if (shouldIncludeGroupSearch) {
            // Create the nested object group-search if necessary.
            if (processedData["group-search"] == null) {
              processedData["group-search"] = {};
            }
            if (fieldName === "group-search-base") {
              processedData["group-search"]["search-base"] = fieldValue;
            }
            if (fieldName === "group-search-filter-template") {
              processedData["group-search"][
                "search-filter-template"
              ] = fieldValue;
            }
          }
        }
        // Certificates are blacklisted below, so they are copied here instead,
        // and only when non-empty (an empty textarea must not clobber the model).
        if (
          (fieldName === "client-cert" || fieldName === "ca-certs") &&
          fieldValue != null &&
          fieldValue !== ""
        ) {
          processedData[fieldName] = fieldValue;
        }
        // Exclude all blacklisted form fields.
        if (!BLACKLISTED_FORM_FIELDS.includes(fieldName)) {
          processedData[fieldName] = fieldValue;
        }
      });
    });
    return processedData;
  }
render() {
return (
<Modal
footer={this.getModalFooter()}
header={this.getModalHeader()}
modalWrapperClass="multiple-form-modal modal-form"
onClose={this.props.changeModalOpenState.bind(null, false)}
open={this.props.modalOpen}
scrollContainerClass="multiple-form-modal-body"
closeByBackdropClick={false}
showHeader={true}
showFooter={true}
>
<TabForm
definition={this.getModalFormDefinition()}
onSubmit={this.handleFormSubmit}
getTriggerSubmit={this.getTriggerTabFormSubmit}
onChange={this.handleFormChange}
/>
</Modal>
);
}
}
export default withI18n()(DirectoryFormModal); | the_stack |
import { member } from "babyioc";
import {
Actor as BattleMovrActor,
BattleInfo as BattleInfoBase,
OnBattleComplete,
Statistic,
Statistics,
TeamBase,
TeamDescriptor,
UnderEachTeam,
TeamId,
} from "battlemovr";
import { Section } from "eightbittr";
import { FullScreenPokemon } from "../FullScreenPokemon";
import { ActionsOrderer } from "./battles/ActionsOrderer";
import { Animations } from "./battles/Animations";
import { Decorations } from "./battles/Decorations";
import { Selectors } from "./battles/Selectors";
import { Status } from "./battles/Statuses";
import { Actors } from "./battles/Actors";
import { BattleTextGenerators, PartialTextGenerators } from "./constants/battles/Texts";
import { Menu } from "./Menus";
import { StateSaveable } from "./Saves";
import { Actor } from "./Actors";
/**
* Party Pokemon that can participate in battles.
*/
export interface Pokemon extends BattleMovrActor, StateSaveable {
/**
* How many experience points the actor has.
*/
experience: number;
/**
* Power level the actor is.
*/
level: number;
/**
* Accumulated effort value points.
*/
ev: ValuePoints;
/**
* Item held by the actor.
*/
item?: string[];
/**
* Accumulated individual value points.
*/
iv: ValuePoints;
/**
* How likely a critical hit is from this Pokemon.
*/
raisedCriticalHitProbability?: boolean;
/**
* Battle attribute statistics.
*/
statistics: PokemonStatistics;
/**
* Any current status effect.
*/
status?: Status;
/**
* What types this Pokemon is, such as "Water".
*/
types: string[];
}
/**
* Effort or individual value points for a Pokemon.
*/
export interface ValuePoints {
/**
* Attack value points.
*/
attack: number;
/**
* Defense value points.
*/
defense: number;
/**
* Health value points.
*/
health: number;
/**
* Special value points.
*/
special: number;
/**
* Speed value points.
*/
speed: number;
}
/**
* Statistics for a Pokemon.
*/
export interface PokemonStatistics extends Statistics {
/**
* The Pokemon's attack.
*/
attack: Statistic;
/**
     * The Pokemon's defense.
*/
defense: Statistic;
/**
* The Pokemon's special.
*/
special: Statistic;
/**
* The Pokemon's speed.
*/
speed: Statistic;
}
/**
* An enemy team attacking the player.
*/
export interface EnemyTeam extends TeamDescriptor {
/**
* A badge to gift when defeated.
*/
badge?: string;
/**
* Whether this opponent doesn't understand status effects in move priority generation.
*/
dumb?: boolean;
/**
* A gift to give after defeated in battle.
*/
giftAfterBattle?: string;
/**
* A cutscene to trigger after defeated in battle.
*/
nextCutscene?: string;
/**
* A monetary reward to give after defeated in battle.
*/
reward?: number;
/**
* Whether this opponent should override its type for better moves in move priority generation.
*/
smart?: boolean;
}
/**
* Actors displayed in a battle.
*/
export interface BattleActors extends UnderEachTeam<Actor> {
/**
     * Solid background color behind every actor.
*/
background: Actor;
/**
* Menu surrounding the battle area.
*/
menu: Menu;
}
/**
* Battle options specific to FullScreenPokemon.
*/
export interface PokemonBattleOptions {
/**
* Texts to display in menus.
*/
texts: BattleTextGenerators;
/**
* Audio theme to play during the battle.
*/
theme: string;
/**
* Actors displayed in the battle.
*/
actors: BattleActors;
}
/**
* Minimum options to start a new battle.
*/
export interface PartialBattleOptions {
/**
* Whether the battle should advance its menus automatically.
*/
automaticMenus?: boolean;
/**
* Actors that should be visible above the starting animation.
*/
keptActors?: Actor[];
/**
* Callback for when the battle is complete.
*/
onComplete?: OnBattleComplete;
/**
* Transition name to start with, if not a random one.
*/
startTransition?: string;
/**
* Opposing teams in the battle.
*/
teams?: Partial<UnderEachTeam<Partial<TeamDescriptor>>> & {
opponent?: Partial<EnemyTeam>;
};
/**
* Texts to display in menus.
*/
texts?: Partial<PartialTextGenerators>;
/**
* Audio theme to be played after the battle is done.
*/
endingtheme?: string;
}
/**
 * Complete options to start a new battle: PartialBattleOptions with the
 * required fields made non-optional.
 */
export interface BattleOptions extends PartialBattleOptions {
    /**
     * Callback for when the battle is complete.
     */
    onComplete: OnBattleComplete;
    /**
     * Opposing teams in the battle.
     */
    teams: UnderEachTeam<TeamDescriptor>;
}
/**
 * Common attributes for teams of Pokemon while a battle is in progress.
 */
export interface BattleTeam extends TeamBase, EnemyTeam {
    /**
     * Pokemon that will fight.
     */
    actors: Pokemon[];
    /**
     * The currently selected (active) Pokemon.
     */
    selectedActor: Pokemon;
}
/**
 * Information on an in-progress battle: the full set of options plus
 * runtime state tracked while the battle runs.
 */
export type BattleInfo = BattleInfoBase &
    BattleOptions &
    PokemonBattleOptions & {
        /**
         * How many times the player has failed to flee.
         */
        fleeAttempts: number;
        /**
         * Opposing teams in the battle.
         */
        teams: UnderEachTeam<BattleTeam>;
    };
/**
* BattleMovr hooks to run trainer battles.
*/
export class Battles extends Section<FullScreenPokemon> {
/**
* Orders chosen actions by priority and/or speed.
*/
@member(ActionsOrderer)
public readonly actionsOrderer: ActionsOrderer;
/**
* Animations for battle events.
*/
@member(Animations)
public readonly animations: Animations;
/**
* Places decorative in-battle Actors.
*/
@member(Decorations)
public readonly decorations: Decorations;
/**
* Selects actions for each team.
*/
@member(Selectors)
public readonly selectors: Selectors;
/**
* Sets Actors visually representing each team.
*/
@member(Actors)
public readonly actors: Actors;
/**
* Starts a new battle.
*
* @param partialBattleOptions Options to start the battle.
*/
public startBattle(partialBattleOptions: PartialBattleOptions): void {
const battleOptions: BattleOptions = this.fillOutBattleOptions(partialBattleOptions);
this.game.battleMover.startBattle(battleOptions);
}
/**
* Heals a Pokemon back to full health.
*
* @param pokemon An in-game Pokemon to heal.
*/
public healPokemon(pokemon: Pokemon): void {
for (const statisticName of this.game.constants.pokemon.statisticNames) {
pokemon.statistics[statisticName].current = pokemon.statistics[statisticName].normal;
}
for (const move of pokemon.moves) {
move.remaining = this.game.constants.moves.byName[move.title].PP;
}
pokemon.status = undefined;
}
/**
* Tests to see if all party Pokemon have fainted.
*
* @returns Whether a player's party is wiped.
*/
public isPartyWiped(): boolean {
for (const chosenPokemon of this.game.itemsHolder.getItem(
this.game.storage.names.pokemonInParty
)) {
if (chosenPokemon.statistics.health.current !== 0) {
return false;
}
}
return true;
}
/**
* Heals party back to full health.
*/
public healParty(): void {
for (const pokemon of this.game.itemsHolder.getItem(
this.game.storage.names.pokemonInParty
)) {
this.game.battles.healPokemon(pokemon);
}
}
/**
* Checks whether a team is allowed to flee (not facing a trainer).
*
* @param teamId A team in battle.
* @returns Whether the team can flee.
*/
public canTeamAttemptFlee(teamId: TeamId): boolean {
const battleInfo: BattleInfo = this.game.battleMover.getBattleInfo() as BattleInfo;
return !(teamId === TeamId.opponent ? battleInfo.teams.player : battleInfo.teams.opponent)
.leader;
}
/**
* Fills in default values for battle options.
*
* @param partialBattleOptions Partial options to start a battle.
* @returns Completed options to start a battle.
*/
private fillOutBattleOptions(partialBattleOptions: PartialBattleOptions): BattleOptions {
const texts: BattleTextGenerators = this.game.utilities.proliferate(
{},
{
...this.game.constants.battles.texts.defaultBattleTexts,
...partialBattleOptions.texts,
}
);
const teams: UnderEachTeam<TeamDescriptor> = {
opponent: {
actors: [],
selector: "opponent",
},
player: {
actors: this.game.itemsHolder.getItem(this.game.storage.names.pokemonInParty),
leader: {
nickname: this.game.itemsHolder.getItem(this.game.storage.names.name),
title: "PlayerBack".split(""),
},
selector: "player",
},
};
if (partialBattleOptions.teams) {
if (partialBattleOptions.teams.opponent) {
teams.opponent = {
...teams.opponent,
...partialBattleOptions.teams.opponent,
} as TeamDescriptor;
}
if (partialBattleOptions.teams.player) {
teams.player = {
...teams.player,
...partialBattleOptions.teams.player,
};
}
}
if (partialBattleOptions.endingtheme === undefined) {
partialBattleOptions.endingtheme = this.game.mapScreener.theme;
}
return {
...partialBattleOptions,
fleeAttempts: 0,
teams,
texts,
theme: "Battle Trainer",
} as BattleOptions;
}
} | the_stack |
import MdlElement from "../element/mdl_element";
// Material Design Lite layout component: manages the header, drawer,
// content area, and optional tab bar. Fields suffixed with `_` are
// internal; most implementations are attached to the prototype below.
export default class MdlLayout extends MdlElement{
    // Child elements, resolved from the DOM in init().
    header_: HTMLElement;
    drawer_: HTMLElement;
    content_: HTMLElement;
    tabBar_: HTMLElement;
    obfuscator_: HTMLElement;
    // Shared constant tables (values assigned on the prototype).
    Keycodes_: {ENTER: number, ESCAPE: number, SPACE: number};
    Mode_: {STANDARD: number, SEAMED: number, WATERFALL: number, SCROLL: number};
    // Event handlers bound to the instance at init() time.
    contentScrollHandler_: () => void;
    keyboardEventHandler_: (evt: KeyboardEvent) => void;
    screenSizeHandler_: () => void;
    drawerToggleHandler_: (evt: KeyboardEvent) => void;
    headerClickHandler_: () => void;
    headerTransitionEndHandler_: () => void;
    // Tab/panel helpers.
    resetTabState_: (tabBar:any) => void;
    resetPanelState_: (panels:any) => void;
    toggleDrawer: () => void;
    activateTab: (idTab: string) => void;
    getTab: (href: string) => HTMLElement | void;
    setTabActive: (tab: HTMLElement) => void;
    setTabContentActive: (id: string) => void;
    constructor(el: HTMLElement){super(el)}
    // headerTransitionHandler_: Function;
}
/**
 * String and number constants used throughout the layout component.
 */
MdlLayout.prototype.Constant_ = {
    MAX_WIDTH: '(max-width: 1024px)',
    TAB_SCROLL_PIXELS: 100,
    RESIZE_TIMEOUT: 100,
    MENU_ICON: '',
    CHEVRON_LEFT: 'chevron_left',
    CHEVRON_RIGHT: 'chevron_right'
};

/**
 * Keycodes, for code readability.
 */
MdlLayout.prototype.Keycodes_ = {
    ENTER: 13,
    ESCAPE: 27,
    SPACE: 32
};

/**
 * Header display modes, detected from the header's CSS classes in init().
 */
MdlLayout.prototype.Mode_ = {
    STANDARD: 0,
    SEAMED: 1,
    WATERFALL: 2,
    SCROLL: 3
};

/**
 * CSS class names used by this component.
 */
MdlLayout.prototype.CssClasses_ = {
    CONTAINER: 'mdl-layout__container',
    HEADER: 'mdl-layout__header',
    DRAWER: 'mdl-layout__drawer',
    CONTENT: 'mdl-layout__content',
    DRAWER_BTN: 'mdl-layout__drawer-button',
    ICON: 'material-icons',
    JS_RIPPLE_EFFECT: 'mdl-js-ripple-effect',
    RIPPLE_CONTAINER: 'mdl-layout__tab-ripple-container',
    RIPPLE: 'mdl-ripple',
    RIPPLE_IGNORE_EVENTS: 'mdl-js-ripple-effect--ignore-events',
    HEADER_SEAMED: 'mdl-layout__header--seamed',
    HEADER_WATERFALL: 'mdl-layout__header--waterfall',
    HEADER_SCROLL: 'mdl-layout__header--scroll',
    FIXED_HEADER: 'mdl-layout--fixed-header',
    OBFUSCATOR: 'mdl-layout__obfuscator',
    TAB_BAR: 'mdl-layout__tab-bar',
    TAB_CONTAINER: 'mdl-layout__tab-bar-container',
    TAB: 'mdl-layout__tab',
    TAB_BAR_BUTTON: 'mdl-layout__tab-bar-button',
    TAB_BAR_LEFT_BUTTON: 'mdl-layout__tab-bar-left-button',
    TAB_BAR_RIGHT_BUTTON: 'mdl-layout__tab-bar-right-button',
    PANEL: 'mdl-layout__tab-panel',
    HAS_DRAWER: 'has-drawer',
    HAS_TABS: 'has-tabs',
    HAS_SCROLLING_HEADER: 'has-scrolling-header',
    CASTING_SHADOW: 'is-casting-shadow',
    IS_COMPACT: 'is-compact',
    IS_SMALL_SCREEN: 'is-small-screen',
    IS_DRAWER_OPEN: 'is-visible',
    IS_ACTIVE: 'is-active',
    IS_UPGRADED: 'is-upgraded',
    IS_ANIMATING: 'is-animating',
    ON_LARGE_SCREEN: 'mdl-layout--large-screen-only',
    ON_SMALL_SCREEN: 'mdl-layout--small-screen-only'
};
/**
 * Handles scrolling on the content area: compacts the waterfall header
 * (and casts its shadow) once scrolled, and expands it again at the top.
 */
MdlLayout.prototype.contentScrollHandler_ = function () {
    var headerClasses = this.header_.classList;
    // Never fight an in-flight header transition.
    if (headerClasses.contains(this.CssClasses_.IS_ANIMATING)) {
        return;
    }
    var headerVisible =
        !this.element_.classList.contains(this.CssClasses_.IS_SMALL_SCREEN) ||
        this.element_.classList.contains(this.CssClasses_.FIXED_HEADER);
    var scrolled = this.content_.scrollTop > 0;
    var compact = headerClasses.contains(this.CssClasses_.IS_COMPACT);
    if (scrolled && !compact) {
        headerClasses.add(this.CssClasses_.CASTING_SHADOW);
        headerClasses.add(this.CssClasses_.IS_COMPACT);
        if (headerVisible) {
            headerClasses.add(this.CssClasses_.IS_ANIMATING);
        }
    } else if (!scrolled && compact) {
        headerClasses.remove(this.CssClasses_.CASTING_SHADOW);
        headerClasses.remove(this.CssClasses_.IS_COMPACT);
        if (headerVisible) {
            headerClasses.add(this.CssClasses_.IS_ANIMATING);
        }
    }
};
/**
 * Handles a keyboard event on the drawer: ESCAPE closes an open drawer.
 * @param {KeyboardEvent} evt The event that fired.
 */
MdlLayout.prototype.keyboardEventHandler_ = function (evt: KeyboardEvent) {
    var drawerOpen = this.drawer_.classList.contains(this.CssClasses_.IS_DRAWER_OPEN);
    if (drawerOpen && evt.keyCode === this.Keycodes_.ESCAPE) {
        this.toggleDrawer();
    }
};
/**
 * Handles changes in screen size, toggling the small-screen class and
 * collapsing any open drawer when the layout grows large.
 */
MdlLayout.prototype.screenSizeHandler_ = function () {
    if (this.screenSizeMediaQuery_.matches) {
        this.element_.classList.add(this.CssClasses_.IS_SMALL_SCREEN);
        return;
    }
    this.element_.classList.remove(this.CssClasses_.IS_SMALL_SCREEN);
    // Collapse drawer (if any) when moving to a large screen size.
    if (this.drawer_) {
        this.drawer_.classList.remove(this.CssClasses_.IS_DRAWER_OPEN);
        this.obfuscator_.classList.remove(this.CssClasses_.IS_DRAWER_OPEN);
    }
};
/**
 * Handles click/keydown events of the drawer button: SPACE, ENTER, or a
 * click toggles the drawer; all other keys are ignored.
 * @param {KeyboardEvent} evt The event that fired.
 */
MdlLayout.prototype.drawerToggleHandler_ = function (evt: KeyboardEvent) {
    if (evt && evt.type === 'keydown') {
        var isToggleKey =
            evt.keyCode === this.Keycodes_.SPACE || evt.keyCode === this.Keycodes_.ENTER;
        if (!isToggleKey) {
            // Ignore every other key.
            return;
        }
        // Keep SPACE/ENTER from scrolling the drawer nav.
        evt.preventDefault();
    }
    this.toggleDrawer();
};
/**
 * Clears the `is-animating` marker once a header transition finishes.
 */
MdlLayout.prototype.headerTransitionEndHandler_ = function () {
    var headerClasses = this.header_.classList;
    headerClasses.remove(this.CssClasses_.IS_ANIMATING);
};
/**
 * Expands a compacted header when it is clicked.
 */
MdlLayout.prototype.headerClickHandler_ = function () {
    var headerClasses = this.header_.classList;
    if (!headerClasses.contains(this.CssClasses_.IS_COMPACT)) {
        return;
    }
    headerClasses.remove(this.CssClasses_.IS_COMPACT);
    headerClasses.add(this.CssClasses_.IS_ANIMATING);
};
/**
 * Resets tab state, dropping the active class from every tab.
 */
MdlLayout.prototype.resetTabState_ = function (tabBar: any) {
    Array.prototype.forEach.call(tabBar, (tab: any) => {
        tab.classList.remove(this.CssClasses_.IS_ACTIVE);
    });
};
/**
 * Resets panel state, dropping the active class from every panel.
 */
MdlLayout.prototype.resetPanelState_ = function (panels: any) {
    Array.prototype.forEach.call(panels, (panel: any) => {
        panel.classList.remove(this.CssClasses_.IS_ACTIVE);
    });
};
/**
 * Toggles the drawer open/closed, keeping the obfuscator and the
 * accessibility attributes (aria-hidden / aria-expanded) in sync.
 */
MdlLayout.prototype.toggleDrawer = function () {
    var drawerButton = this.element_.querySelector('.' + this.CssClasses_.DRAWER_BTN);
    var openClass = this.CssClasses_.IS_DRAWER_OPEN;
    this.drawer_.classList.toggle(openClass);
    this.obfuscator_.classList.toggle(openClass);
    var isOpen = this.drawer_.classList.contains(openClass);
    this.drawer_.setAttribute('aria-hidden', isOpen ? 'false' : 'true');
    drawerButton.setAttribute('aria-expanded', isOpen ? 'true' : 'false');
};
/**
 * Initializes the element: resolves the header/drawer/content children,
 * detects the header mode, builds the drawer button and obfuscator, wires
 * up tab scrolling, and finally marks the layout as upgraded.
 */
MdlLayout.prototype.init = function () {
    if (this.element_) {
        var focusedElement = this.element_.querySelector(':focus');
        if (focusedElement) {
            // NOTE(review): querySelector types as Element; .focus() assumes
            // an HTMLElement — relies on non-strict compilation.
            focusedElement.focus();
        }
        /*
        var directChildren = this.element_.childNodes;
        var numChildren = directChildren.length;
        for (var c = 0; c < numChildren; c++) {
            var child = directChildren[c];
            if (child.classList && child.classList.contains(this.CssClasses_.HEADER)) {
                this.header_ = child;
            }
            if (child.classList && child.classList.contains(this.CssClasses_.DRAWER)) {
                this.drawer_ = child;
            }
            if (child.classList && child.classList.contains(this.CssClasses_.CONTENT)) {
                this.content_ = child;
            }
        }
        */
        // This port looks up custom elements instead of the class-based
        // lookup used by upstream MDL (kept commented above for reference).
        this.header_ = this.element_.querySelector('ml-header');
        this.drawer_ = this.element_.querySelector('ml-drawer');
        this.content_ = this.element_.querySelector('ml-content');
        window.addEventListener('pageshow', function (e:any) {
            if (e.persisted) {
                // when page is loaded from back/forward cache
                // trigger repaint to let layout scroll in safari
                this.element_.style.overflowY = 'hidden';
                requestAnimationFrame(function () {
                    this.element_.style.overflowY = '';
                }.bind(this));
            }
        }.bind(this), false);
        if (this.header_) {
            this.tabBar_ = this.header_.querySelector('.' + this.CssClasses_.TAB_BAR);
        }
        // Detect the header mode from its CSS classes.
        var mode = this.Mode_.STANDARD;
        if (this.header_) {
            if (this.header_.classList.contains(this.CssClasses_.HEADER_SEAMED)) {
                mode = this.Mode_.SEAMED;
            } else if (this.header_.classList.contains(this.CssClasses_.HEADER_WATERFALL)) {
                mode = this.Mode_.WATERFALL;
                this.header_.addEventListener('transitionend', this.headerTransitionEndHandler_.bind(this));
                this.header_.addEventListener('click', this.headerClickHandler_.bind(this));
            } else if (this.header_.classList.contains(this.CssClasses_.HEADER_SCROLL)) {
                mode = this.Mode_.SCROLL;
                // TODO: this may be related to the scrolling header not working
                // container.classList.add(this.CssClasses_.HAS_SCROLLING_HEADER);
            }
            if (mode === this.Mode_.STANDARD) {
                this.header_.classList.add(this.CssClasses_.CASTING_SHADOW);
                if (this.tabBar_) {
                    this.tabBar_.classList.add(this.CssClasses_.CASTING_SHADOW);
                }
            } else if (mode === this.Mode_.SEAMED || mode === this.Mode_.SCROLL) {
                this.header_.classList.remove(this.CssClasses_.CASTING_SHADOW);
                if (this.tabBar_) {
                    this.tabBar_.classList.remove(this.CssClasses_.CASTING_SHADOW);
                }
            } else if (mode === this.Mode_.WATERFALL) {
                // Add and remove shadows depending on scroll position.
                // Also add/remove auxiliary class for styling of the compact version of
                // the header.
                this.content_.addEventListener('scroll', this.contentScrollHandler_.bind(this));
                this.contentScrollHandler_();
            }
        }
        // Add drawer toggling button to our layout, if we have an openable drawer.
        if (this.drawer_) {
            var drawerButton = this.element_.querySelector('.' + this.CssClasses_.DRAWER_BTN);
            if (!drawerButton) {
                // No button in the markup: synthesize an accessible one.
                drawerButton = document.createElement('div');
                drawerButton.setAttribute('aria-expanded', 'false');
                drawerButton.setAttribute('role', 'button');
                drawerButton.setAttribute('tabindex', '0');
                drawerButton.classList.add(this.CssClasses_.DRAWER_BTN);
                var drawerButtonIcon = document.createElement('i');
                drawerButtonIcon.classList.add(this.CssClasses_.ICON);
                drawerButtonIcon.innerHTML = this.Constant_.MENU_ICON;
                drawerButton.appendChild(drawerButtonIcon);
            }
            if (this.drawer_.classList.contains(this.CssClasses_.ON_LARGE_SCREEN)) {
                //If drawer has ON_LARGE_SCREEN class then add it to the drawer toggle button as well.
                drawerButton.classList.add(this.CssClasses_.ON_LARGE_SCREEN);
            } else if (this.drawer_.classList.contains(this.CssClasses_.ON_SMALL_SCREEN)) {
                //If drawer has ON_SMALL_SCREEN class then add it to the drawer toggle button as well.
                drawerButton.classList.add(this.CssClasses_.ON_SMALL_SCREEN);
            }
            drawerButton.addEventListener('click', this.drawerToggleHandler_.bind(this));
            drawerButton.addEventListener('keydown', this.drawerToggleHandler_.bind(this));
            // Add a class if the layout has a drawer, for altering the left padding.
            // Adds the HAS_DRAWER to the elements since this.header_ may or may
            // not be present.
            this.element_.classList.add(this.CssClasses_.HAS_DRAWER);
            // If we have a fixed header, add the button to the header rather than
            // the layout.
            if (this.element_.classList.contains(this.CssClasses_.FIXED_HEADER)) {
                this.header_.insertBefore(drawerButton, this.header_.firstChild);
            } else {
                this.element_.insertBefore(drawerButton, this.content_);
            }
            // The obfuscator dims the content and closes the drawer on click.
            var obfuscator = document.createElement('div');
            obfuscator.classList.add(this.CssClasses_.OBFUSCATOR);
            this.element_.appendChild(obfuscator);
            obfuscator.addEventListener('click', this.drawerToggleHandler_.bind(this));
            this.obfuscator_ = obfuscator;
            this.drawer_.addEventListener('keydown', this.keyboardEventHandler_.bind(this));
            this.drawer_.setAttribute('aria-hidden', 'true');
        }
        // Keep an eye on screen size, and add/remove auxiliary class for styling
        // of small screens.
        this.screenSizeMediaQuery_ = window.matchMedia(this.Constant_.MAX_WIDTH);
        this.screenSizeMediaQuery_.addListener(this.screenSizeHandler_.bind(this));
        this.screenSizeHandler_();
        // Initialize tabs, if any.
        if (this.header_ && this.tabBar_) {
            this.element_.classList.add(this.CssClasses_.HAS_TABS);
            // Re-parent the tab bar into a container flanked by scroll buttons.
            var tabContainer = document.createElement('div');
            tabContainer.classList.add(this.CssClasses_.TAB_CONTAINER);
            this.header_.insertBefore(tabContainer, this.tabBar_);
            this.header_.removeChild(this.tabBar_);
            var leftButton = document.createElement('div');
            leftButton.classList.add(this.CssClasses_.TAB_BAR_BUTTON);
            leftButton.classList.add(this.CssClasses_.TAB_BAR_LEFT_BUTTON);
            var leftButtonIcon = document.createElement('i');
            leftButtonIcon.classList.add(this.CssClasses_.ICON);
            leftButtonIcon.textContent = this.Constant_.CHEVRON_LEFT;
            leftButton.appendChild(leftButtonIcon);
            leftButton.addEventListener('click', function () {
                this.tabBar_.scrollLeft -= this.Constant_.TAB_SCROLL_PIXELS;
            }.bind(this));
            var rightButton = document.createElement('div');
            rightButton.classList.add(this.CssClasses_.TAB_BAR_BUTTON);
            rightButton.classList.add(this.CssClasses_.TAB_BAR_RIGHT_BUTTON);
            var rightButtonIcon = document.createElement('i');
            rightButtonIcon.classList.add(this.CssClasses_.ICON);
            rightButtonIcon.textContent = this.Constant_.CHEVRON_RIGHT;
            rightButton.appendChild(rightButtonIcon);
            rightButton.addEventListener('click', function () {
                this.tabBar_.scrollLeft += this.Constant_.TAB_SCROLL_PIXELS;
            }.bind(this));
            tabContainer.appendChild(leftButton);
            tabContainer.appendChild(this.tabBar_);
            tabContainer.appendChild(rightButton);
            // Add and remove tab buttons depending on scroll position and total
            // window size.
            var tabUpdateHandler = function () {
                if (this.tabBar_.scrollLeft > 0) {
                    leftButton.classList.add(this.CssClasses_.IS_ACTIVE);
                } else {
                    leftButton.classList.remove(this.CssClasses_.IS_ACTIVE);
                }
                if (this.tabBar_.scrollLeft < this.tabBar_.scrollWidth - this.tabBar_.offsetWidth) {
                    rightButton.classList.add(this.CssClasses_.IS_ACTIVE);
                } else {
                    rightButton.classList.remove(this.CssClasses_.IS_ACTIVE);
                }
            }.bind(this);
            this.tabBar_.addEventListener('scroll', tabUpdateHandler);
            tabUpdateHandler();
            // Update tabs when the window resizes.
            var windowResizeHandler = function () {
                // Use timeouts to make sure it doesn't happen too often.
                if (this.resizeTimeoutId_) {
                    clearTimeout(this.resizeTimeoutId_);
                }
                this.resizeTimeoutId_ = setTimeout(function () {
                    tabUpdateHandler();
                    this.resizeTimeoutId_ = null;
                }.bind(this), this.Constant_.RESIZE_TIMEOUT);
            }.bind(this);
            window.addEventListener('resize', windowResizeHandler);
            if (this.tabBar_.classList.contains(this.CssClasses_.JS_RIPPLE_EFFECT)) {
                this.tabBar_.classList.add(this.CssClasses_.RIPPLE_IGNORE_EVENTS);
            }
            // Select element tabs, document panels
            var tabs = this.tabBar_.querySelectorAll('.' + this.CssClasses_.TAB);
            var panels = this.content_.querySelectorAll('.' + this.CssClasses_.PANEL);
            // Create new tabs for each tab element
            for (var i = 0; i < tabs.length; i++) {
                MaterialLayoutTab(tabs[i], tabs, panels, this);
            }
        }
        this.element_.classList.add(this.CssClasses_.IS_UPGRADED);
    }
};
/**
 * Factory for an individual tab: installs ripple markup (when enabled),
 * intercepts in-page anchor clicks, and exposes `tab.show`.
 *
 * @constructor
 * @param tab The HTML element for the tab.
 * @param tabs Array with HTML elements for all tabs.
 * @param panels Array with HTML elements for all panels.
 * @param layout The MdlLayout element that owns the tab.
 */
export function MaterialLayoutTab(tab: any, tabs: any, panels: any, layout: any) {
    /**
     * Activates this tab and its matching panel, deactivating all others.
     */
    function selectTab() {
        var panelId = tab.href.split('#')[1];
        var panel = layout.content_.querySelector('#' + panelId);
        layout.resetTabState_(tabs);
        layout.resetPanelState_(panels);
        tab.classList.add(layout.CssClasses_.IS_ACTIVE);
        panel.classList.add(layout.CssClasses_.IS_ACTIVE);
    }
    if (layout.tabBar_.classList.contains(layout.CssClasses_.JS_RIPPLE_EFFECT)) {
        var rippleContainer = document.createElement('span');
        rippleContainer.classList.add(layout.CssClasses_.RIPPLE_CONTAINER);
        rippleContainer.classList.add(layout.CssClasses_.JS_RIPPLE_EFFECT);
        var ripple = document.createElement('span');
        ripple.classList.add(layout.CssClasses_.RIPPLE);
        rippleContainer.appendChild(ripple);
        tab.appendChild(rippleContainer);
    }
    tab.addEventListener('click', (e: any) => {
        // Only hijack same-page anchors; external hrefs navigate normally.
        if (tab.getAttribute('href').charAt(0) === '#') {
            e.preventDefault();
            selectTab();
        }
    });
    tab.show = selectTab;
}
/**
 * Activates the tab whose href hash matches `idTab` and shows its content,
 * deactivating all other tabs and panels first.
 *
 * @param idTab Id (hash fragment, without '#') of the tab to activate.
 */
MdlLayout.prototype.activateTab = function (idTab: string): void {
    const tabsBar: HTMLElement = document.querySelector('ml-header-tabs') as HTMLElement;
    // querySelectorAll returns a NodeList, not an HTMLCollection; keep the
    // real type instead of asserting a wrong one (the helpers accept any
    // array-like, so no cast is needed).
    const tabheaderContent = document.querySelectorAll('ml-header-tab-content');
    this.resetTabState_(tabsBar.children);
    this.resetPanelState_(tabheaderContent);
    const tabToActivate = this.getTab(idTab);
    if (tabToActivate) {
        this.setTabActive(tabToActivate);
        this.setTabContentActive(idTab);
    }
};
/**
 * Finds the tab anchor whose hash matches the given id.
 *
 * @param href Hash fragment (without '#') identifying the tab.
 * @returns The matching tab element, or undefined when none matches.
 */
MdlLayout.prototype.getTab = function (href: string): HTMLElement | void {
    // querySelectorAll returns a NodeList, not an HTMLCollection — no cast.
    const tabs = document.querySelectorAll('ml-header-tabs > *');
    const target = '#' + href;
    const match = Array.from(tabs).find(
        (tab) => (tab as HTMLAnchorElement).hash === target
    );
    if (match) {
        return match as HTMLElement;
    }
};
/**
 * Marks a tab as the active one.
 *
 * @param tab The tab element to activate.
 */
MdlLayout.prototype.setTabActive = function (tab: HTMLElement): void {
    // Use the shared class-name table instead of a hard-coded 'is-active'
    // literal, consistent with the rest of the component.
    tab.classList.add(this.CssClasses_.IS_ACTIVE);
};
MdlLayout.prototype.setTabContentActive = function(id: string): void {
(document.getElementById(id) as HTMLElement).classList.add('is-active');
}; | the_stack |
import { HttpClient, HttpResponse, HttpEvent } from '@angular/common/http';
import { Inject, Injectable, Optional } from '@angular/core';
import { OrgsAPIClientInterface } from './orgs-api-client.interface';
import { Observable } from 'rxjs';
import { tap } from 'rxjs/operators';
import { USE_DOMAIN, USE_HTTP_OPTIONS, OrgsAPIClient } from './orgs-api-client.service';
import { DefaultHttpOptions, HttpOptions } from '../../types';
import * as models from '../../models';
import * as guards from '../../guards';
@Injectable()
export class GuardedOrgsAPIClient extends OrgsAPIClient implements OrgsAPIClientInterface {
  constructor(
    readonly httpClient: HttpClient,
    @Optional() @Inject(USE_DOMAIN) domain?: string,
    @Optional() @Inject(USE_HTTP_OPTIONS) options?: DefaultHttpOptions,
  ) {
    // All configuration (base domain, default HTTP options) is handled by
    // the generated OrgsAPIClient base class; this subclass only adds
    // runtime type-guard checks on responses.
    super(httpClient, domain, options);
  }
/**
* Get an Organization.
* Response generated for [ 200 ] HTTP response code.
*/
getOrgsOrg(
args: Exclude<OrgsAPIClientInterface['getOrgsOrgParams'], undefined>,
requestHttpOptions?: HttpOptions,
observe?: 'body',
): Observable<models.Organization>;
getOrgsOrg(
args: Exclude<OrgsAPIClientInterface['getOrgsOrgParams'], undefined>,
requestHttpOptions?: HttpOptions,
observe?: 'response',
): Observable<HttpResponse<models.Organization>>;
getOrgsOrg(
args: Exclude<OrgsAPIClientInterface['getOrgsOrgParams'], undefined>,
requestHttpOptions?: HttpOptions,
observe?: 'events',
): Observable<HttpEvent<models.Organization>>;
getOrgsOrg(
args: Exclude<OrgsAPIClientInterface['getOrgsOrgParams'], undefined>,
requestHttpOptions?: HttpOptions,
observe: any = 'body',
): Observable<models.Organization | HttpResponse<models.Organization> | HttpEvent<models.Organization>> {
return super.getOrgsOrg(args, requestHttpOptions, observe)
.pipe(tap((res: any) => guards.isOrganization(res) || console.error(`TypeGuard for response 'models.Organization' caught inconsistency.`, res)));
}
/**
* Edit an Organization.
* Response generated for [ 200 ] HTTP response code.
*/
patchOrgsOrg(
args: Exclude<OrgsAPIClientInterface['patchOrgsOrgParams'], undefined>,
requestHttpOptions?: HttpOptions,
observe?: 'body',
): Observable<models.Organization>;
patchOrgsOrg(
args: Exclude<OrgsAPIClientInterface['patchOrgsOrgParams'], undefined>,
requestHttpOptions?: HttpOptions,
observe?: 'response',
): Observable<HttpResponse<models.Organization>>;
patchOrgsOrg(
args: Exclude<OrgsAPIClientInterface['patchOrgsOrgParams'], undefined>,
requestHttpOptions?: HttpOptions,
observe?: 'events',
): Observable<HttpEvent<models.Organization>>;
patchOrgsOrg(
args: Exclude<OrgsAPIClientInterface['patchOrgsOrgParams'], undefined>,
requestHttpOptions?: HttpOptions,
observe: any = 'body',
): Observable<models.Organization | HttpResponse<models.Organization> | HttpEvent<models.Organization>> {
return super.patchOrgsOrg(args, requestHttpOptions, observe)
.pipe(tap((res: any) => guards.isOrganization(res) || console.error(`TypeGuard for response 'models.Organization' caught inconsistency.`, res)));
}
/**
* List public events for an organization.
* Response generated for [ 200 ] HTTP response code.
*/
getOrgsOrgEvents(
args: Exclude<OrgsAPIClientInterface['getOrgsOrgEventsParams'], undefined>,
requestHttpOptions?: HttpOptions,
observe?: 'body',
): Observable<models.Events>;
getOrgsOrgEvents(
args: Exclude<OrgsAPIClientInterface['getOrgsOrgEventsParams'], undefined>,
requestHttpOptions?: HttpOptions,
observe?: 'response',
): Observable<HttpResponse<models.Events>>;
getOrgsOrgEvents(
args: Exclude<OrgsAPIClientInterface['getOrgsOrgEventsParams'], undefined>,
requestHttpOptions?: HttpOptions,
observe?: 'events',
): Observable<HttpEvent<models.Events>>;
getOrgsOrgEvents(
args: Exclude<OrgsAPIClientInterface['getOrgsOrgEventsParams'], undefined>,
requestHttpOptions?: HttpOptions,
observe: any = 'body',
): Observable<models.Events | HttpResponse<models.Events> | HttpEvent<models.Events>> {
return super.getOrgsOrgEvents(args, requestHttpOptions, observe)
.pipe(tap((res: any) => guards.isEvents(res) || console.error(`TypeGuard for response 'models.Events' caught inconsistency.`, res)));
}
/**
* List issues.
* List all issues for a given organization for the authenticated user.
*
* Response generated for [ 200 ] HTTP response code.
*/
getOrgsOrgIssues(
args: Exclude<OrgsAPIClientInterface['getOrgsOrgIssuesParams'], undefined>,
requestHttpOptions?: HttpOptions,
observe?: 'body',
): Observable<models.Issues>;
getOrgsOrgIssues(
args: Exclude<OrgsAPIClientInterface['getOrgsOrgIssuesParams'], undefined>,
requestHttpOptions?: HttpOptions,
observe?: 'response',
): Observable<HttpResponse<models.Issues>>;
getOrgsOrgIssues(
args: Exclude<OrgsAPIClientInterface['getOrgsOrgIssuesParams'], undefined>,
requestHttpOptions?: HttpOptions,
observe?: 'events',
): Observable<HttpEvent<models.Issues>>;
getOrgsOrgIssues(
args: Exclude<OrgsAPIClientInterface['getOrgsOrgIssuesParams'], undefined>,
requestHttpOptions?: HttpOptions,
observe: any = 'body',
): Observable<models.Issues | HttpResponse<models.Issues> | HttpEvent<models.Issues>> {
return super.getOrgsOrgIssues(args, requestHttpOptions, observe)
.pipe(tap((res: any) => guards.isIssues(res) || console.error(`TypeGuard for response 'models.Issues' caught inconsistency.`, res)));
}
/**
* Members list.
* List all users who are members of an organization. A member is a user tha
* belongs to at least 1 team in the organization. If the authenticated user
* is also an owner of this organization then both concealed and public members
* will be returned. If the requester is not an owner of the organization the
* query will be redirected to the public members list.
*
* Response generated for [ 200 ] HTTP response code.
*/
getOrgsOrgMembers(
args: Exclude<OrgsAPIClientInterface['getOrgsOrgMembersParams'], undefined>,
requestHttpOptions?: HttpOptions,
observe?: 'body',
): Observable<models.Users>;
getOrgsOrgMembers(
args: Exclude<OrgsAPIClientInterface['getOrgsOrgMembersParams'], undefined>,
requestHttpOptions?: HttpOptions,
observe?: 'response',
): Observable<HttpResponse<models.Users>>;
getOrgsOrgMembers(
args: Exclude<OrgsAPIClientInterface['getOrgsOrgMembersParams'], undefined>,
requestHttpOptions?: HttpOptions,
observe?: 'events',
): Observable<HttpEvent<models.Users>>;
getOrgsOrgMembers(
args: Exclude<OrgsAPIClientInterface['getOrgsOrgMembersParams'], undefined>,
requestHttpOptions?: HttpOptions,
observe: any = 'body',
): Observable<models.Users | HttpResponse<models.Users> | HttpEvent<models.Users>> {
return super.getOrgsOrgMembers(args, requestHttpOptions, observe)
.pipe(tap((res: any) => guards.isUsers(res) || console.error(`TypeGuard for response 'models.Users' caught inconsistency.`, res)));
}
  /**
   * Remove a member.
   * Removing a user from this list will remove them from all teams and they
   * will no longer have any access to the organization's repositories.
   *
   * Response generated for [ 204 ] HTTP response code.
   */
  deleteOrgsOrgMembersUsername(
    args: Exclude<OrgsAPIClientInterface['deleteOrgsOrgMembersUsernameParams'], undefined>,
    requestHttpOptions?: HttpOptions,
    observe?: 'body',
  ): Observable<void>;
  deleteOrgsOrgMembersUsername(
    args: Exclude<OrgsAPIClientInterface['deleteOrgsOrgMembersUsernameParams'], undefined>,
    requestHttpOptions?: HttpOptions,
    observe?: 'response',
  ): Observable<HttpResponse<void>>;
  deleteOrgsOrgMembersUsername(
    args: Exclude<OrgsAPIClientInterface['deleteOrgsOrgMembersUsernameParams'], undefined>,
    requestHttpOptions?: HttpOptions,
    observe?: 'events',
  ): Observable<HttpEvent<void>>;
  deleteOrgsOrgMembersUsername(
    args: Exclude<OrgsAPIClientInterface['deleteOrgsOrgMembersUsernameParams'], undefined>,
    requestHttpOptions?: HttpOptions,
    observe: any = 'body',
  ): Observable<void | HttpResponse<void> | HttpEvent<void>> {
    // 204 No Content: there is no payload to type-guard, so the call is
    // forwarded to the generated client unchanged.
    return super.deleteOrgsOrgMembersUsername(args, requestHttpOptions, observe);
  }
  /**
   * Check if a user is, publicly or privately, a member of the organization.
   * Response generated for [ 204 ] HTTP response code.
   */
  getOrgsOrgMembersUsername(
    args: Exclude<OrgsAPIClientInterface['getOrgsOrgMembersUsernameParams'], undefined>,
    requestHttpOptions?: HttpOptions,
    observe?: 'body',
  ): Observable<void>;
  getOrgsOrgMembersUsername(
    args: Exclude<OrgsAPIClientInterface['getOrgsOrgMembersUsernameParams'], undefined>,
    requestHttpOptions?: HttpOptions,
    observe?: 'response',
  ): Observable<HttpResponse<void>>;
  getOrgsOrgMembersUsername(
    args: Exclude<OrgsAPIClientInterface['getOrgsOrgMembersUsernameParams'], undefined>,
    requestHttpOptions?: HttpOptions,
    observe?: 'events',
  ): Observable<HttpEvent<void>>;
  getOrgsOrgMembersUsername(
    args: Exclude<OrgsAPIClientInterface['getOrgsOrgMembersUsernameParams'], undefined>,
    requestHttpOptions?: HttpOptions,
    observe: any = 'body',
  ): Observable<void | HttpResponse<void> | HttpEvent<void>> {
    // 204 No Content: there is no payload to type-guard, so the call is
    // forwarded to the generated client unchanged.
    return super.getOrgsOrgMembersUsername(args, requestHttpOptions, observe);
  }
/**
* Public members list.
* Members of an organization can choose to have their membership publicized
* or not.
*
* Response generated for [ 200 ] HTTP response code.
*/
getOrgsOrgPublicMembers(
args: Exclude<OrgsAPIClientInterface['getOrgsOrgPublicMembersParams'], undefined>,
requestHttpOptions?: HttpOptions,
observe?: 'body',
): Observable<models.Users>;
getOrgsOrgPublicMembers(
args: Exclude<OrgsAPIClientInterface['getOrgsOrgPublicMembersParams'], undefined>,
requestHttpOptions?: HttpOptions,
observe?: 'response',
): Observable<HttpResponse<models.Users>>;
getOrgsOrgPublicMembers(
args: Exclude<OrgsAPIClientInterface['getOrgsOrgPublicMembersParams'], undefined>,
requestHttpOptions?: HttpOptions,
observe?: 'events',
): Observable<HttpEvent<models.Users>>;
getOrgsOrgPublicMembers(
args: Exclude<OrgsAPIClientInterface['getOrgsOrgPublicMembersParams'], undefined>,
requestHttpOptions?: HttpOptions,
observe: any = 'body',
): Observable<models.Users | HttpResponse<models.Users> | HttpEvent<models.Users>> {
return super.getOrgsOrgPublicMembers(args, requestHttpOptions, observe)
.pipe(tap((res: any) => guards.isUsers(res) || console.error(`TypeGuard for response 'models.Users' caught inconsistency.`, res)));
}
  /**
   * Conceal a user's membership.
   * Response generated for [ 204 ] HTTP response code.
   */
  deleteOrgsOrgPublicMembersUsername(
    args: Exclude<OrgsAPIClientInterface['deleteOrgsOrgPublicMembersUsernameParams'], undefined>,
    requestHttpOptions?: HttpOptions,
    observe?: 'body',
  ): Observable<void>;
  deleteOrgsOrgPublicMembersUsername(
    args: Exclude<OrgsAPIClientInterface['deleteOrgsOrgPublicMembersUsernameParams'], undefined>,
    requestHttpOptions?: HttpOptions,
    observe?: 'response',
  ): Observable<HttpResponse<void>>;
  deleteOrgsOrgPublicMembersUsername(
    args: Exclude<OrgsAPIClientInterface['deleteOrgsOrgPublicMembersUsernameParams'], undefined>,
    requestHttpOptions?: HttpOptions,
    observe?: 'events',
  ): Observable<HttpEvent<void>>;
  deleteOrgsOrgPublicMembersUsername(
    args: Exclude<OrgsAPIClientInterface['deleteOrgsOrgPublicMembersUsernameParams'], undefined>,
    requestHttpOptions?: HttpOptions,
    observe: any = 'body',
  ): Observable<void | HttpResponse<void> | HttpEvent<void>> {
    // 204 No Content: there is no payload to type-guard, so the call is
    // forwarded to the generated client unchanged.
    return super.deleteOrgsOrgPublicMembersUsername(args, requestHttpOptions, observe);
  }
/**
* Check public membership.
* Response generated for [ 204 ] HTTP response code.
*
* The overloads select the emitted payload via `observe`: 'body' (default,
* void), 'response' (full HttpResponse), or 'events' (HttpEvent stream).
*/
getOrgsOrgPublicMembersUsername(
args: Exclude<OrgsAPIClientInterface['getOrgsOrgPublicMembersUsernameParams'], undefined>,
requestHttpOptions?: HttpOptions,
observe?: 'body',
): Observable<void>;
getOrgsOrgPublicMembersUsername(
args: Exclude<OrgsAPIClientInterface['getOrgsOrgPublicMembersUsernameParams'], undefined>,
requestHttpOptions?: HttpOptions,
observe?: 'response',
): Observable<HttpResponse<void>>;
getOrgsOrgPublicMembersUsername(
args: Exclude<OrgsAPIClientInterface['getOrgsOrgPublicMembersUsernameParams'], undefined>,
requestHttpOptions?: HttpOptions,
observe?: 'events',
): Observable<HttpEvent<void>>;
// Implementation: no response body, so no type-guard check — delegate
// directly to the generated base client.
getOrgsOrgPublicMembersUsername(
args: Exclude<OrgsAPIClientInterface['getOrgsOrgPublicMembersUsernameParams'], undefined>,
requestHttpOptions?: HttpOptions,
observe: any = 'body',
): Observable<void | HttpResponse<void> | HttpEvent<void>> {
return super.getOrgsOrgPublicMembersUsername(args, requestHttpOptions, observe);
}
/**
* Publicize a user's membership.
* Response generated for [ 204 ] HTTP response code.
*
* The overloads select the emitted payload via `observe`: 'body' (default,
* void), 'response' (full HttpResponse), or 'events' (HttpEvent stream).
*/
putOrgsOrgPublicMembersUsername(
args: Exclude<OrgsAPIClientInterface['putOrgsOrgPublicMembersUsernameParams'], undefined>,
requestHttpOptions?: HttpOptions,
observe?: 'body',
): Observable<void>;
putOrgsOrgPublicMembersUsername(
args: Exclude<OrgsAPIClientInterface['putOrgsOrgPublicMembersUsernameParams'], undefined>,
requestHttpOptions?: HttpOptions,
observe?: 'response',
): Observable<HttpResponse<void>>;
putOrgsOrgPublicMembersUsername(
args: Exclude<OrgsAPIClientInterface['putOrgsOrgPublicMembersUsernameParams'], undefined>,
requestHttpOptions?: HttpOptions,
observe?: 'events',
): Observable<HttpEvent<void>>;
// Implementation: no response body, so no type-guard check — delegate
// directly to the generated base client.
putOrgsOrgPublicMembersUsername(
args: Exclude<OrgsAPIClientInterface['putOrgsOrgPublicMembersUsernameParams'], undefined>,
requestHttpOptions?: HttpOptions,
observe: any = 'body',
): Observable<void | HttpResponse<void> | HttpEvent<void>> {
return super.putOrgsOrgPublicMembersUsername(args, requestHttpOptions, observe);
}
/**
* List repositories for the specified org.
* Response generated for [ 200 ] HTTP response code.
* The overloads select the emitted payload via `observe`: 'body' (default),
* 'response' (full HttpResponse), or 'events' (HttpEvent stream).
*/
getOrgsOrgRepos(
args: Exclude<OrgsAPIClientInterface['getOrgsOrgReposParams'], undefined>,
requestHttpOptions?: HttpOptions,
observe?: 'body',
): Observable<models.Repos>;
getOrgsOrgRepos(
args: Exclude<OrgsAPIClientInterface['getOrgsOrgReposParams'], undefined>,
requestHttpOptions?: HttpOptions,
observe?: 'response',
): Observable<HttpResponse<models.Repos>>;
getOrgsOrgRepos(
args: Exclude<OrgsAPIClientInterface['getOrgsOrgReposParams'], undefined>,
requestHttpOptions?: HttpOptions,
observe?: 'events',
): Observable<HttpEvent<models.Repos>>;
getOrgsOrgRepos(
args: Exclude<OrgsAPIClientInterface['getOrgsOrgReposParams'], undefined>,
requestHttpOptions?: HttpOptions,
observe: any = 'body',
): Observable<models.Repos | HttpResponse<models.Repos> | HttpEvent<models.Repos>> {
  // Delegate to the generated base client, then verify the payload shape
  // with the runtime type guard; inconsistencies are logged, not thrown.
  const result$ = super.getOrgsOrgRepos(args, requestHttpOptions, observe);
  return result$.pipe(
    tap((response: any) => {
      if (!guards.isRepos(response)) {
        console.error(`TypeGuard for response 'models.Repos' caught inconsistency.`, response);
      }
    })
  );
}
/**
* Create a new repository for the authenticated user. OAuth users must supply
* repo scope.
*
* Response generated for [ 201 ] HTTP response code.
* The overloads select the emitted payload via `observe`: 'body' (default),
* 'response' (full HttpResponse), or 'events' (HttpEvent stream).
*/
postOrgsOrgRepos(
args: Exclude<OrgsAPIClientInterface['postOrgsOrgReposParams'], undefined>,
requestHttpOptions?: HttpOptions,
observe?: 'body',
): Observable<models.Repos>;
postOrgsOrgRepos(
args: Exclude<OrgsAPIClientInterface['postOrgsOrgReposParams'], undefined>,
requestHttpOptions?: HttpOptions,
observe?: 'response',
): Observable<HttpResponse<models.Repos>>;
postOrgsOrgRepos(
args: Exclude<OrgsAPIClientInterface['postOrgsOrgReposParams'], undefined>,
requestHttpOptions?: HttpOptions,
observe?: 'events',
): Observable<HttpEvent<models.Repos>>;
postOrgsOrgRepos(
args: Exclude<OrgsAPIClientInterface['postOrgsOrgReposParams'], undefined>,
requestHttpOptions?: HttpOptions,
observe: any = 'body',
): Observable<models.Repos | HttpResponse<models.Repos> | HttpEvent<models.Repos>> {
  // Delegate to the generated base client, then verify the payload shape
  // with the runtime type guard; inconsistencies are logged, not thrown.
  const result$ = super.postOrgsOrgRepos(args, requestHttpOptions, observe);
  return result$.pipe(
    tap((response: any) => {
      if (!guards.isRepos(response)) {
        console.error(`TypeGuard for response 'models.Repos' caught inconsistency.`, response);
      }
    })
  );
}
/**
* List teams.
* Response generated for [ 200 ] HTTP response code.
* The overloads select the emitted payload via `observe`: 'body' (default),
* 'response' (full HttpResponse), or 'events' (HttpEvent stream).
*/
getOrgsOrgTeams(
args: Exclude<OrgsAPIClientInterface['getOrgsOrgTeamsParams'], undefined>,
requestHttpOptions?: HttpOptions,
observe?: 'body',
): Observable<models.Teams>;
getOrgsOrgTeams(
args: Exclude<OrgsAPIClientInterface['getOrgsOrgTeamsParams'], undefined>,
requestHttpOptions?: HttpOptions,
observe?: 'response',
): Observable<HttpResponse<models.Teams>>;
getOrgsOrgTeams(
args: Exclude<OrgsAPIClientInterface['getOrgsOrgTeamsParams'], undefined>,
requestHttpOptions?: HttpOptions,
observe?: 'events',
): Observable<HttpEvent<models.Teams>>;
getOrgsOrgTeams(
args: Exclude<OrgsAPIClientInterface['getOrgsOrgTeamsParams'], undefined>,
requestHttpOptions?: HttpOptions,
observe: any = 'body',
): Observable<models.Teams | HttpResponse<models.Teams> | HttpEvent<models.Teams>> {
  // Delegate to the generated base client, then verify the payload shape
  // with the runtime type guard; inconsistencies are logged, not thrown.
  const result$ = super.getOrgsOrgTeams(args, requestHttpOptions, observe);
  return result$.pipe(
    tap((response: any) => {
      if (!guards.isTeams(response)) {
        console.error(`TypeGuard for response 'models.Teams' caught inconsistency.`, response);
      }
    })
  );
}
/**
* Create team.
* In order to create a team, the authenticated user must be an owner of organization.
*
* Response generated for [ 201 ] HTTP response code.
* The overloads select the emitted payload via `observe`: 'body' (default),
* 'response' (full HttpResponse), or 'events' (HttpEvent stream).
*/
postOrgsOrgTeams(
args: Exclude<OrgsAPIClientInterface['postOrgsOrgTeamsParams'], undefined>,
requestHttpOptions?: HttpOptions,
observe?: 'body',
): Observable<models.Team>;
postOrgsOrgTeams(
args: Exclude<OrgsAPIClientInterface['postOrgsOrgTeamsParams'], undefined>,
requestHttpOptions?: HttpOptions,
observe?: 'response',
): Observable<HttpResponse<models.Team>>;
postOrgsOrgTeams(
args: Exclude<OrgsAPIClientInterface['postOrgsOrgTeamsParams'], undefined>,
requestHttpOptions?: HttpOptions,
observe?: 'events',
): Observable<HttpEvent<models.Team>>;
postOrgsOrgTeams(
args: Exclude<OrgsAPIClientInterface['postOrgsOrgTeamsParams'], undefined>,
requestHttpOptions?: HttpOptions,
observe: any = 'body',
): Observable<models.Team | HttpResponse<models.Team> | HttpEvent<models.Team>> {
  // Delegate to the generated base client, then verify the payload shape
  // with the runtime type guard; inconsistencies are logged, not thrown.
  const result$ = super.postOrgsOrgTeams(args, requestHttpOptions, observe);
  return result$.pipe(
    tap((response: any) => {
      if (!guards.isTeam(response)) {
        console.error(`TypeGuard for response 'models.Team' caught inconsistency.`, response);
      }
    })
  );
}
} | the_stack |
import {
Frame,
Page,
ViewportSize,
ChromiumBrowserContext,
BrowserContext,
HTTPCredentials,
Cookie,
} from 'playwright'
import debugFactory from 'debug'
import ms from 'ms'
import mime from 'mime-types'
import { Condition } from '../page/Condition'
import { Browser as BrowserInterface } from '../interface/IBrowser'
import { NullableLocatable } from './Locatable'
import {
ElementHandle,
NavigationOptions,
ScreenshotOptions,
EvaluateFn,
ClickOptions,
BrowserType,
CookiesFilterParams,
Locator,
ScrollDirection,
} from '../page/types'
import { TargetLocator } from '../page/TargetLocator'
import { PlaywrightClientLike } from '../driver/Playwright'
import { WorkRoot } from '../runtime-environment/types'
import { Key, KeyDefinitions } from '../page/Enums'
import { ConcreteTestSettings, DEFAULT_WAIT_TIMEOUT_MILLISECONDS } from './Settings'
import { NetworkErrorData, ActionErrorData } from './errors/Types'
import { StructuredError } from '../utils/StructuredError'
import Mouse from '../page/Mouse'
import { addCallbacks } from './decorators/addCallbacks'
import { autoWaitUntil } from './decorators/autoWait'
import { locatableToLocator, toLocatorError } from './toLocatorError'
import { Keyboard } from '../page/Keyboard'
import { getFrames } from '../utils/frames'
import { DeviceDescriptor } from '../page/Device'
import chalk from 'chalk'
import KSUID from 'ksuid'
import { Point } from '../page/Point'
import { isAnElementHandle, isLocator, isPoint } from '../utils/CheckInstance'
// Namespaced debug logger for this module (enable with DEBUG=element:runtime:browser)
export const debug = debugFactory('element:runtime:browser')
export class Browser<T> implements BrowserInterface {
public screenshots: string[]
customContext: T
private newPageCallback: (resolve: (page: Page) => void) => void
private newPagePromise: Promise<Page>
private multipleUser = false
constructor(
public workRoot: WorkRoot,
private client: PlaywrightClientLike,
public settings: ConcreteTestSettings,
public beforeFunc: (browser: Browser<T>, name: string) => Promise<void> = async () => undefined,
public afterFunc: (browser: Browser<T>, name: string) => Promise<void> = async () => undefined,
private activeFrame?: Frame | null
) {
this.screenshots = []
this.newPageCallback = (resolve) => {
this.client.page.context().on('page', async (newPage) => {
this.client.page = newPage
resolve(newPage)
})
}
this.newPagePromise = new Promise((resolve) => {
this.newPageCallback(resolve)
})
}
public context(): BrowserContext {
return this.page.context()
}
public testData(name: string): string {
return this.workRoot.testData(name)
}
public setMultipleUser(isMultiple: boolean): void {
this.multipleUser = isMultiple
}
public get target(): Frame {
if (this.activeFrame) {
if (this.activeFrame.isDetached()) {
this.activeFrame = null
throw new Error(`Frame is detached`)
} else {
return this.activeFrame
}
} else {
return this.page.mainFrame()
}
}
public get browser(): BrowserType {
return this.settings.browser
}
public get page(): Page {
return this.client.page
}
public get pages(): Page[] {
return this.client.page.context().pages()
}
public get frames(): Frame[] {
return getFrames(this.page.frames())
}
public get mouse(): Mouse {
return new Mouse(this.page)
}
public get keyboard(): Keyboard {
return new Keyboard(this.page)
}
/**
* Returns the URL of the current frame/page
*/
public get url(): string {
return this.page.url()
}
private getKeyCode(key: string): string {
const lowerKey = key.toLowerCase()
//if key = `KeyA` or function key likes `CONTROL`, just return this key
if (lowerKey.includes('key') || Object.values(Key).includes(key)) {
return key
}
//now to process to get the key code
for (const key in KeyDefinitions) {
const keyObj = KeyDefinitions[key]
if (lowerKey === keyObj.key) {
return keyObj.code
}
}
return ''
}
@addCallbacks()
public title(): Promise<string> {
return this.page.title()
}
@addCallbacks()
public async evaluate(fn: EvaluateFn, ...args: any[]): Promise<any> {
return this.evaluateWithoutDecorator(fn, ...args)
}
@addCallbacks()
public async authenticate(username?: string, password?: string): Promise<void> {
let authOptions: HTTPCredentials | null = null
if (username !== undefined && password !== undefined) {
authOptions = { username, password }
}
await this.page.context().setHTTPCredentials(authOptions)
}
@addCallbacks()
public async wait(timeoutOrCondition: Condition | number | string): Promise<any> {
await this.waitWithoutDecorator(timeoutOrCondition)
}
@addCallbacks()
public async visit(url: string, options: NavigationOptions = {}): Promise<any> {
try {
return this.page.goto(url, {
timeout: Number(this.settings.waitTimeout),
waitUntil: 'load',
...options,
})
} catch (e) {
let finalErr = e
if (e.message.includes('net::ERR_NAME_NOT_RESOLVED')) {
finalErr = new StructuredError<NetworkErrorData>(
'domain name not resolved',
{
_kind: 'net',
url,
kind: 'net',
subKind: 'not-resolved',
},
e
)
}
if (e.message.includes('Navigation Timeout Exceeded')) {
finalErr = new StructuredError<NetworkErrorData>(
'navigation timed out',
{
_kind: 'net',
url,
kind: 'net',
subKind: 'navigation-timeout',
},
e
)
}
throw finalErr
}
}
/**
* Sends a click event to the element located at `selector`. If the element is
* currently outside the viewport it will first scroll to that element.
*/
@autoWaitUntil()
@addCallbacks()
public async click(selectorOrLocator: NullableLocatable, options?: ClickOptions): Promise<void> {
const element = await this.findElementWithoutDecorator(selectorOrLocator)
return element.click(options)
}
/**
* Sends a double-click event to the element located by the supplied Locator or `selector`. If the element is
* currently outside the viewport it will first scroll to that element.
*/
@autoWaitUntil()
@addCallbacks()
public async doubleClick(
selectorOrLocator: NullableLocatable,
options?: ClickOptions
): Promise<void> {
const element = await this.findElementWithoutDecorator(selectorOrLocator)
return element.click({ clickCount: 2, ...options })
}
@autoWaitUntil()
@addCallbacks()
public async selectByValue(locatable: NullableLocatable, ...values: string[]): Promise<string[]> {
const element = await this.findElementWithoutDecorator(locatable)
return this.evaluateWithoutDecorator(
(args: any[]) => {
const [element, values] = args as [HTMLSelectElement, any]
if (element.nodeName.toLowerCase() !== 'select')
throw new Error('Element is not a <select> element.')
const options = Array.from(element.options)
element.value = ''
for (const option of options) option.selected = values.includes(option.value)
element.dispatchEvent(new Event('input', { bubbles: true }))
element.dispatchEvent(new Event('change', { bubbles: true }))
return options.filter((option) => option.selected).map((option) => option.value)
},
[element.element, values]
)
}
@autoWaitUntil()
@addCallbacks()
public async selectByIndex(locatable: NullableLocatable, index: string): Promise<string[]> {
// TODO: Write tests for this
const element = await this.findElementWithoutDecorator(locatable)
return this.evaluateWithoutDecorator(
(args: any[]) => {
const [element, index] = args as [HTMLSelectElement, number]
if (element.nodeName.toLowerCase() !== 'select')
throw new Error('Element is not a <select> element.')
const options = Array.from(element.options)
element.value = ''
element.selectedIndex = index
element.dispatchEvent(new Event('input', { bubbles: true }))
element.dispatchEvent(new Event('change', { bubbles: true }))
return options.filter((option) => option.selected).map((option) => option.value)
},
[element.element, index]
)
}
@autoWaitUntil()
@addCallbacks()
public async selectByText(locatable: NullableLocatable, text: string): Promise<string[]> {
const element = await this.findElementWithoutDecorator(locatable)
return this.evaluateWithoutDecorator(
(args: any[]) => {
const [element, text] = args as [HTMLSelectElement, string]
if (element.nodeName.toLowerCase() !== 'select')
throw new Error('Element is not a <select> element.')
const options = Array.from(element.options)
element.value = ''
for (const option of options)
option.selected = option.text === text || option.label === text
element.dispatchEvent(new Event('input', { bubbles: true }))
element.dispatchEvent(new Event('change', { bubbles: true }))
return options.filter((option) => option.selected).map((option) => option.value)
},
[element.element, text]
)
}
@autoWaitUntil()
@addCallbacks()
public async clear(locatable: NullableLocatable | string): Promise<void> {
const locator = locatableToLocator(locatable, 'browser.clear()')
const elements = await locator.findMany(this.page, undefined, this.target)
for (const element of elements) {
await element.clear()
}
}
@autoWaitUntil()
@addCallbacks()
public async type(
locatable: NullableLocatable,
text: string,
options?: { delay: number }
): Promise<void> {
const element = await this.findElementWithoutDecorator(locatable)
await element.focus()
return this.page.keyboard.type(text, options)
}
@addCallbacks()
public async press(keyCode: string, options?: { text?: string; delay?: number }): Promise<void> {
return this.page.keyboard.press(keyCode, options)
}
@addCallbacks()
public async sendKeys(...keys: string[]): Promise<void> {
const handle = this.page.keyboard
for (const key of keys) {
if (Object.values(Key).includes(key)) {
await handle.press(key)
} else {
await handle.type(key)
}
}
}
@addCallbacks()
public async sendKeyCombinations(...keys: string[]): Promise<void> {
const handle = this.page.keyboard
for (const key of keys) {
const keyCode = this.getKeyCode(key)
await handle.down(keyCode)
}
for (const key of keys.reverse()) {
const keyCode = this.getKeyCode(key)
await handle.up(keyCode)
}
}
@autoWaitUntil()
@addCallbacks()
public async blur(locatable: NullableLocatable): Promise<void> {
const element = await this.findElementWithoutDecorator(locatable)
return element.blur()
}
@autoWaitUntil()
@addCallbacks()
public async focus(locatable: NullableLocatable): Promise<void> {
const element = await this.findElementWithoutDecorator(locatable)
return element.focus()
}
@addCallbacks()
public async clearBrowserCookies(): Promise<any> {
await this.page.context().clearCookies()
}
@addCallbacks()
public async clearBrowserCache(): Promise<any> {
if (this.browser === 'chromium') {
const context = this.page.context() as ChromiumBrowserContext
const client = await context.newCDPSession(this.page)
await client.send('Network.clearBrowserCache')
}
}
@addCallbacks()
public async emulateDevice(device: DeviceDescriptor): Promise<void> {
await this.client.closePages()
const context = await this.client.browser.newContext({
...device,
})
this.client.page = await context.newPage()
}
@addCallbacks()
public async setUserAgent(userAgent: string): Promise<void> {
if (!this.client.isFirstRun()) {
await this.client.closePages()
this.client.page = await this.client.browser.newPage({ userAgent })
}
}
@addCallbacks()
public async setViewport(viewport: ViewportSize): Promise<void> {
return this.page.setViewportSize(viewport)
}
@addCallbacks()
public async setExtraHTTPHeaders(headers: { [key: string]: string }): Promise<void> {
if (Object.keys(headers).length) return this.page.setExtraHTTPHeaders(headers)
}
/**
* Takes a screenshot of this element and saves it to the results folder with a random name.
*/
@addCallbacks()
public async takeScreenshot(options?: ScreenshotOptions): Promise<void> {
await this.saveScreenshot(async (path) => {
await this.page.screenshot({ path, ...options })
if (!this.multipleUser) {
console.log(chalk.grey(`Screenshot saved in ${path}`))
}
return true
})
}
@autoWaitUntil()
@addCallbacks()
public async highlightElement(element: ElementHandle): Promise<void> {
return element.highlight()
}
@autoWaitUntil()
@addCallbacks()
public async findElement(locatable: NullableLocatable): Promise<ElementHandle> {
return this.findElementWithoutDecorator(locatable)
}
public async maybeFindElement(locatable: NullableLocatable): Promise<ElementHandle | null> {
if (locatable === null) {
return null
}
const locator = locatableToLocator(locatable, 'browser.maybeFindElement(locatable)')
const maybeElement = await locator.find(this.page, undefined, this.target)
if (!maybeElement) return null
const element = maybeElement as ElementHandle
element.bindBrowser(this)
return element
}
@autoWaitUntil()
@addCallbacks()
public async findElements(locatable: NullableLocatable): Promise<ElementHandle[]> {
const locator = locatableToLocator(locatable, 'browser.findElements(locatable)')
const elements = await locator.findMany(this.page, undefined, this.target)
elements.forEach((element) => element.bindBrowser(this))
return elements
}
/**
* Switch the focus of the browser to another frame or window
*/
public switchTo(): TargetLocator {
return new TargetLocator(
this.page,
this.target,
(frame) => {
this.activeFrame = frame
},
(page) => this.switchPage(page)
)
}
public async performanceTiming(): Promise<PerformanceTiming> {
return this.page.evaluate(() => performance.timing.toJSON())
}
public async navigationTiming(): Promise<PerformanceTiming> {
const data = await this.page.evaluate(() => JSON.stringify(window.performance.timing))
return JSON.parse(data.toString())
}
/**
* Fetches the paint performance timing entries
*/
public async paintTiming(): Promise<PerformanceEntry[]> {
const data = await this.page.evaluate(() =>
JSON.stringify(window.performance.getEntriesByType('paint'))
)
return JSON.parse(data.toString())
}
public async waitForNavigation(): Promise<any> {
return this.page.waitForNavigation({ waitUntil: 'domcontentloaded' })
}
// TODO fix this
public async interactionTiming(): Promise<number> {
// try {
// let polyfill = readFileSync(
// resolve(join(__dirname, '../extern/tti-polyfill-debug.js')),
// 'utf8',
// )
// await this.target.evaluate(polyfill)
// return await this.target.evaluate('window.ttiPolyfill.getFirstConsistentlyInteractive()')
// } catch (e) {
// console.warn('error getting interaction timing:', e)
// return 0
// }
return 0
}
public async setCacheDisabled(cacheDisabled = true): Promise<void> {
/**
* NOTES
* handle CDPSession
*/
if (this.browser === 'chromium') {
const client = await (this.page.context() as ChromiumBrowserContext).newCDPSession(this.page)
await client.send('Network.setCacheDisabled', { cacheDisabled })
} else {
console.warn('This kind of browser does not support CDPSession')
}
}
public fetchScreenshots(): string[] {
const screenshots = [...this.screenshots]
this.screenshots = []
return screenshots
}
public async saveScreenshot(fn: (path: string) => Promise<boolean>): Promise<void> {
const fileId = KSUID.randomSync().string
const path = this.workRoot.join('screenshots', `${fileId}.jpg`)
if (await fn(path)) {
this.screenshots.push(path)
}
}
private async switchPage(page: Page | number): Promise<void> {
if (typeof page === 'number') {
this.client.page = this.pages[page]
} else {
this.client.page = page
}
await this.client.page.bringToFront()
}
public async waitForNewPage(): Promise<Page> {
const newPage = await this.newPagePromise
// wait for another page to be opened
this.newPagePromise = new Promise((resolve) => {
this.newPageCallback(resolve)
})
return newPage
}
public async close(): Promise<void> {
await this.client.browser.close()
}
@addCallbacks()
public async getCookies(filterBy?: CookiesFilterParams): Promise<Cookie[]> {
const urls = filterBy?.urls
const names = filterBy?.names
let cookies = await this.page.context().cookies(urls)
if (names) {
cookies = cookies.filter((cookie) =>
typeof names === 'string' ? cookie.name === names : names.includes(cookie.name)
)
}
return cookies
}
public getUrl(): string {
return this.page.url()
}
private isCorrectScrollBehavior(behavior: string): behavior is ScrollBehavior {
return ['auto', 'smooth'].includes(behavior)
}
@addCallbacks()
public async scrollBy(
x: number | 'window.innerWidth',
y: number | 'window.innerHeight',
scrollOptions?: ScrollOptions
): Promise<void> {
const behavior = scrollOptions?.behavior ?? 'auto'
if (!this.isCorrectScrollBehavior(behavior)) {
throw new Error('The input behavior is not correct (Must be "auto" or "smooth").')
}
if (x !== 'window.innerWidth' && typeof x !== 'number') {
throw new Error(
'The input x that you want to scroll by must be "window.innerWidth" or a number.'
)
}
if (y !== 'window.innerHeight' && typeof y !== 'number') {
throw new Error(
'The input y that you want to scroll by must be "window.innerHeight" or a number.'
)
}
await this.page.evaluate(
({ x, y, behavior }) => {
window.scrollBy({
top: y === 'window.innerHeight' ? window.innerHeight : y,
left: x === 'window.innerWidth' ? window.innerWidth : x,
behavior,
})
},
{ x, y, behavior }
)
}
@addCallbacks()
public async scrollTo(
target: Locator | ElementHandle | Point | ScrollDirection,
scrollOptions?: ScrollIntoViewOptions
): Promise<void> {
const behavior = scrollOptions?.behavior ?? 'auto'
if (!this.isCorrectScrollBehavior(behavior)) {
throw new Error('The input behavior is not correct (Must be "auto" or "smooth").')
}
const block = scrollOptions?.block ?? 'start'
const inline = scrollOptions?.inline ?? 'nearest'
let top = 0
let left = 0
const [_scrollHeight, _currentTop, _scrollWidth] = await this.page.evaluate(() => [
document.body.scrollHeight,
window.pageYOffset || document.documentElement.scrollTop,
document.body.scrollWidth,
])
if (
(isLocator(target) || isAnElementHandle(target)) &&
!isPoint(target) &&
typeof target !== 'string'
) {
const targetEl: ElementHandle = isLocator(target)
? await this.findElementWithoutDecorator(target)
: target
await targetEl.element.evaluate(
(elementNode, scrollOptions) => {
const element = elementNode as HTMLElement
element.scrollIntoView(scrollOptions)
},
{ behavior, block, inline }
)
return
}
if (isPoint(target)) {
;[left, top] = target
} else if (typeof target === 'string') {
switch (target) {
case 'top':
top = 0
left = 0
break
case 'bottom':
top = _scrollHeight
left = 0
break
case 'left':
top = _currentTop
left = 0
break
case 'right':
top = _currentTop
left = _scrollWidth
break
default:
throw new Error(
'The input target is not a Locator or an ElementHandle or a Point or a Scroll Direction.'
)
}
} else {
throw new Error(
'The input target is not a Locator or an ElementHandle or a Point or a Scroll Direction.'
)
}
await this.page.evaluate(
({ top, left, behavior }) => {
window.scrollTo({ top, left, behavior })
},
{ top, left, behavior }
)
}
public getMimeType(filePath: string): string | false {
return mime.lookup(filePath)
}
private async evaluateWithoutDecorator(fn: EvaluateFn, ...args: any[]): Promise<any> {
return this.target.evaluate(fn, ...args)
}
private async findElementWithoutDecorator(locatable: NullableLocatable): Promise<ElementHandle> {
const locator = locatableToLocator(locatable, 'browser.findElement(locatable)')
debug('locator %o', locator)
const maybeElement = await locator.find(this.page, undefined, this.target)
if (!maybeElement) {
throw toLocatorError(locatable, 'browser.findElement()')
}
const element = maybeElement as ElementHandle
element.bindBrowser(this)
return element
}
private async waitWithoutDecorator(
timeoutOrCondition: Condition | number | string
): Promise<any> {
if (typeof timeoutOrCondition === 'string') {
await new Promise((yeah) => setTimeout(yeah, ms(timeoutOrCondition)))
return true
} else if (typeof timeoutOrCondition === 'number') {
let convertedTimeout = timeoutOrCondition
if (convertedTimeout < 0) convertedTimeout = DEFAULT_WAIT_TIMEOUT_MILLISECONDS
else if (convertedTimeout < 1e3) convertedTimeout *= 1e3
await new Promise((yeah) => setTimeout(yeah, convertedTimeout))
return true
}
debug('wait')
try {
const condition: Condition = timeoutOrCondition
condition.settings = this.settings
if (condition.hasWaitFor) {
return await condition.waitFor(this.target, this.page)
} else {
return await condition.waitForEvent(this.page)
}
} catch (err) {
debug('wait timed out')
throw new StructuredError<ActionErrorData>(
'wait timed out',
{
_kind: 'action',
kind: 'wait-timeout',
action: 'wait',
},
err
)
}
}
@autoWaitUntil()
@addCallbacks()
public async drag(from: ElementHandle, to: ElementHandle): Promise<void> {
const dataTransfer = await this.page.evaluateHandle(() => new DataTransfer())
await from.element.dispatchEvent('dragstart', { dataTransfer })
await to.element.dispatchEvent('drop', { dataTransfer })
}
} | the_stack |
import { Components, JSX } from '@bulmil/core';
/**
* Props accepted by this generated Svelte wrapper; each entry mirrors the
* corresponding property of the underlying Bulmil `bm-button` web component.
*/
interface BmButtonProps {
/** Is disabled? */
disabled?: Components.BmButton["disabled"]
/** Color */
color?: Components.BmButton["color"]
/** Size */
size?: Components.BmButton["size"]
/** Display the button in full-width */
isFullwidth?: Components.BmButton["isFullwidth"]
/** Is outlined */
isOutlined?: Components.BmButton["isOutlined"]
/** Is light */
isLight?: Components.BmButton["isLight"]
/** Is inverted */
isInverted?: Components.BmButton["isInverted"]
/** Rounded button */
isRounded?: Components.BmButton["isRounded"]
/** Hovered state */
isHovered?: Components.BmButton["isHovered"]
/** Focused state */
isFocused?: Components.BmButton["isFocused"]
/** Active state */
isActive?: Components.BmButton["isActive"]
/** Static */
isStatic?: Components.BmButton["isStatic"]
/** Loading state */
isLoading?: Components.BmButton["isLoading"]
}
/** Custom events exposed by the wrapper (none are declared). */
interface BmButtonEvents {
}
/** Slots exposed by the wrapper. */
interface BmButtonSlots {
/** Content rendered inside the <bm-button> element. */
default: any
}
/* generated by Svelte v3.42.6 */
import {
SvelteComponent,
binding_callbacks,
create_slot,
detach,
element,
flush,
get_all_dirty_from_scope,
get_slot_changes,
init,
insert,
safe_not_equal,
set_custom_element_data,
transition_in,
transition_out,
update_slot_base
} from "svelte/internal";
import { createEventDispatcher, onMount } from 'svelte';
/**
 * Svelte-generated fragment for the <bm-button> custom element: forwards
 * every prop as a custom-element attribute and projects the default slot.
 * Returns an object implementing Svelte's internal fragment contract
 * (c = create, m = mount, p = update, i = intro, o = outro, d = destroy).
 * Do not edit by hand — this code is emitted by the Svelte compiler and the
 * ctx indices / dirty bitmasks below are exact wiring.
 */
function create_fragment(ctx) {
let bm_button;
let current;
const default_slot_template = /*#slots*/ ctx[16].default;
const default_slot = create_slot(default_slot_template, ctx, /*$$scope*/ ctx[15], null);
return {
// create: build the element and set initial attribute values from ctx
c() {
bm_button = element("bm-button");
if (default_slot) default_slot.c();
set_custom_element_data(bm_button, "disabled", /*disabled*/ ctx[0]);
set_custom_element_data(bm_button, "color", /*color*/ ctx[1]);
set_custom_element_data(bm_button, "size", /*size*/ ctx[2]);
set_custom_element_data(bm_button, "is-fullwidth", /*isFullwidth*/ ctx[3]);
set_custom_element_data(bm_button, "is-outlined", /*isOutlined*/ ctx[4]);
set_custom_element_data(bm_button, "is-light", /*isLight*/ ctx[5]);
set_custom_element_data(bm_button, "is-inverted", /*isInverted*/ ctx[6]);
set_custom_element_data(bm_button, "is-rounded", /*isRounded*/ ctx[7]);
set_custom_element_data(bm_button, "is-hovered", /*isHovered*/ ctx[8]);
set_custom_element_data(bm_button, "is-focused", /*isFocused*/ ctx[9]);
set_custom_element_data(bm_button, "is-active", /*isActive*/ ctx[10]);
set_custom_element_data(bm_button, "is-static", /*isStatic*/ ctx[11]);
set_custom_element_data(bm_button, "is-loading", /*isLoading*/ ctx[12]);
},
// mount: insert into the DOM, mount the slot, register the element binding
m(target, anchor) {
insert(target, bm_button, anchor);
if (default_slot) {
default_slot.m(bm_button, null);
}
/*bm_button_binding*/ ctx[17](bm_button);
current = true;
},
// update: re-apply only the slot/attributes whose dirty bits are set
p(ctx, [dirty]) {
if (default_slot) {
if (default_slot.p && (!current || dirty & /*$$scope*/ 32768)) {
update_slot_base(
default_slot,
default_slot_template,
ctx,
/*$$scope*/ ctx[15],
!current
? get_all_dirty_from_scope(/*$$scope*/ ctx[15])
: get_slot_changes(default_slot_template, /*$$scope*/ ctx[15], dirty, null),
null
);
}
}
if (!current || dirty & /*disabled*/ 1) {
set_custom_element_data(bm_button, "disabled", /*disabled*/ ctx[0]);
}
if (!current || dirty & /*color*/ 2) {
set_custom_element_data(bm_button, "color", /*color*/ ctx[1]);
}
if (!current || dirty & /*size*/ 4) {
set_custom_element_data(bm_button, "size", /*size*/ ctx[2]);
}
if (!current || dirty & /*isFullwidth*/ 8) {
set_custom_element_data(bm_button, "is-fullwidth", /*isFullwidth*/ ctx[3]);
}
if (!current || dirty & /*isOutlined*/ 16) {
set_custom_element_data(bm_button, "is-outlined", /*isOutlined*/ ctx[4]);
}
if (!current || dirty & /*isLight*/ 32) {
set_custom_element_data(bm_button, "is-light", /*isLight*/ ctx[5]);
}
if (!current || dirty & /*isInverted*/ 64) {
set_custom_element_data(bm_button, "is-inverted", /*isInverted*/ ctx[6]);
}
if (!current || dirty & /*isRounded*/ 128) {
set_custom_element_data(bm_button, "is-rounded", /*isRounded*/ ctx[7]);
}
if (!current || dirty & /*isHovered*/ 256) {
set_custom_element_data(bm_button, "is-hovered", /*isHovered*/ ctx[8]);
}
if (!current || dirty & /*isFocused*/ 512) {
set_custom_element_data(bm_button, "is-focused", /*isFocused*/ ctx[9]);
}
if (!current || dirty & /*isActive*/ 1024) {
set_custom_element_data(bm_button, "is-active", /*isActive*/ ctx[10]);
}
if (!current || dirty & /*isStatic*/ 2048) {
set_custom_element_data(bm_button, "is-static", /*isStatic*/ ctx[11]);
}
if (!current || dirty & /*isLoading*/ 4096) {
set_custom_element_data(bm_button, "is-loading", /*isLoading*/ ctx[12]);
}
},
// intro: transition the slot content in
i(local) {
if (current) return;
transition_in(default_slot, local);
current = true;
},
// outro: transition the slot content out
o(local) {
transition_out(default_slot, local);
current = false;
},
// destroy: detach the element, tear down the slot, clear the binding
d(detaching) {
if (detaching) detach(bm_button);
if (default_slot) default_slot.d(detaching);
/*bm_button_binding*/ ctx[17](null);
}
};
}
// Svelte-compiler output (custom-element wrapper for <bm-button>).
// Generated code: the numeric ctx indexes used below are a contract shared
// with create_fragment and the BmButton class — do not reorder by hand.
function instance($$self, $$props, $$invalidate) {
	let { $$slots: slots = {}, $$scope } = $$props;
	// DOM reference to the rendered <bm-button> element (ctx[13]).
	let __ref;
	let __mounted = false;
	const dispatch = createEventDispatcher();
	// Public props (ctx[0]..ctx[12]); all default to undefined so that unset
	// attributes are simply not rendered on the custom element.
	let { disabled = undefined } = $$props;
	let { color = undefined } = $$props;
	let { size = undefined } = $$props;
	let { isFullwidth = undefined } = $$props;
	let { isOutlined = undefined } = $$props;
	let { isLight = undefined } = $$props;
	let { isInverted = undefined } = $$props;
	let { isRounded = undefined } = $$props;
	let { isHovered = undefined } = $$props;
	let { isFocused = undefined } = $$props;
	let { isActive = undefined } = $$props;
	let { isStatic = undefined } = $$props;
	let { isLoading = undefined } = $$props;
	// Exposed as a read-only accessor on BmButton (ctx[14]); returns the raw
	// underlying web component element.
	const getWebComponent = () => __ref;
	onMount(() => {
		__mounted = true;
	});
	// Writes a property directly onto the web component instance.
	// NOTE(review): __mounted, setProp and onEvent are not referenced in the
	// visible portion of this generated file — presumably used by other
	// compiled code paths; confirm before removing.
	const setProp = (prop, value) => {
		if (__ref) $$invalidate(13, __ref[prop] = value, __ref);
	};
	// Re-dispatches a DOM event from the web component as a Svelte event.
	const onEvent = e => {
		e.stopPropagation();
		dispatch(e.type, e.detail);
	};
	// bind:this callback wiring __ref to the rendered element (ctx[17]).
	function bm_button_binding($$value) {
		binding_callbacks[$$value ? 'unshift' : 'push'](() => {
			__ref = $$value;
			$$invalidate(13, __ref);
		});
	}
	// Prop-change handler: maps incoming $$props onto the reactive ctx slots.
	$$self.$$set = $$props => {
		if ('disabled' in $$props) $$invalidate(0, disabled = $$props.disabled);
		if ('color' in $$props) $$invalidate(1, color = $$props.color);
		if ('size' in $$props) $$invalidate(2, size = $$props.size);
		if ('isFullwidth' in $$props) $$invalidate(3, isFullwidth = $$props.isFullwidth);
		if ('isOutlined' in $$props) $$invalidate(4, isOutlined = $$props.isOutlined);
		if ('isLight' in $$props) $$invalidate(5, isLight = $$props.isLight);
		if ('isInverted' in $$props) $$invalidate(6, isInverted = $$props.isInverted);
		if ('isRounded' in $$props) $$invalidate(7, isRounded = $$props.isRounded);
		if ('isHovered' in $$props) $$invalidate(8, isHovered = $$props.isHovered);
		if ('isFocused' in $$props) $$invalidate(9, isFocused = $$props.isFocused);
		if ('isActive' in $$props) $$invalidate(10, isActive = $$props.isActive);
		if ('isStatic' in $$props) $$invalidate(11, isStatic = $$props.isStatic);
		if ('isLoading' in $$props) $$invalidate(12, isLoading = $$props.isLoading);
		if ('$$scope' in $$props) $$invalidate(15, $$scope = $$props.$$scope);
	};
	// The ctx array: positions must stay in sync with the prop index map
	// passed to init() in the BmButton constructor.
	return [
		disabled,
		color,
		size,
		isFullwidth,
		isOutlined,
		isLight,
		isInverted,
		isRounded,
		isHovered,
		isFocused,
		isActive,
		isStatic,
		isLoading,
		__ref,
		getWebComponent,
		$$scope,
		slots,
		bm_button_binding
	];
}
/**
 * Svelte component class wrapping the <bm-button> custom element
 * (compiler-generated). The numeric values in the prop map passed to init()
 * are ctx-array indexes and must match the array returned by instance().
 */
class BmButton extends SvelteComponent {
	$$prop_def: BmButtonProps;
	$$events_def: BmButtonEvents;
	$$slot_def: BmButtonSlots;
	// Typed overrides narrowing the generic Svelte component API to this
	// component's declared events and props.
	$on<K extends keyof BmButtonEvents>(type: K, callback: (e: BmButtonEvents[K]) => any): () => void {
		return super.$on(type, callback);
	}
	$set($$props: Partial<BmButtonProps>): void {
		super.$set($$props);
	}
	constructor(options) {
		super();
		init(this, options, instance, create_fragment, safe_not_equal, {
			disabled: 0,
			color: 1,
			size: 2,
			isFullwidth: 3,
			isOutlined: 4,
			isLight: 5,
			isInverted: 6,
			isRounded: 7,
			isHovered: 8,
			isFocused: 9,
			isActive: 10,
			isStatic: 11,
			isLoading: 12,
			getWebComponent: 14
		});
	}
	// Accessor pairs below follow one pattern: the getter reads the prop
	// straight out of the ctx array; the setter routes through $$set and
	// calls flush() so the update is applied synchronously.
	get disabled() {
		return this.$$.ctx[0];
	}
	set disabled(disabled) {
		this.$$set({ disabled });
		flush();
	}
	get color() {
		return this.$$.ctx[1];
	}
	set color(color) {
		this.$$set({ color });
		flush();
	}
	get size() {
		return this.$$.ctx[2];
	}
	set size(size) {
		this.$$set({ size });
		flush();
	}
	get isFullwidth() {
		return this.$$.ctx[3];
	}
	set isFullwidth(isFullwidth) {
		this.$$set({ isFullwidth });
		flush();
	}
	get isOutlined() {
		return this.$$.ctx[4];
	}
	set isOutlined(isOutlined) {
		this.$$set({ isOutlined });
		flush();
	}
	get isLight() {
		return this.$$.ctx[5];
	}
	set isLight(isLight) {
		this.$$set({ isLight });
		flush();
	}
	get isInverted() {
		return this.$$.ctx[6];
	}
	set isInverted(isInverted) {
		this.$$set({ isInverted });
		flush();
	}
	get isRounded() {
		return this.$$.ctx[7];
	}
	set isRounded(isRounded) {
		this.$$set({ isRounded });
		flush();
	}
	get isHovered() {
		return this.$$.ctx[8];
	}
	set isHovered(isHovered) {
		this.$$set({ isHovered });
		flush();
	}
	get isFocused() {
		return this.$$.ctx[9];
	}
	set isFocused(isFocused) {
		this.$$set({ isFocused });
		flush();
	}
	get isActive() {
		return this.$$.ctx[10];
	}
	set isActive(isActive) {
		this.$$set({ isActive });
		flush();
	}
	get isStatic() {
		return this.$$.ctx[11];
	}
	set isStatic(isStatic) {
		this.$$set({ isStatic });
		flush();
	}
	get isLoading() {
		return this.$$.ctx[12];
	}
	set isLoading(isLoading) {
		this.$$set({ isLoading });
		flush();
	}
	// Read-only access to the underlying web component element (ctx[14]).
	get getWebComponent(): HTMLBmButtonElement | undefined {
		return this.$$.ctx[14];
	}
}
export default BmButton;
import { RowModel } from '..'
import { TableFeature } from '../core/instance'
import { BuiltInFilterFn, filterFns } from '../filterFns'
import {
Column,
OnChangeFn,
TableGenerics,
TableInstance,
Row,
Updater,
} from '../types'
import {
functionalUpdate,
isFunction,
makeStateUpdater,
memo,
Overwrite,
} from '../utils'
/** Slice of table state owned by the filtering feature. */
export type FiltersTableState = {
  columnFilters: ColumnFiltersState
  globalFilter: any
}

/** Ordered list of the currently active per-column filters. */
export type ColumnFiltersState = ColumnFilter[]

/** One column's filter entry: the column id and the raw filter value. */
export type ColumnFilter = {
  id: string
  value: unknown
}

/** A column filter after its value and filter function have been resolved. */
export type ResolvedColumnFilter<TGenerics extends TableGenerics> = {
  id: string
  resolvedValue: unknown
  filterFn: FilterFn<TGenerics>
}

/**
 * Row-level filter predicate: return true to keep the row. `addMeta` lets a
 * filter attach per-row metadata. The optional statics allow a filter to
 * pre-transform its value (`resolveFilterValue`) and to declare when it
 * should be removed from state entirely (`autoRemove`).
 */
export type FilterFn<TGenerics extends TableGenerics> = {
  (
    row: Row<TGenerics>,
    columnId: string,
    filterValue: any,
    addMeta: (meta: TGenerics['FilterMeta']) => void
  ): boolean
  resolveFilterValue?: TransformFilterValueFn<TGenerics>
  autoRemove?: ColumnFilterAutoRemoveTestFn<TGenerics>
}

/** Optional hook transforming a filter value before it is applied. */
export type TransformFilterValueFn<TGenerics extends TableGenerics> = (
  value: any,
  column?: Column<TGenerics>
) => unknown

/** Predicate deciding whether a filter value warrants removing the filter. */
export type ColumnFilterAutoRemoveTestFn<TGenerics extends TableGenerics> = (
  value: any,
  column?: Column<TGenerics>
) => boolean

/** User-registered filter functions, keyed by name. */
export type CustomFilterFns<TGenerics extends TableGenerics> = Record<
  string,
  FilterFn<TGenerics>
>

/**
 * How a filter fn may be specified on a column def: 'auto' (inferred from the
 * data), a built-in name, a user-registered name, or an inline function.
 */
export type FilterFnOption<TGenerics extends TableGenerics> =
  | 'auto'
  | BuiltInFilterFn
  | keyof TGenerics['FilterFns']
  | FilterFn<TGenerics>

/** Filtering-related options on a column definition. */
export type FiltersColumnDef<TGenerics extends TableGenerics> = {
  filterFn?: FilterFnOption<Overwrite<TGenerics, { Value: any }>>
  enableColumnFilter?: boolean
  enableGlobalFilter?: boolean
}

/** Filtering API added to each column instance (underscore-prefixed members
 * are memoized internals). */
export type FiltersColumn<TGenerics extends TableGenerics> = {
  getAutoFilterFn: () => FilterFn<TGenerics> | undefined
  getFilterFn: () => FilterFn<TGenerics> | undefined
  setFilterValue: (updater: Updater<any>) => void
  getCanFilter: () => boolean
  getCanGlobalFilter: () => boolean
  getFacetedRowModel: () => RowModel<TGenerics>
  _getFacetedRowModel?: () => RowModel<TGenerics>
  getIsFiltered: () => boolean
  getFilterValue: () => unknown
  getFilterIndex: () => number
  getFacetedUniqueValues: () => Map<any, number>
  _getFacetedUniqueValues?: () => Map<any, number>
  getFacetedMinMaxValues: () => undefined | [number, number]
  _getFacetedMinMaxValues?: () => undefined | [number, number]
}

/** Per-row filtering bookkeeping: pass/fail per column plus filter metadata. */
export type FiltersRow<TGenerics extends TableGenerics> = {
  columnFilters: Record<string, boolean>
  columnFiltersMeta: Record<string, TGenerics['FilterMeta']>
}

/** Table-level filtering options. */
export type FiltersOptions<TGenerics extends TableGenerics> = {
  enableFilters?: boolean
  manualFiltering?: boolean
  filterFromLeafRows?: boolean
  filterFns?: TGenerics['FilterFns']
  getFilteredRowModel?: (instance: TableInstance<any>) => () => RowModel<any>
  // Column
  onColumnFiltersChange?: OnChangeFn<ColumnFiltersState>
  enableColumnFilters?: boolean
  // Global
  globalFilterFn?: FilterFnOption<TGenerics>
  onGlobalFilterChange?: OnChangeFn<any>
  enableGlobalFilter?: boolean
  getColumnCanGlobalFilter?: (column: Column<TGenerics>) => boolean
  // Faceting
  getFacetedRowModel?: (
    instance: TableInstance<TGenerics>,
    columnId: string
  ) => () => RowModel<TGenerics>
  getFacetedUniqueValues?: (
    instance: TableInstance<TGenerics>,
    columnId: string
  ) => () => Map<any, number>
  getFacetedMinMaxValues?: (
    instance: TableInstance<TGenerics>,
    columnId: string
  ) => () => undefined | [number, number]
}

/** Filtering API added to the table instance itself. */
export type FiltersInstance<TGenerics extends TableGenerics> = {
  setColumnFilters: (updater: Updater<ColumnFiltersState>) => void
  resetColumnFilters: (defaultState?: boolean) => void
  // Column Filters
  getPreFilteredRowModel: () => RowModel<TGenerics>
  getFilteredRowModel: () => RowModel<TGenerics>
  _getFilteredRowModel?: () => RowModel<TGenerics>
  // Global Filters
  setGlobalFilter: (updater: Updater<any>) => void
  resetGlobalFilter: (defaultState?: boolean) => void
  getGlobalAutoFilterFn: () => FilterFn<TGenerics> | undefined
  getGlobalFilterFn: () => FilterFn<TGenerics> | undefined
  getGlobalFacetedRowModel: () => RowModel<TGenerics>
  _getGlobalFacetedRowModel?: () => RowModel<TGenerics>
  getGlobalFacetedUniqueValues: () => Map<any, number>
  _getGlobalFacetedUniqueValues?: () => Map<any, number>
  getGlobalFacetedMinMaxValues: () => undefined | [number, number]
  _getGlobalFacetedMinMaxValues?: () => undefined | [number, number]
}
//
/**
 * Table feature implementing column filtering, global filtering and faceting.
 * Wires default options/state, per-column filter APIs and instance-level
 * filter APIs onto the table.
 */
export const Filters: TableFeature = {
  getDefaultColumnDef: <
    TGenerics extends TableGenerics
  >(): FiltersColumnDef<TGenerics> => {
    // Columns resolve their filter function lazily by default (getFilterFn).
    return {
      filterFn: 'auto',
    }
  },

  getInitialState: (state): FiltersTableState => {
    return {
      columnFilters: [],
      globalFilter: undefined,
      // filtersProgress: 1,
      // facetProgress: {},
      ...state,
    }
  },

  getDefaultOptions: <TGenerics extends TableGenerics>(
    instance: TableInstance<TGenerics>
  ): FiltersOptions<TGenerics> => {
    return {
      onColumnFiltersChange: makeStateUpdater('columnFilters', instance),
      onGlobalFilterChange: makeStateUpdater('globalFilter', instance),
      filterFromLeafRows: false,
      globalFilterFn: 'auto',
      // By default only string-valued columns participate in global
      // filtering; the value type is probed from the first core-model row.
      getColumnCanGlobalFilter: column => {
        const value = instance
          .getCoreRowModel()
          .flatRows[0]?._getAllCellsByColumnId()
          [column.id]?.getValue()
        return typeof value === 'string'
      },
    }
  },

  createColumn: <TGenerics extends TableGenerics>(
    column: Column<TGenerics>,
    instance: TableInstance<TGenerics>
  ): FiltersColumn<TGenerics> => {
    return {
      // Infers a built-in filter fn from the type of this column's value in
      // the first row of the core row model.
      getAutoFilterFn: () => {
        const firstRow = instance.getCoreRowModel().flatRows[0]
        const value = firstRow?.getValue(column.id)

        if (typeof value === 'string') {
          return filterFns.includesString
        }
        if (typeof value === 'number') {
          return filterFns.inNumberRange
        }
        if (typeof value === 'boolean') {
          return filterFns.equals
        }
        // Arrays must be tested before the generic object check below:
        // `typeof [] === 'object'`, so with the previous ordering this
        // branch was unreachable and array columns wrongly got `equals`.
        if (Array.isArray(value)) {
          return filterFns.arrIncludes
        }
        if (value !== null && typeof value === 'object') {
          return filterFns.equals
        }
        return filterFns.weakEquals
      },
      // Resolves the configured filterFn: inline functions win, 'auto'
      // defers to getAutoFilterFn, otherwise user-registered fns are looked
      // up before the built-ins.
      getFilterFn: () => {
        const userFilterFns = instance.options.filterFns

        return isFunction(column.columnDef.filterFn)
          ? column.columnDef.filterFn
          : column.columnDef.filterFn === 'auto'
          ? column.getAutoFilterFn()
          : (userFilterFns as Record<string, any>)?.[
              column.columnDef.filterFn as string
            ] ??
            (filterFns[
              column.columnDef.filterFn as BuiltInFilterFn
            ] as FilterFn<TGenerics>)
      },
      // A column is filterable only if every relevant enable flag allows it
      // and the column actually derives a value (has an accessor).
      getCanFilter: () => {
        return (
          (column.columnDef.enableColumnFilter ?? true) &&
          (instance.options.enableColumnFilters ?? true) &&
          (instance.options.enableFilters ?? true) &&
          !!column.accessorFn
        )
      },
      getCanGlobalFilter: () => {
        return (
          (column.columnDef.enableGlobalFilter ?? true) &&
          (instance.options.enableGlobalFilter ?? true) &&
          (instance.options.enableFilters ?? true) &&
          (instance.options.getColumnCanGlobalFilter?.(column) ?? true) &&
          !!column.accessorFn
        )
      },
      getIsFiltered: () => column.getFilterIndex() > -1,
      getFilterValue: () =>
        instance.getState().columnFilters?.find(d => d.id === column.id)?.value,
      getFilterIndex: () =>
        instance.getState().columnFilters?.findIndex(d => d.id === column.id) ??
        -1,
      // Applies an updater to this column's filter value, removing the
      // filter entirely when the new value qualifies for auto-removal.
      setFilterValue: value => {
        instance.setColumnFilters(old => {
          const filterFn = column.getFilterFn()
          const previousFilter = old?.find(d => d.id === column.id)

          const newFilter = functionalUpdate(
            value,
            previousFilter ? previousFilter.value : undefined
          )

          if (
            shouldAutoRemoveFilter(
              filterFn as FilterFn<TGenerics>,
              newFilter,
              column
            )
          ) {
            return old?.filter(d => d.id !== column.id) ?? []
          }

          const newFilterObj = { id: column.id, value: newFilter }

          if (previousFilter) {
            // Replace the existing entry in place to preserve filter order.
            return (
              old?.map(d => {
                if (d.id === column.id) {
                  return newFilterObj
                }
                return d
              }) ?? []
            )
          }

          if (old?.length) {
            return [...old, newFilterObj]
          }

          return [newFilterObj]
        })
      },
      _getFacetedRowModel:
        instance.options.getFacetedRowModel &&
        instance.options.getFacetedRowModel(instance, column.id),
      getFacetedRowModel: () => {
        if (!column._getFacetedRowModel) {
          return instance.getPreFilteredRowModel()
        }
        return column._getFacetedRowModel()
      },
      _getFacetedUniqueValues:
        instance.options.getFacetedUniqueValues &&
        instance.options.getFacetedUniqueValues(instance, column.id),
      getFacetedUniqueValues: () => {
        if (!column._getFacetedUniqueValues) {
          return new Map()
        }
        return column._getFacetedUniqueValues()
      },
      _getFacetedMinMaxValues:
        instance.options.getFacetedMinMaxValues &&
        instance.options.getFacetedMinMaxValues(instance, column.id),
      getFacetedMinMaxValues: () => {
        if (!column._getFacetedMinMaxValues) {
          return undefined
        }
        return column._getFacetedMinMaxValues()
      },
      // () => [column.getFacetedRowModel()],
      // facetedRowModel => getRowModelMinMaxValues(facetedRowModel, column.id),
    }
  },

  createRow: <TGenerics extends TableGenerics>(
    row: Row<TGenerics>,
    instance: TableInstance<TGenerics>
  ): FiltersRow<TGenerics> => {
    return {
      columnFilters: {},
      columnFiltersMeta: {},
    }
  },

  createInstance: <TGenerics extends TableGenerics>(
    instance: TableInstance<TGenerics>
  ): FiltersInstance<TGenerics> => {
    return {
      getGlobalAutoFilterFn: () => {
        return filterFns.includesString
      },
      getGlobalFilterFn: () => {
        const { filterFns: userFilterFns, globalFilterFn } = instance.options

        return isFunction(globalFilterFn)
          ? globalFilterFn
          : globalFilterFn === 'auto'
          ? instance.getGlobalAutoFilterFn()
          : (userFilterFns as Record<string, any>)?.[
              globalFilterFn as string
            ] ??
            (filterFns[
              globalFilterFn as BuiltInFilterFn
            ] as FilterFn<TGenerics>)
      },
      // Applies an updater to columnFilters, dropping any entries whose
      // value now qualifies for auto-removal.
      setColumnFilters: (updater: Updater<ColumnFiltersState>) => {
        const leafColumns = instance.getAllLeafColumns()

        const updateFn = (old: ColumnFiltersState) => {
          return functionalUpdate(updater, old)?.filter(filter => {
            const column = leafColumns.find(d => d.id === filter.id)

            if (column) {
              const filterFn = column.getFilterFn()

              if (shouldAutoRemoveFilter(filterFn, filter.value, column)) {
                return false
              }
            }

            return true
          })
        }

        instance.options.onColumnFiltersChange?.(updateFn)
      },
      setGlobalFilter: updater => {
        instance.options.onGlobalFilterChange?.(updater)
      },
      // `defaultState === true` clears the filter; otherwise restores the
      // value from initialState.
      resetGlobalFilter: defaultState => {
        instance.setGlobalFilter(
          defaultState ? undefined : instance.initialState.globalFilter
        )
      },
      resetColumnFilters: defaultState => {
        instance.setColumnFilters(
          defaultState ? [] : instance.initialState?.columnFilters ?? []
        )
      },
      getPreFilteredRowModel: () => instance.getCoreRowModel(),
      _getFilteredRowModel:
        instance.options.getFilteredRowModel &&
        instance.options.getFilteredRowModel(instance),
      getFilteredRowModel: () => {
        // Manual filtering (e.g. server-side) bypasses the client row model.
        if (
          instance.options.manualFiltering ||
          !instance._getFilteredRowModel
        ) {
          return instance.getPreFilteredRowModel()
        }
        return instance._getFilteredRowModel()
      },
      _getGlobalFacetedRowModel:
        instance.options.getFacetedRowModel &&
        instance.options.getFacetedRowModel(instance, '__global__'),
      getGlobalFacetedRowModel: () => {
        if (
          instance.options.manualFiltering ||
          !instance._getGlobalFacetedRowModel
        ) {
          return instance.getPreFilteredRowModel()
        }
        return instance._getGlobalFacetedRowModel()
      },
      _getGlobalFacetedUniqueValues:
        instance.options.getFacetedUniqueValues &&
        instance.options.getFacetedUniqueValues(instance, '__global__'),
      getGlobalFacetedUniqueValues: () => {
        if (!instance._getGlobalFacetedUniqueValues) {
          return new Map()
        }
        return instance._getGlobalFacetedUniqueValues()
      },
      _getGlobalFacetedMinMaxValues:
        instance.options.getFacetedMinMaxValues &&
        instance.options.getFacetedMinMaxValues(instance, '__global__'),
      getGlobalFacetedMinMaxValues: () => {
        if (!instance._getGlobalFacetedMinMaxValues) {
          return
        }
        return instance._getGlobalFacetedMinMaxValues()
      },
    }
  },
}
export function shouldAutoRemoveFilter<TGenerics extends TableGenerics>(
filterFn?: FilterFn<TGenerics>,
value?: any,
column?: Column<TGenerics>
) {
return (
(filterFn && filterFn.autoRemove
? filterFn.autoRemove(value, column)
: false) ||
typeof value === 'undefined' ||
(typeof value === 'string' && !value)
)
} | the_stack |
import { sp } from "@pnp/sp";
import "@pnp/sp/profiles";
import { IUserInfo } from "../Entities/IUserInfo";
import { WebPartContext } from "@microsoft/sp-webpart-base";
import { MSGraphClient } from "@microsoft/sp-http";
import { IUserPresence } from "../Entities/IUserPresence";
import * as _ from "lodash";
import { PnPClientStorage } from "@pnp/common";
/*************************************************************************************/
// Hook to get user profile information
//*************************************************************************************/
export const useGetUserProperties = async (
userEmail: string,
context?: WebPartContext
): Promise<any> => {
/*************************************************************************************/
// vars and const
//*************************************************************************************/
const _MSGraphClient: MSGraphClient = await context.msGraphClientFactory.getClient();
const loginName: string = `i:0#.f|membership|${userEmail}`;
const storage = new PnPClientStorage();
let _managersList: IUserInfo[] = [];
let _reportsList: IUserInfo[] = [];
/* sp.setup({
spfxContext: context
}); */
/*************************************************************************************/
// Functions
//*************************************************************************************//
// function Get UserId from UserProfileProperties
//*************************************************************************************//
const getUserId = async (userProfileProperties: any[]): Promise<string> => {
// Get User Properties
let props = {};
userProfileProperties.forEach((prop) => {
props[prop.Key] = prop.Value;
});
// Get UserID
return props["msOnline-ObjectId"];
};
//*************************************************************************************//
// functions convert image to Base64
//*************************************************************************************//
const getImageBase64 = (pictureUrl: string, userId:string): Promise<string> => {
return new Promise((resolve, reject) => {
const value = storage.local.get(userId);
if (value) {
resolve(value);
}
let image = new Image();
image.addEventListener("load", () => {
let tempCanvas = document.createElement("canvas");
(tempCanvas.width = image.width),
(tempCanvas.height = image.height),
tempCanvas.getContext("2d").drawImage(image, 0, 0);
let base64Str;
try {
base64Str = tempCanvas.toDataURL("image/png");
} catch (e) {
return "";
}
storage.local.put(userId, base64Str);
resolve(base64Str);
});
image.src = pictureUrl;
});
};
//*************************************************************************************//
// function Get Users Presence
//*************************************************************************************//
const getUserPresence = async (
userObjIds: string[]
): Promise<IUserPresence[]> => {
// Get presences for Users Ids
const _presence: any = await _MSGraphClient
.api(`/communications/getPresencesByUserId`)
.version("beta")
.post({ ids: userObjIds });
return _presence.value;
};
//*************************************************************************************//
// Get User Managers
//*************************************************************************************//
const getManagers = async (extendedManagers:string[] ) => {
let _managersObjIds: string[] = [];
// Get Managers
for (const _manager of extendedManagers) {
// get Profile for Manager
const _profile: any = await sp.profiles
.usingCaching()
.getPropertiesFor(_manager);
// Get Object Id from userProperties
const _managerObjId: string = await getUserId(
_profile.UserProfileProperties
);
_managersObjIds.push(_managerObjId);
// Add manager to list
_managersList.push({
displayName: _profile.DisplayName as string,
email: _profile.Email as string,
title: _profile.Title as string,
pictureUrl: await getImageBase64(
`/_layouts/15/userphoto.aspx?size=M&accountname=${_profile.Email}`,_managerObjId
),
id: _managerObjId,
// presence: await getUserPresence(_profile.UserProfileProperties),
userUrl: _profile.UserUrl,
});
}
//*************************************************************************************//
// Get presence for all managers
//*************************************************************************************//
const _managersPresences: IUserPresence[] = await getUserPresence(
_managersObjIds
);
await updateManagersPresence(_managersPresences);
};
//************************************************************************************//
// function Update List os Managers with presence status
//************************************************************************************//
const updateManagersPresence = async (_managersPresences: IUserPresence[]):Promise<IUserInfo[]> => {
if (_managersPresences.length > 0) {
for (const _presence of _managersPresences) {
const i = _.findIndex(_managersList, (v) => {
return v.id == _presence.id;
});
_managersList[i] = {
..._managersList[i],
presence: {
activity: _presence.activity,
availability: _presence.availability,
},
};
}
}
return _managersList;
};
//************************************************************************************//
// Get Direct Reports
//*************************************************************************************//
const getDirectReports = async (directReports:string[] ) => {
let _userReportObjIds: string[] = [];
for (const _userReport of directReports) {
const _profile: any = await sp.profiles
.usingCaching()
.getPropertiesFor(_userReport);
const _userReportObjId: string = await getUserId(
_profile.UserProfileProperties
);
_userReportObjIds.push(_userReportObjId);
// Get Presence to the user
_reportsList.push({
displayName: _profile.DisplayName as string,
email: _profile.Email as string,
title: _profile.Title as string,
pictureUrl: await getImageBase64(
`/_layouts/15/userphoto.aspx?size=M&accountname=${_profile.Email}`, _userReportObjId
),
id: _userReportObjId,
userUrl: _profile.UserUrl,
});
}
//*************************************************************************************//
// Get presence for all direct Reports and update list
//*************************************************************************************//
const _directReportsPresences: IUserPresence[] = await getUserPresence(
_userReportObjIds
);
// Update Array of direct reports with presence
await updateDirectReportsPresence(_directReportsPresences);
};
//*************************************************************************************//
// Funcion Update List os Direct Reports with presence status
//*************************************************************************************//
const updateDirectReportsPresence = async (directReportsPresences: IUserPresence[]):Promise<IUserInfo[]> => {
// Update Array of direct reports with presence
if (directReportsPresences.length > 0) {
for (const _presence of directReportsPresences) {
const i = _.findIndex(_reportsList, (v) => {
return v.id == _presence.id;
});
_reportsList[i] = {
..._reportsList[i],
presence: {
activity: _presence.activity,
availability: _presence.availability,
},
};
}
}
return _reportsList;
};
//*************************************************************************************//
// Get news status of managers and direct Reports
//*************************************************************************************//
const getPresenceStatus = async ( managersList: IUserInfo[],reportsList:IUserInfo[], currentUserProfile: any):Promise<any> => {
const _managersObjIds:string[] = [];
const _userReportObjIds:string[] = [];
// get managersObjIds
for (const _manager of managersList){
_managersObjIds.push(_manager.id);
}
// get new status of Managers and update list os managers
const _managersPresences: IUserPresence[] = await getUserPresence(_managersObjIds);
managersList = await updateManagersPresence(_managersPresences);
// Get objsId of DirectReports
for (const _userReport of reportsList){
_userReportObjIds.push(_userReport.id);
}
// get new status of Direct Reports and update list of direct Reports
const _directReportsPresences: IUserPresence[] = await getUserPresence(
_userReportObjIds
);
reportsList = await updateDirectReportsPresence(_directReportsPresences);
// Get update status for current user
const _currentUserPresenceUpd: IUserPresence[] = await getUserPresence([currentUserProfile.id]);
currentUserProfile.presence = { activity: _currentUserPresenceUpd[0].activity, availability: _currentUserPresenceUpd[0].availability};
return { managersList, currentUserProfile, reportsList };
};
//*************************************************************************************//
// End Functions
//*************************************************************************************//
//*************************************************************************************//
// MAIN - Get Current User Profile
//*************************************************************************************//
const _currentUserProfile: any = await sp.profiles
.usingCaching()
.getPropertiesFor(loginName);
console.log(_currentUserProfile);
// get Managers and Direct Reports
const _extendedManagers: string[] = _currentUserProfile.ExtendedManagers;
const _directReports: string[] = _currentUserProfile.DirectReports;
// Get userObjId
const _currentUserObjId: string = await getUserId( _currentUserProfile.UserProfileProperties);
_currentUserProfile.id = _currentUserObjId;
// Get Current user Picture and User Presence
_currentUserProfile.PictureUrl = await getImageBase64(
`/_layouts/15/userphoto.aspx?size=M&accountname=${_currentUserProfile.Email}`, _currentUserObjId);
// get Current User Presence
const _currentUserPresence: IUserPresence[] = await getUserPresence([_currentUserObjId]);
_currentUserProfile.presence = { activity: _currentUserPresence[0].activity, availability: _currentUserPresence[0].availability};
// Get Manager if exists
if (_extendedManagers.length > 0) await getManagers(_extendedManagers);
// Get Direct Reports if exists
if (_directReports.length > 0) await getDirectReports(_directReports);
//*************************************************************************************//
// Return objects
// _managersList , _currentUserProfile , DirectReports
//*************************************************************************************//
return { _managersList, _currentUserProfile, _reportsList, getPresenceStatus };
}; | the_stack |
import {Injectable} from "@angular/core";
import {DbAbstractionLayer} from "@nodeart/dal/index";
/**
* Product service. Has different methods for manipulating with products and its properties.
*/
@Injectable()
export class ProductService{
/**
* ElasticSearch index name. For default "firebase"
*/
public esIndex = 'firebase';
/**
 * @param dal data-access layer used to execute every ElasticSearch query
 */
constructor(private dal: DbAbstractionLayer){
}
/**
* Get product by id
* @param {string} id id of product
*/
getOneProduct(id: string){
  // Exact-match lookup on the document id.
  const queryObject = {
    query: {
      term: { _id: id }
    }
  };
  return this.dal.requestFullData(this.esIndex, 'product', queryObject);
}
/**
* Return Observable array of products
* @param {string} categoryId products category id
* @param {Array} currentPrice ranges of price(minimum price, maximum price)
* @param {Array} attributes attributes ids array
* @param {Array} tags tags ids array
* @param {integer} size number of returned products
* @param {integer} from offset of products
*
* @returns {Observable} Observable of products
*/
searchProducts(categoryId: string, currentPrice, attributes: any[], tags: any[], size: number, from: number){
  // bool.must = [ attribute/tag "should" clauses, category match, price range ].
  let queryObject = {
    size: size,
    from: from,
    query: {
      bool: {
        must: [
          {
            bool: {
              should: []
            }
          },
          {
            match: {
              "category": categoryId
            }
          },
          {
            "range": {
              "price": {
                "gte": currentPrice[0],
                "lte": currentPrice[1]
              }
            }
          }
        ]
      }
    }
  };
  // Each selected attribute value matches against its own attribute field.
  for (const attribute of attributes) {
    queryObject.query.bool.must[0]['bool'].should.push({
      multi_match: {
        query: attribute['valueName'],
        fields: ["attributes." + attribute['attrId']]
      }
    });
  }
  // Tags all match against the shared "tags" field by id.
  for (const tag of tags) {
    queryObject.query.bool.must[0]['bool'].should.push({
      multi_match: {
        query: tag['id'],
        fields: ["tags"]
      }
    });
  }
  return this.dal.requestFullData(this.esIndex, 'product', queryObject);
}
/**
* Return price ranges of products in specific category. Properties:
* ```
* 1. max_price
* 2. min_price
* ```
* @param {string} categoryId category id
* @returns {Observable} Observable of object {max_price, min_price}
*/
getPriceRanges(categoryId){
  // min/max aggregations over the price field, scoped to the category.
  const queryObject = {
    aggs: {
      max_price: { max: { field: "price" } },
      min_price: { min: { field: "price" } }
    },
    query: {
      bool: {
        should: {
          match: {
            category: categoryId
          }
        }
      }
    }
  };
  return this.dal.requestFullData(this.esIndex, 'product', queryObject);
}
/**
* Return filtered products with price ranges
* @param {Array} priceRange first element - min price, second - max price
* @param {Array} attributes
* @param {Array} tags array of tags ids
* @param {integer} size number of returned products
* @param {integer} from offset of products
*
* @returns {Observable} Observable of products
*/
filterProducts(priceRange: number[], attributes: any[], tags: any[], size, offset){
  // Build the optional "should" clauses first: one per selected attribute
  // value, then one per selected tag id.
  const shouldClauses = [];
  attributes.forEach(attribute => {
    shouldClauses.push({
      multi_match: {
        query: attribute['valueName'],
        fields: ["attributes." + attribute['attrId']]
      }
    });
  });
  tags.forEach(tag => {
    shouldClauses.push({
      multi_match: {
        query: tag['id'],
        fields: ["tags"]
      }
    });
  });
  // Combine with the mandatory price range under bool.must.
  const queryObject = {
    size: size,
    from: offset,
    query: {
      bool: {
        must: [
          {
            bool: {
              should: shouldClauses
            }
          },
          {
            "range": {
              "price": {
                "gte": priceRange[0],
                "lte": priceRange[1]
              }
            }
          }
        ]
      }
    }
  };
  return this.dal.requestFullData(this.esIndex, 'product', queryObject);
}
/**
* Return general category data by id
* @param {string} generalCategoryId general category id
*
* @returns {Observable} Observable of general category data
*/
getGeneralCategory(generalCategoryId){
  // Exact-match lookup; the id is coerced to string for the term query.
  const queryObj = {
    query: {
      term: { _id: generalCategoryId.toString() }
    }
  };
  return this.dal.requestData(this.esIndex, 'general-category', queryObj);
}
/**
* Retunr gategory data by id
* @param categoryId category id
*
* @returns {Observable} Observable of category data
*/
getCategory(categoryId){
  // Exact-match lookup; the id is coerced to string for the term query.
  const queryObj = {
    query: {
      term: { _id: categoryId.toString() }
    }
  };
  return this.dal.requestFullData(this.esIndex, 'category', queryObj);
}
/**
* Return all general categories
*
* @returns {Observable} Observable array of general categories
*/
getGeneralCategories(){
  // Unfiltered listing of every general category.
  const queryObj = {
    query: { match_all: {} }
  };
  return this.dal.requestData(this.esIndex, 'general-category', queryObj);
}
/**
* Return categories by general category id
* @param {string} categoryId category id
*
* @returns {Observable} Observable array of categories
*/
getCategories(categoryId){
  let queryObj;
  if (categoryId == '') {
    // Root categories are those without a parentId.
    // NOTE(review): `filtered` queries were removed in Elasticsearch 5.x —
    // this shape only works against ES 2.x; confirm the target cluster.
    queryObj = {
      query: {
        filtered: {
          filter: {
            bool: {
              must_not: [
                {
                  exists: {
                    field: "parentId"
                  }
                }
              ]
            }
          }
        }
      }
    };
  } else {
    queryObj = {
      query: {
        term: {
          parentId: categoryId
        }
      }
    };
  }
  return this.dal.requestData(this.esIndex, 'category', queryObj);
}
/**
* Get products from multiple categories
* @param {Array} categoryIds array of category ids
* @param {integer} size number of returned products
* @param {integer} from offset of products
*
* @returns {Observable} Observable of array of products
*/
getProductsByCategoryIds(categoryIds, size, from){
  // constant_score: pure filtering, no relevance scoring needed here.
  const queryObj = {
    size,
    from,
    query: {
      constant_score: {
        filter: {
          terms: { category: categoryIds }
        }
      }
    }
  };
  return this.dal.requestData(this.esIndex, 'product', queryObj);
}
/**
* Return products by part of name. You pass part of name and method return Observable
* of all product that match your query
* @param {string} query part of product name
* @param {integer} size number of returned products
*
* @returns {Observable} Observable of data
*/
getProducts(query, size?){
  const queryObj: any = {
    "query": {
      "bool": {
        "must": []
      }
    }
  };
  if (size) {
    queryObj.size = size;
  }
  // One case-insensitive prefix wildcard per whitespace-separated term; all
  // terms must match. (Was `Array.map` used purely for side effects and a
  // reassigned parameter.)
  for (const rawTerm of query.split(' ')) {
    // Strip parentheses — reserved characters in ElasticSearch queries.
    const term = rawTerm.replace(/\(|\)/g, '');
    queryObj.query.bool.must.push({
      "wildcard": {
        "name": term.toLowerCase() + "*"
      }
    });
  }
  return this.dal.requestFullData(this.esIndex, 'product', queryObj);
}
/**
 * Count products in a category that match the given price range and, via a
 * bool/should clause, any of the selected attribute values or tags.
 * size: 0 — only the total is requested, no hits.
 * @param {string} categoryId category id
 * @param {Array} currentPrice ranges of price (minimum price, maximum price)
 * @param {Array} attributes attribute filters ({attrId, valueName} objects)
 * @param {Array} tags tag filters ({id} objects)
 *
 * @returns {Observable} Observable of total items
 */
getTotalPages(categoryId, currentPrice, attributes: any[], tags: any[]){
  const queryObject = {
    size: 0,
    query: {
      bool: {
        must: [
          // Slot 0: should-clauses for attribute/tag filters, filled below.
          {
            bool: {
              should: []
            }
          },
          {
            match: {
              "category": categoryId
            }
          },
          {
            "range": {
              "price": {
                "gte": currentPrice[0],
                "lte": currentPrice[1]
              }
            }
          }
        ]
      }
    }
  };
  const should = queryObject.query.bool.must[0]['bool'].should;
  // Attribute filters match against the dynamic "attributes.<attrId>" field.
  for (const attr of attributes) {
    should.push({
      multi_match: {
        query: attr['valueName'],
        fields: ["attributes." + attr['attrId']]
      }
    });
  }
  // Tag filters all target the flat "tags" field.
  for (const tag of tags) {
    should.push({
      multi_match: {
        query: tag['id'],
        fields: ["tags"]
      }
    });
  }
  // Fixed: removed leftover debug console.log of the query body.
  return this.dal.requestItemsTotal(this.esIndex, 'product', queryObject);
}
/**
 * Count products across several categories (size: 0 — only the total is
 * returned, no hits).
 * @param {Array} categoryIds array of category ids
 *
 * @returns {Observable} Observable of total items
 */
getTotalPagesByCategoryIds(categoryIds){
  const countQuery = {
    size: 0,
    query: {
      constant_score: {
        filter: { terms: { category: categoryIds } }
      }
    }
  };
  return this.dal.requestItemsTotal(this.esIndex, 'product', countQuery);
}
/**
 * Fetch a single attribute document by id.
 * @param {string} attributeId attribute id
 *
 * @returns {Observable} Observable of attribute data
 */
getOneAttribute(attributeId){
  const body = { "query": { "term": { "_id": attributeId } } };
  return this.dal.requestData(this.esIndex, 'attributes', body);
}
/**
 * Fetch a single tag document by id.
 * @param {string} tagId tag id
 *
 * @returns {Observable} Observable of tag data
 */
getOneTag(tagId){
  const body = { "query": { "term": { "_id": tagId } } };
  return this.dal.requestData(this.esIndex, 'tags', body);
}
}
import { Component, OnInit, Input } from "@angular/core";
import { ActivatedRoute, Router } from "@angular/router";
import { GlobalVarsService } from "../global-vars.service";
import { BackendApiService, ProfileEntryResponse } from "../backend-api.service";
import { sprintf } from "sprintf-js";
import { SwalHelper } from "../../lib/helpers/swal-helper";
import * as _ from "lodash";
import { Title } from "@angular/platform-browser";
import { environment } from "src/environments/environment";
/** User-facing error strings shared by the admin component's alert paths. */
class Messages {
  static INCORRECT_PASSWORD = "The password you entered was incorrect.";
  static CONNECTION_PROBLEM = "There is currently a connection problem. Is your connection to your node healthy?";
  static INSUFFICIENT_BALANCE = "You don't have enough DeSo to process the transaction. Try reducing the fee rate.";
  static SEND_DESO_MIN = "You must send a non-zero amount of DeSo";
  static INVALID_PUBLIC_KEY = "The public key you entered is invalid";
}
// TODO: Cleanup - separate this into multiple components
@Component({
selector: "admin",
templateUrl: "./admin.component.html",
styleUrls: ["./admin.component.scss"],
})
export class AdminComponent implements OnInit {
// Exposed for template bindings; assigned from the injected service in the constructor.
globalVars: GlobalVarsService;
// --- Post feeds: the raw lists plus whichever one is currently displayed. ---
adminPosts = [];
adminPostsByDESO = [];
activePosts = [];
activeTab: string;
activePostTab: string;
// --- Loading flags driving spinners. ---
loadingPosts = false;
loadingMorePosts = false;
loadingMempoolStats = true;
loadingGlobalParams = true;
loadingPostsByDESO = false;
searchingForPostsByDESO = false;
@Input() isMobile = false;
// --- Moderation form inputs: each accepts a public key or a username. ---
blacklistPubKeyOrUsername = "";
graylistPubKeyOrUsername = "";
unrestrictPubKeyOrUsername = "";
whitelistPubKeyOrUsername = "";
unwhitelistPubKeyOrUsername = "";
removePhonePubKeyorUsername = "";
// --- One-shot success indicators and the timers that clear them. ---
updateProfileSuccessType = "";
whitelistUpdateSuccess = false;
unwhitelistUpdateSuccess = false;
clearSuccessTimeout: any;
whitelistSuccessTimeout: any;
unwhitelistSuccessTimeout: any;
// --- In-flight request flags (disable buttons while a request runs). ---
submittingProfileUpdateType = "";
submittingBlacklistUpdate = false;
submittingGraylistUpdate = false;
submittingUnrestrictUpdate = false;
submittingWhitelistUpdate = false;
submittingUnwhitelistUpdate = false;
submittingEvictUnminedBitcoinTxns = false;
submittingUSDToDeSoReserveExchangeRateUpdate = false;
submittingBuyDeSoFeeRate = false;
submittingRemovePhone = false;
// --- "DB details" panel state and the user-metadata dump it shows. ---
dbDetailsOpen = false;
dbDetailsLoading = false;
userMetadataMap = {};
usernameMap = {};
userMetadataMapLength = 0;
// --- Mempool / node stats shown on the Mempool and Network tabs. ---
mempoolSummaryStats: any = {};
mempoolTxnCount = 0;
bitcoinExchangeRate: number;
// Display units: USD per DeSo and percent, respectively (converted from
// cents / basis points when loaded).
usdToDeSoReserveExchangeRate: number;
buyDeSoFeeRate: number;
updatingBitcoinExchangeRate = false;
updatingGlobalParams = false;
updatingUSDToBitcoin = false;
updatingCreateProfileFee = false;
updatingMinimumNetworkFee = false;
updatingMaxCopiesPerNFT = false;
updatingCreateNFTFeeNanos = false;
feeRateDeSoPerKB = (1000 / 1e9).toFixed(9); // Default fee rate.
bitcoinBlockHashOrHeight = "";
evictBitcoinTxnHashes = "";
usernameToVerify = "";
usernameForWhomToRemoveVerification = "";
usernameToFetchVerificationAuditLogs = "";
removingNilPosts = false;
submittingReprocessRequest = false;
submittingRemovalRequest = false;
submittingVerifyRequest = false;
mempoolTotalBytes = 0;
loadingNextBlockStats = false;
nextBlockStats: any = {};
// Last-loaded global params (display units) and the editable form copy.
globalParams: any = {};
updateGlobalParamsValues = {
USDPerBitcoin: 0,
CreateProfileFeeNanos: 0,
MinimumNetworkFeeNanosPerKB: 0,
MaxCopiesPerNFT: 0,
CreateNFTFeeNanos: 0,
};
verifiedUsers: any = [];
usernameVerificationAuditLogs: any = [];
loadingVerifiedUsers = false;
loadingVerifiedUsersAuditLog = false;
// Tab names; ngOnInit appends super-admin-only tabs when permitted.
adminTabs = ["Posts", "Profiles", "NFTs", "Tutorial", "Network", "Mempool", "Wyre", "Jumio", "Referral Program"];
POSTS_TAB = "Posts";
POSTS_BY_DESO_TAB = "Posts By DESO";
adminPostTabs = [this.POSTS_TAB, this.POSTS_BY_DESO_TAB];
// Fields for SwapIdentity
submittingSwapIdentity = false;
swapIdentityFromUsernameOrPublicKey = "";
swapIdentityToUsernameOrPublicKey = "";
// Fields for UpdateUsername
submittingUpdateUsername = false;
changeUsernamePublicKey = "";
usernameTarget = "";
userMetadataToUpdate = null;
userProfileEntryResponseToUpdate: ProfileEntryResponse = null;
searchedForPubKey = false;
// These are the options used to populate the dropdown for selecting a time window over which we want to fetch
// posts ordered by deso. Keys are labels, values are minutes.
timeWindowOptions = {
"15m": 15,
"30m": 30,
"60m": 60,
};
// This is a variable to track the currently selected time window.
selectedTimeWindow = 60;
// Fields for getting user admin data
submittingGetUserAdminData = false;
getUserAdminDataPublicKey = "";
getUserAdminDataResponse = null;
constructor(
private _globalVars: GlobalVarsService,
private router: Router,
private route: ActivatedRoute,
private backendApi: BackendApiService,
private titleService: Title
) {
// Re-expose the injected service on a public field so templates can bind to it.
this.globalVars = _globalVars;
}
ngOnInit() {
// Super admins get two extra tabs.
if (this.globalVars.showSuperAdminTools()) {
this.adminTabs.push("Super");
this.adminTabs.push("Node Fees");
}
// Restore the active tab from the URL (supports deep-linking); default to Posts.
this.route.queryParams.subscribe((queryParams) => {
if (queryParams.adminTab) {
this.activeTab = queryParams.adminTab;
} else {
this.activeTab = "Posts";
}
});
// load data
this._updateNodeInfo();
// Get some posts to show the user. Kick off both feeds; the post sub-tab
// defaults to the plain Posts feed.
this.loadingPosts = true;
this.activePostTab = this.POSTS_TAB;
this._loadPosts();
this._loadPostsByDESO();
// Get the latest mempool stats.
this._loadMempoolStats();
this._loadNextBlockStats();
this._loadGlobalParams();
// Get current fee percentage and reserve USD to DeSo exchange price.
this._loadBuyDeSoFeeRate();
this._loadUSDToDeSoReserveExchangeRate();
this.titleService.setTitle(`Admin - ${environment.node.name}`);
}
/**
 * Refresh globalVars.nodeInfo from the node's "get_info" control endpoint.
 * No-op when nobody is logged in; clears nodeInfo on request failure.
 */
_updateNodeInfo() {
  const user = this.globalVars.loggedInUser;
  if (!user) {
    return;
  }
  this.backendApi
    .NodeControl(this.globalVars.localNode, user.PublicKeyBase58Check, "", "get_info")
    .subscribe(
      (res: any) => {
        // Ignore malformed responses; keep whatever nodeInfo we already had.
        if (res == null || res.DeSoStatus == null) {
          return;
        }
        this.globalVars.nodeInfo = res;
      },
      (error) => {
        console.error(error);
        this.globalVars.nodeInfo = null;
      }
    );
}
/**
 * Select a top-level admin tab and mirror the selection into the URL's
 * `adminTab` query param so the tab survives reloads and can be deep-linked.
 *
 * Fixed: the clicked tabName was previously ignored and the stale
 * this.activeTab value was written to the URL.
 */
_tabClicked(tabName: any) {
  this.activeTab = tabName;
  this.router.navigate([], {
    relativeTo: this.route,
    queryParams: { adminTab: tabName },
    queryParamsHandling: "merge",
  });
}
/** Select a post sub-tab and swap in the matching post list. */
_postTabClicked(postTabName: string) {
  this.activePostTab = postTabName;
  this.activePosts = postTabName === this.POSTS_BY_DESO_TAB ? this.adminPostsByDESO : this.adminPosts;
}
/** Flag that an explicit search is in flight, then refetch the DESO-ordered feed. */
_searchPostsByDESO() {
  this.searchingForPostsByDESO = true;
  this._loadPostsByDESO();
}
/**
 * Fetch the "Posts By DESO" feed for the currently selected time window and,
 * if that sub-tab is active, display it. Resets both the loading and the
 * searching flags when the request settles.
 * NOTE(review): GetPostsStateless takes a long positional argument list; the
 * inline comments below come from the original author — verify against the
 * BackendApiService signature before reordering anything.
 */
_loadPostsByDESO() {
this.loadingPostsByDESO = true;
// Get the reader's public key for the request.
let readerPubKey = "";
if (this.globalVars.loggedInUser) {
readerPubKey = this.globalVars.loggedInUser.PublicKeyBase58Check;
}
// Fall back to the default window if the dropdown was cleared.
if (!this.selectedTimeWindow) {
this.selectedTimeWindow = 60;
}
this.backendApi
.GetPostsStateless(
this.globalVars.localNode,
"" /*PostHash*/,
readerPubKey /*ReaderPublicKeyBase58Check*/,
"" /*OrderBy*/,
parseInt(this.globalVars.filterType) /*StartTstampSecs*/,
"",
50 /*NumToFetch*/,
false /*FetchSubcomments*/,
false /*GetPostsForFollowFeed*/,
false /*GetPostsForGlobalWhitelist*/,
true,
false /*MediaRequired*/,
this.selectedTimeWindow,
true /*AddGlobalFeedBool*/
)
.subscribe(
(res) => {
this.adminPostsByDESO = res.PostsFound;
// Only swap the visible list if the user is still on this sub-tab.
if (this.activePostTab === this.POSTS_BY_DESO_TAB) {
this.activePosts = this.adminPostsByDESO;
}
},
(err) => {
console.error(err);
this.globalVars._alertError("Error loading posts: " + this.backendApi.stringifyError(err));
}
)
.add(() => {
this.loadingPostsByDESO = false;
this.searchingForPostsByDESO = false;
});
}
/**
 * Fetch the newest-first admin post feed. When posts are already loaded this
 * acts as a "load more": it pages from the last known post hash and appends.
 * NOTE(review): positional-argument call mirrors _loadPostsByDESO — keep the
 * inline parameter comments in sync with BackendApiService.
 */
_loadPosts() {
this.loadingMorePosts = true;
// Get the reader's public key for the request.
let readerPubKey = "";
if (this.globalVars.loggedInUser) {
readerPubKey = this.globalVars.loggedInUser.PublicKeyBase58Check;
}
// Get the last post hash in case this is a "load more" request.
let lastPostHash = "";
if (this.adminPosts.length > 0) {
lastPostHash = this.adminPosts[this.adminPosts.length - 1].PostHashHex;
}
this.backendApi
.GetPostsStateless(
this.globalVars.localNode,
lastPostHash /*PostHash*/,
readerPubKey /*ReaderPublicKeyBase58Check*/,
"newest" /*OrderBy*/,
parseInt(this.globalVars.filterType) /*StartTstampSecs*/,
"",
50 /*NumToFetch*/,
false /*FetchSubcomments*/,
false /*GetPostsForFollowFeed*/,
false /*GetPostsForGlobalWhitelist*/,
false,
false /*MediaRequired*/,
0,
true /*AddGlobalFeedBool*/
)
.subscribe(
(res) => {
// Append on "load more", replace on first load.
if (lastPostHash != "") {
this.adminPosts = this.adminPosts.concat(res.PostsFound);
} else {
this.adminPosts = res.PostsFound;
}
// Only swap the visible list if the user is still on this sub-tab.
if (this.activePostTab === this.POSTS_TAB) {
this.activePosts = this.adminPosts;
}
},
(err) => {
console.error(err);
this.globalVars._alertError("Error loading posts: " + this.backendApi.stringifyError(err));
}
)
.add(() => {
this.loadingMorePosts = false;
this.loadingPosts = false;
});
}
/**
 * Fetch mempool transaction summary stats and derive the aggregate txn count
 * and total byte size shown on the Mempool tab.
 */
_loadMempoolStats() {
  console.log("Loading mempool stats...");
  this.loadingMempoolStats = true;
  this.backendApi
    .AdminGetMempoolStats(this.globalVars.localNode, this.globalVars.loggedInUser.PublicKeyBase58Check)
    .subscribe(
      (res) => {
        this.mempoolSummaryStats = res.TransactionSummaryStats;
        // Sum the per-txn-type Count / TotalBytes fields into the headline numbers.
        this.mempoolTxnCount = _.sumBy(Object.values(this.mempoolSummaryStats), (o) => {
          return o["Count"];
        });
        this.mempoolTotalBytes = _.sumBy(Object.values(this.mempoolSummaryStats), (o) => {
          return o["TotalBytes"];
        });
      },
      (err) => {
        console.log(err);
      }
    )
    .add(() => {
      // Fixed: this previously re-set the flag to true, so the mempool stats
      // spinner never cleared once the request settled.
      this.loadingMempoolStats = false;
    });
}
/** Fetch the list of verified usernames for the verification admin panel. */
_loadVerifiedUsers() {
  console.log("Loading verified users...");
  this.loadingVerifiedUsers = true;
  this.backendApi
    .AdminGetVerifiedUsers(this.globalVars.localNode, this.globalVars.loggedInUser.PublicKeyBase58Check)
    .subscribe(
      (res) => (this.verifiedUsers = res.VerifiedUsers),
      (err) => console.log(err)
    )
    .add(() => (this.loadingVerifiedUsers = false));
}
/** Fetch the verification audit log for the username typed into the admin form. */
_loadVerifiedUsersAuditLog() {
  console.log("Loading username verification audit log...");
  this.loadingVerifiedUsersAuditLog = true;
  this.backendApi
    .AdminGetUsernameVerificationAuditLogs(
      this.globalVars.localNode,
      this.globalVars.loggedInUser.PublicKeyBase58Check,
      this.usernameToFetchVerificationAuditLogs
    )
    .subscribe(
      (res) => {
        this.usernameVerificationAuditLogs = res.VerificationAuditLogs;
        console.log(this.usernameVerificationAuditLogs);
      },
      (err) => console.log(err)
    )
    .add(() => (this.loadingVerifiedUsersAuditLog = false));
}
/**
 * Fetch the latest block-template stats for the Network tab.
 * GetBlockTemplate requires a public key, so a throwaway key is used,
 * selected by network (main vs test).
 */
_loadNextBlockStats() {
  console.log("Loading stats for next block...");
  this.loadingNextBlockStats = true;
  const dummyPubKey = this.globalVars.isTestnet
    ? "tBCKYKKdGQpCUYaG2pGy6LcNDeydSXYRHV4phywuc6bZANavsx3Y5f"
    : "BC1YLgqAkAJ4sX2YGD85j9rEpTqDrAkgLoXwv6oTzaCyZt3cDpqk8hy";
  this.backendApi
    .GetBlockTemplate(this.globalVars.localNode, dummyPubKey)
    .subscribe(
      (res) => (this.nextBlockStats = res.LatestBlockTemplateStats),
      (err) => console.log(err)
    )
    .add(() => (this.loadingNextBlockStats = false));
}
/**
 * Fetch chain-wide global params and convert them to display units
 * (USD cents -> USD, nanos -> DeSo), then seed the edit form with a copy.
 */
_loadGlobalParams() {
  this.loadingGlobalParams = true;
  this.backendApi
    .GetGlobalParams(this.globalVars.localNode, this.globalVars.loggedInUser.PublicKeyBase58Check)
    .subscribe(
      (res) => {
        this.globalParams = {
          USDPerBitcoin: res.USDCentsPerBitcoin / 100,
          CreateProfileFeeNanos: res.CreateProfileFeeNanos / 1e9,
          MinimumNetworkFeeNanosPerKB: res.MinimumNetworkFeeNanosPerKB,
          MaxCopiesPerNFT: res.MaxCopiesPerNFT,
          CreateNFTFeeNanos: res.CreateNFTFeeNanos / 1e9,
        };
        // Fixed: copy instead of aliasing. The form model previously shared
        // the same object as this.globalParams, so editing any form field
        // silently mutated the "saved" params that updateGlobalParams()
        // reads back (e.g. MinimumNetworkFeeNanosPerKB).
        this.updateGlobalParamsValues = { ...this.globalParams };
      },
      (err) => {
        this.globalVars._alertError("Error global params: " + this.backendApi.stringifyError(err));
      }
    )
    .add(() => {
      this.loadingGlobalParams = false;
    });
}
/** Fetch the node's Buy-DeSo fee and store it as a percentage (100 bp == 1%). */
_loadBuyDeSoFeeRate(): void {
  this.backendApi.GetBuyDeSoFeeBasisPoints(this.globalVars.localNode).subscribe(
    (res) => {
      this.buyDeSoFeeRate = res.BuyDeSoFeeBasisPoints / 100;
    },
    (err) => {
      console.log(err);
    }
  );
}
/** Fetch the USD-to-DeSo reserve exchange rate and store it in dollars (not cents). */
_loadUSDToDeSoReserveExchangeRate(): void {
  this.backendApi.GetUSDCentsToDeSoReserveExchangeRate(this.globalVars.localNode).subscribe(
    (res) => {
      this.usdToDeSoReserveExchangeRate = res.USDCentsPerDeSo / 100;
    },
    (err) => {
      console.log(err);
    }
  );
}
/**
 * Toggle the "DB details" panel. Closing is immediate; opening first loads up
 * to 1000 users' global metadata and only opens the panel on success.
 */
_toggleDbDetails() {
  if (this.dbDetailsLoading) {
    // If we are currently loading the db details, return.
    return;
  }
  if (this.dbDetailsOpen) {
    // If the db details are open close them and return.
    this.dbDetailsOpen = false;
    return;
  }
  // If we pass the above checks, load the db details.
  this.dbDetailsLoading = true;
  this.backendApi
    .AdminGetAllUserGlobalMetadata(
      this.globalVars.localNode,
      this.globalVars.loggedInUser.PublicKeyBase58Check,
      1000 // NumToFetch
    )
    .subscribe(
      (res) => {
        this.userMetadataMap = res.PubKeyToUserGlobalMetadata;
        this.usernameMap = res.PubKeyToUsername;
        this.userMetadataMapLength = Object.keys(this.userMetadataMap).length;
        // Fixed: only open the panel when the data actually arrived. It was
        // previously opened in .add(), i.e. even after a failed request,
        // showing an empty details panel.
        this.dbDetailsOpen = true;
      },
      (err) => {
        this.globalVars._alertError(JSON.stringify(err.error));
      }
    )
    .add(() => {
      this.dbDetailsLoading = false;
    });
}
/**
 * Ask the node to remove nil posts (batch of 1000).
 */
_removeNilPosts() {
  this.removingNilPosts = true;
  this.backendApi
    .AdminRemoveNilPosts(this.globalVars.localNode, this.globalVars.loggedInUser.PublicKeyBase58Check, 1000)
    .subscribe(
      () => {},
      (err) => {
        this.globalVars._alertError(JSON.stringify(err.error));
      }
    )
    .add(() => {
      // Fixed: the flag was only reset in the success handler, so a failed
      // request left the "removing" state stuck forever.
      this.removingNilPosts = false;
    });
}
/**
 * Apply a moderation level ("blacklist", "graylist", or "unrestrict") to the
 * user identified by the corresponding form field. Sets a per-level success
 * marker for one second and clears the form field when the request settles.
 */
updateProfileModerationLevel(level: string) {
let targetPubKeyOrUsername = "";
let pubKey = "";
let username = "";
let removeEverywhere = false;
let removeFromLeaderboard = false;
this.updateProfileSuccessType = "";
clearTimeout(this.clearSuccessTimeout);
// Determine what variables to set based on the button pressed.
if (level === "blacklist") {
console.log("Blacklisting Pub Key: " + this.blacklistPubKeyOrUsername);
targetPubKeyOrUsername = this.blacklistPubKeyOrUsername;
removeEverywhere = true;
removeFromLeaderboard = true;
this.submittingBlacklistUpdate = true;
} else if (level === "graylist") {
console.log("Graylisting Pub Key: " + this.graylistPubKeyOrUsername);
targetPubKeyOrUsername = this.graylistPubKeyOrUsername;
removeEverywhere = false;
removeFromLeaderboard = true;
this.submittingGraylistUpdate = true;
} else if (level === "unrestrict") {
console.log("Unrestricting Pub Key: " + this.unrestrictPubKeyOrUsername);
targetPubKeyOrUsername = this.unrestrictPubKeyOrUsername;
removeEverywhere = false;
removeFromLeaderboard = false;
this.submittingUnrestrictUpdate = true;
} else {
console.log("Cannot set moderation level to: " + level);
return;
}
// Decipher whether the target string is a pub key or username.
if (this.globalVars.isMaybePublicKey(targetPubKeyOrUsername)) {
pubKey = targetPubKeyOrUsername;
} else {
username = targetPubKeyOrUsername;
}
// Fire off the request.
// NOTE(review): the six trailing booleans are positional flags; the first
// `true` presumably marks this as a moderation update and the final three
// `false`s cover the whitelist/phone variants — confirm against the
// AdminUpdateUserGlobalMetadata signature before reordering.
this.submittingProfileUpdateType = level;
this.backendApi
.AdminUpdateUserGlobalMetadata(
this.globalVars.localNode,
this.globalVars.loggedInUser.PublicKeyBase58Check,
pubKey,
username,
true,
removeEverywhere,
removeFromLeaderboard,
false,
false,
false
)
.subscribe(
(res) => {
// Show the success indicator for one second.
this.updateProfileSuccessType = level;
this.clearSuccessTimeout = setTimeout(() => {
this.updateProfileSuccessType = "";
}, 1000);
},
(err) => {
this.globalVars._alertError(JSON.stringify(err.error));
}
)
.add(() => {
// Reset the per-level submitting flag and clear the matching form field.
if (level === "blacklist") {
this.submittingBlacklistUpdate = false;
this.blacklistPubKeyOrUsername = "";
} else if (level === "graylist") {
this.submittingGraylistUpdate = false;
this.graylistPubKeyOrUsername = "";
} else if (level === "unrestrict") {
this.submittingUnrestrictUpdate = false;
this.unrestrictPubKeyOrUsername = "";
}
});
}
/**
 * Add the user identified by the whitelist form field to the global feed
 * whitelist. Shows a one-second success indicator and clears the field when
 * the request settles.
 */
whitelistClicked() {
let pubKey = "";
let username = "";
this.submittingWhitelistUpdate = true;
clearTimeout(this.whitelistSuccessTimeout);
// Decipher whether the target string is a pub key or username.
if (this.globalVars.isMaybePublicKey(this.whitelistPubKeyOrUsername)) {
pubKey = this.whitelistPubKeyOrUsername;
} else {
username = this.whitelistPubKeyOrUsername;
}
// NOTE(review): positional boolean flags; this call differs from
// unwhitelistClicked only in the second-to-last argument (true here) —
// confirm meanings against AdminUpdateUserGlobalMetadata's signature.
this.backendApi
.AdminUpdateUserGlobalMetadata(
this.globalVars.localNode,
this.globalVars.loggedInUser.PublicKeyBase58Check,
pubKey,
username,
false,
false,
false,
true,
true,
false
)
.subscribe(
(res) => {
this.whitelistUpdateSuccess = true;
this.whitelistSuccessTimeout = setTimeout(() => {
this.whitelistUpdateSuccess = false;
}, 1000);
},
(err) => {
this.globalVars._alertError(JSON.stringify(err.error));
}
)
.add(() => {
this.submittingWhitelistUpdate = false;
this.whitelistPubKeyOrUsername = "";
});
}
/**
 * Remove the user identified by the unwhitelist form field from the global
 * feed whitelist. Shows a one-second success indicator and clears the field
 * when the request settles.
 */
unwhitelistClicked() {
let pubKey = "";
let username = "";
this.submittingUnwhitelistUpdate = true;
clearTimeout(this.unwhitelistSuccessTimeout);
// Decipher whether the target string is a pub key or username.
if (this.globalVars.isMaybePublicKey(this.unwhitelistPubKeyOrUsername)) {
pubKey = this.unwhitelistPubKeyOrUsername;
} else {
username = this.unwhitelistPubKeyOrUsername;
}
// NOTE(review): positional boolean flags; mirrors whitelistClicked with the
// second-to-last argument flipped to false — confirm against the API.
this.backendApi
.AdminUpdateUserGlobalMetadata(
this.globalVars.localNode,
this.globalVars.loggedInUser.PublicKeyBase58Check,
pubKey,
username,
false,
false,
false,
true,
false,
false
)
.subscribe(
(res) => {
this.unwhitelistUpdateSuccess = true;
this.unwhitelistSuccessTimeout = setTimeout(() => {
this.unwhitelistUpdateSuccess = false;
}, 1000);
},
(err) => {
this.globalVars._alertError(JSON.stringify(err.error));
}
)
.add(() => {
this.submittingUnwhitelistUpdate = false;
this.unwhitelistPubKeyOrUsername = "";
});
}
/**
 * Remove the phone number associated with the user identified by the
 * remove-phone form field. Shows a one-second "phone" success indicator.
 */
submitRemovePhoneNumber() {
const targetPubKeyOrUsername = this.removePhonePubKeyorUsername;
let pubKey = "";
let username = "";
// Decipher whether the target string is a pub key or username.
if (this.globalVars.isMaybePublicKey(targetPubKeyOrUsername)) {
pubKey = targetPubKeyOrUsername;
} else {
username = targetPubKeyOrUsername;
}
this.submittingRemovePhone = true;
// NOTE(review): positional boolean flags; only the final argument (true)
// distinguishes this from a no-op — presumably the remove-phone flag.
// Confirm against AdminUpdateUserGlobalMetadata's signature.
this.backendApi
.AdminUpdateUserGlobalMetadata(
this.globalVars.localNode,
this.globalVars.loggedInUser.PublicKeyBase58Check,
pubKey,
username,
false,
false,
false,
false,
false,
true
)
.subscribe(
(res) => {
this.updateProfileSuccessType = "phone";
this.clearSuccessTimeout = setTimeout(() => {
this.updateProfileSuccessType = "";
}, 1000);
},
(err) => {
this.globalVars._alertError(JSON.stringify(err.error));
}
)
.add(() => {
this.submittingRemovePhone = false;
});
}
/**
 * Map a backend error object onto a human-readable message: known backend
 * error strings become friendly Messages constants, other backend strings are
 * surfaced verbatim, non-200 statuses become a generic connection message,
 * and anything else is JSON-serialized as a last resort.
 */
extractError(err: any): string {
  // TODO: Error handling between BE and FE needs a major redesign.
  if (err.error != null && err.error.error != null) {
    const rawError = err.error.error;
    if (rawError.includes("password")) return Messages.INCORRECT_PASSWORD;
    if (rawError.includes("not sufficient")) return Messages.INSUFFICIENT_BALANCE;
    if (rawError.includes("RuleErrorTxnMustHaveAtLeastOneInput")) return Messages.SEND_DESO_MIN;
    const looksLikeBadKey =
      (rawError.includes("SendDeSo: Problem") && rawError.includes("Invalid input format")) ||
      rawError.includes("Checksum does not match");
    if (looksLikeBadKey) return Messages.INVALID_PUBLIC_KEY;
    return rawError;
  }
  if (err.status != null && err.status !== 200) {
    return Messages.CONNECTION_PROBLEM;
  }
  // Unknown failure shape: surface the serialized error verbatim.
  return JSON.stringify(err);
}
// Each updater below flips its own spinner flag and delegates to
// updateGlobalParams(), passing -1 as the sentinel for "leave unchanged".
updateGlobalParamUSDPerBitcoin() {
this.updatingUSDToBitcoin = true;
this.updateGlobalParams(this.updateGlobalParamsValues.USDPerBitcoin, -1, -1, -1, -1);
}
updateGlobalParamCreateProfileFee() {
this.updatingCreateProfileFee = true;
this.updateGlobalParams(-1, this.updateGlobalParamsValues.CreateProfileFeeNanos, -1, -1, -1);
}
updateGlobalParamMinimumNetworkFee() {
this.updatingMinimumNetworkFee = true;
this.updateGlobalParams(-1, -1, this.updateGlobalParamsValues.MinimumNetworkFeeNanosPerKB, -1, -1);
}
updateGlobalParamMaxCopiesPerNFT() {
this.updatingMaxCopiesPerNFT = true;
this.updateGlobalParams(-1, -1, -1, this.updateGlobalParamsValues.MaxCopiesPerNFT, -1);
}
updateGlobalParamCreateNFTFeeNanos() {
this.updatingCreateNFTFeeNanos = true;
this.updateGlobalParams(-1, -1, -1, -1, this.updateGlobalParamsValues.CreateNFTFeeNanos);
}
/**
 * After a confirmation dialog, push the USD-to-DeSo reserve exchange rate
 * (entered in dollars, sent in cents) to the node.
 */
updateUSDToDeSoReserveExchangeRate(): void {
SwalHelper.fire({
target: this.globalVars.getTargetComponentSelector(),
title: "Are you ready?",
html: `You are about to update the reserve exchange rate of USD to DeSo to be $${this.usdToDeSoReserveExchangeRate}`,
showConfirmButton: true,
showCancelButton: true,
customClass: {
confirmButton: "btn btn-light",
cancelButton: "btn btn-light no",
},
reverseButtons: true,
}).then((res) => {
if (res.isConfirmed) {
this.submittingUSDToDeSoReserveExchangeRateUpdate = true;
this.backendApi
.SetUSDCentsToDeSoReserveExchangeRate(
this.globalVars.localNode,
this.globalVars.loggedInUser.PublicKeyBase58Check,
// Dollars -> cents for the API.
this.usdToDeSoReserveExchangeRate * 100
)
.subscribe(
(res: any) => {
console.log(res);
this.globalVars._alertSuccess(
sprintf("Successfully updated the reserve exchange to $%d/DeSo", res.USDCentsPerDeSo / 100)
);
},
(err: any) => {
this.globalVars._alertError(this.extractError(err));
}
)
.add(() => (this.submittingUSDToDeSoReserveExchangeRateUpdate = false));
}
});
}
/**
 * After a confirmation dialog, push the Buy-DeSo fee (entered as a percent,
 * sent as basis points) to the node.
 */
updateBuyDeSoFeeRate(): void {
  SwalHelper.fire({
    target: this.globalVars.getTargetComponentSelector(),
    title: "Are you ready?",
    html: `You are about to update the Buy DeSo Fee to be ${this.buyDeSoFeeRate}%`,
    showConfirmButton: true,
    showCancelButton: true,
    customClass: {
      confirmButton: "btn btn-light",
      cancelButton: "btn btn-light no",
    },
    reverseButtons: true,
  }).then((res) => {
    if (res.isConfirmed) {
      this.submittingBuyDeSoFeeRate = true;
      this.backendApi
        .SetBuyDeSoFeeBasisPoints(
          this.globalVars.localNode,
          this.globalVars.loggedInUser.PublicKeyBase58Check,
          // Percent -> basis points for the API.
          this.buyDeSoFeeRate * 100
        )
        .subscribe(
          (res: any) => {
            console.log(res);
            // Fixed: the message used to read res.USDCentsPerDeSo (a field of
            // the exchange-rate endpoint, copy-pasted from the method above)
            // and ended with a bare "%", which sprintf treats as an incomplete
            // conversion specifier. BuyDeSoFeeBasisPoints matches the field
            // this endpoint's getter reads — confirm against the API response.
            this.globalVars._alertSuccess(
              sprintf("Successfully updated the Buy DeSo Fee to %d%%", res.BuyDeSoFeeBasisPoints / 100)
            );
          },
          (err: any) => {
            this.globalVars._alertError(this.extractError(err));
          }
        )
        .add(() => (this.submittingBuyDeSoFeeRate = false));
    }
  });
}
/**
 * Build a confirmation dialog describing every param being changed (-1 means
 * "leave unchanged") and, on confirm, submit an UpdateGlobalParams txn.
 * Display units are converted back to wire units (USD -> cents, DeSo -> nanos).
 * Always clears all per-param spinner flags when the dialog resolves.
 */
updateGlobalParams(
usdPerBitcoin: number,
createProfileFeeNanos: number,
minimumNetworkFeeNanosPerKB: number,
maxCopiesPerNFT: number,
createNFTFeeNanos: number
) {
// Only mention the params actually being changed in the confirmation text.
const updateBitcoinMessage = usdPerBitcoin >= 0 ? `Update Bitcoin to USD exchange rate: ${usdPerBitcoin}\n` : "";
const createProfileFeeNanosMessage =
createProfileFeeNanos >= 0 ? `Create Profile Fee (in $DESO): ${createProfileFeeNanos}\n` : "";
const minimumNetworkFeeNanosPerKBMessage =
minimumNetworkFeeNanosPerKB >= 0 ? `Minimum Network Fee Nanos Per KB: ${minimumNetworkFeeNanosPerKB}\n` : "";
const maxCopiesMessage = maxCopiesPerNFT >= 0 ? `Max Copies Per NFT: ${maxCopiesPerNFT}\n` : "";
const createNFTFeeNanosMessage = createNFTFeeNanos >= 0 ? `Create NFT Fee (in $DESO): ${createNFTFeeNanos}\n` : "";
SwalHelper.fire({
target: this.globalVars.getTargetComponentSelector(),
title: "Are you ready?",
html: `${updateBitcoinMessage}${createProfileFeeNanosMessage}${minimumNetworkFeeNanosPerKBMessage}${maxCopiesMessage}${createNFTFeeNanosMessage}`,
showConfirmButton: true,
showCancelButton: true,
customClass: {
confirmButton: "btn btn-light",
cancelButton: "btn btn-light no",
},
reverseButtons: true,
})
.then((res: any) => {
if (res.isConfirmed) {
this.updatingGlobalParams = true;
console.log(maxCopiesPerNFT);
this.backendApi
.UpdateGlobalParams(
this.globalVars.localNode,
this.globalVars.loggedInUser.PublicKeyBase58Check,
usdPerBitcoin >= 0 ? usdPerBitcoin * 100 : -1,
createProfileFeeNanos >= 0 ? createProfileFeeNanos * 1e9 : -1,
minimumNetworkFeeNanosPerKB >= 0 ? minimumNetworkFeeNanosPerKB : -1,
maxCopiesPerNFT >= 0 ? maxCopiesPerNFT : -1,
createNFTFeeNanos >= 0 ? createNFTFeeNanos * 1e9 : -1,
// Fee rate for the txn itself: prefer an explicitly entered minimum
// network fee, then the last-loaded chain value, then the component
// default.
minimumNetworkFeeNanosPerKB >= 0
? minimumNetworkFeeNanosPerKB
: this.globalParams.MinimumNetworkFeeNanosPerKB >= 0
? this.globalParams.MinimumNetworkFeeNanosPerKB
: Math.floor(parseFloat(this.feeRateDeSoPerKB) * 1e9)
)
.subscribe(
(res: any) => {
if (res == null || res.FeeNanos == null) {
this.globalVars._alertError(Messages.CONNECTION_PROBLEM);
return null;
}
// Save the minimum network fee in case we update that value then update a different global param without
// updating the minimum network fee.
if (minimumNetworkFeeNanosPerKB >= 0) {
this.globalParams.MinimumNetworkFeeNanosPerKB = minimumNetworkFeeNanosPerKB;
}
const totalFeeDeSo = res.FeeNanos / 1e9;
this.globalVars._alertSuccess(
sprintf(
"Successfully updated global params rate. TxID: %s for a fee of %d DeSo",
res.TransactionIDBase58Check,
totalFeeDeSo
)
);
},
(error) => {
console.error(error);
this.globalVars._alertError(this.extractError(error));
}
)
.add(() => {
this.updatingGlobalParams = false;
});
}
})
.finally(() => {
// Clear every per-param spinner regardless of which updater invoked us.
this.updatingUSDToBitcoin = false;
this.updatingCreateProfileFee = false;
this.updatingMinimumNetworkFee = false;
this.updatingMaxCopiesPerNFT = false;
this.updatingCreateNFTFeeNanos = false;
});
}
/** Ask the node to re-process a Bitcoin block identified by hash or height. */
reprocessBitcoinBlock() {
  if (this.bitcoinBlockHashOrHeight === "") {
    this.globalVars._alertError("Please enter either a Bitcoin block hash or a Bitcoin block height.");
    return;
  }
  this.submittingReprocessRequest = true;
  this.backendApi
    .AdminReprocessBitcoinBlock(
      this.globalVars.localNode,
      this.globalVars.loggedInUser.PublicKeyBase58Check,
      this.bitcoinBlockHashOrHeight
    )
    .subscribe(
      (res: any) => {
        // Treat a response without a Message as a connectivity failure.
        if (res == null || res.Message == null) {
          this.globalVars._alertError(Messages.CONNECTION_PROBLEM);
          return null;
        }
        // Clear the input on success so the next block can be entered.
        this.bitcoinBlockHashOrHeight = "";
        this.globalVars._alertSuccess(res.Message);
      },
      (error) => {
        console.error(error);
        this.globalVars._alertError(this.extractError(error));
      }
    )
    .add(() => (this.submittingReprocessRequest = false));
}
/**
 * After a confirmation dialog, evict the comma-separated list of unmined
 * Bitcoin txn hashes from the mempool. With dryRun the node only reports what
 * would be evicted.
 */
evictBitcoinExchangeTxns(dryRun: boolean) {
SwalHelper.fire({
target: this.globalVars.getTargetComponentSelector(),
title: "Are you ready?",
html: `About to evict ${this.evictBitcoinTxnHashes} with DryRun=${dryRun}`,
showConfirmButton: true,
showCancelButton: true,
customClass: {
confirmButton: "btn btn-light",
cancelButton: "btn btn-light no",
},
reverseButtons: true,
})
.then((res: any) => {
if (res.isConfirmed) {
this.submittingEvictUnminedBitcoinTxns = true;
this.backendApi
.EvictUnminedBitcoinTxns(
this.globalVars.localNode,
this.globalVars.loggedInUser.PublicKeyBase58Check,
// The form field holds a comma-separated list of txn hashes.
this.evictBitcoinTxnHashes.split(","),
dryRun
)
.subscribe(
(res: any) => {
if (res == null) {
this.globalVars._alertError(Messages.CONNECTION_PROBLEM);
return null;
}
this.globalVars._alertSuccess(
`Success! Lost ${res.TotalMempoolTxns - res.MempoolTxnsLeftAfterEviction} mempool
txns with ${res.TotalMempoolTxns} total txns in the mempool before eviction.
Types: ${JSON.stringify(res.TxnTypesEvicted, null, 2)}.
Check the response of this request in the browser's inspector for more information.`
);
},
(error) => {
console.error(error);
this.globalVars._alertError(this.extractError(error));
}
)
.add(() => {
this.submittingEvictUnminedBitcoinTxns = false;
});
}
})
.finally(() => {
// Also reset the flag when the dialog is cancelled.
this.submittingEvictUnminedBitcoinTxns = false;
});
}
/** Grant a verification badge to the username typed into the form. */
grantVerificationBadge() {
  if (this.usernameToVerify === "") {
    this.globalVars._alertError("Please enter a valid username.");
    return;
  }
  this.submittingVerifyRequest = true;
  this.backendApi
    .AdminGrantVerificationBadge(
      this.globalVars.localNode,
      this.globalVars.loggedInUser.PublicKeyBase58Check,
      this.usernameToVerify
    )
    .subscribe(
      (res: any) => this.globalVars._alertSuccess(res.Message),
      (error) => this.globalVars._alertError(this.extractError(error))
    )
    .add(() => (this.submittingVerifyRequest = false));
}
/**
 * Fetch the admin-only data for the public key typed into the form and store
 * the raw response for display.
 */
getUserAdminDataClicked() {
  if (this.getUserAdminDataPublicKey === "") {
    // Fixed: the message previously said "username" even though this form
    // field (getUserAdminDataPublicKey) takes a public key.
    this.globalVars._alertError("Please enter a valid public key.");
    return;
  }
  this.submittingGetUserAdminData = true;
  this.backendApi
    .AdminGetUserAdminData(
      this.globalVars.localNode,
      this.globalVars.loggedInUser.PublicKeyBase58Check,
      this.getUserAdminDataPublicKey
    )
    .subscribe(
      (res: any) => {
        this.getUserAdminDataResponse = res;
      },
      (error) => {
        this.globalVars._alertError(this.extractError(error));
      }
    )
    .add(() => {
      this.submittingGetUserAdminData = false;
    });
}
/**
 * Remove the verification badge from the username typed into the form.
 * (Name kept PascalCase for template compatibility.)
 */
RemoveVerification() {
  if (this.usernameForWhomToRemoveVerification === "") {
    this.globalVars._alertError("Please enter a valid username.");
    return;
  }
  this.submittingRemovalRequest = true;
  this.backendApi
    .AdminRemoveVerificationBadge(
      this.globalVars.localNode,
      this.globalVars.loggedInUser.PublicKeyBase58Check,
      this.usernameForWhomToRemoveVerification
    )
    .subscribe(
      (res: any) => this.globalVars._alertSuccess(res.Message),
      (error) => this.globalVars._alertError(this.extractError(error))
    )
    .add(() => (this.submittingRemovalRequest = false));
}
/**
 * Swap the on-chain identities of the two accounts entered in the form.
 * Both the "from" and "to" fields accept a username or a public key.
 */
swapIdentity() {
  if (this.swapIdentityFromUsernameOrPublicKey === "") {
    this.globalVars._alertError("Please enter the username or public key of the user you are swapping *from*");
    return;
  }
  if (this.swapIdentityToUsernameOrPublicKey === "") {
    this.globalVars._alertError("Please enter the username or public key of the user you are swapping *to*");
    return;
  }
  this.submittingSwapIdentity = true;
  const minFeeRateNanosPerKB = Math.floor(parseFloat(this.feeRateDeSoPerKB) * 1e9);
  this.backendApi
    .SwapIdentity(
      this.globalVars.localNode,
      this.globalVars.loggedInUser.PublicKeyBase58Check,
      this.swapIdentityFromUsernameOrPublicKey,
      this.swapIdentityToUsernameOrPublicKey,
      minFeeRateNanosPerKB
    )
    .subscribe(
      (res: any) => {
        if (res == null) {
          this.globalVars._alertError(Messages.CONNECTION_PROBLEM);
          return null;
        }
        this.globalVars._alertSuccess("Identities successfully swapped!");
      },
      (error) => {
        console.error(error);
        this.globalVars._alertError(this.extractError(error));
      }
    )
    .add(() => (this.submittingSwapIdentity = false));
}
/**
 * Look up the global metadata and profile for the public key entered in the
 * UpdateUsername form; marks searchedForPubKey so updateUsername() may proceed.
 */
getUserMetadata() {
  const node = this.globalVars.localNode;
  const adminKey = this.globalVars.loggedInUser.PublicKeyBase58Check;
  this.backendApi.AdminGetUserGlobalMetadata(node, adminKey, this.changeUsernamePublicKey).subscribe(
    (res) => {
      this.userMetadataToUpdate = res.UserMetadata;
      this.userProfileEntryResponseToUpdate = res.UserProfileEntryResponse;
      this.searchedForPubKey = true;
    },
    (err) => console.log(err)
  );
}
/**
 * After a confirmation dialog, change (or set) the username of the public key
 * looked up via getUserMetadata(). Requires a prior lookup so existing profile
 * parameters can be preserved in the UpdateProfile call.
 */
updateUsername() {
if (!this.searchedForPubKey) {
return SwalHelper.fire({
target: this.globalVars.getTargetComponentSelector(),
icon: "warning",
title: "Search for public key before updating username",
});
}
// Describe whether we are renaming an existing profile or naming a bare key.
const infoMsg = this.userProfileEntryResponseToUpdate
? `Change ${this.userProfileEntryResponseToUpdate.Username} to ${this.usernameTarget}`
: `Set username to ${this.usernameTarget} for public key ${this.changeUsernamePublicKey}`;
SwalHelper.fire({
target: this.globalVars.getTargetComponentSelector(),
icon: "info",
title: `Updating Username`,
html: infoMsg,
showCancelButton: true,
showConfirmButton: true,
focusConfirm: true,
customClass: {
confirmButton: "btn btn-light",
cancelButton: "btn btn-light no",
},
reverseButtons: true,
}).then((res: any) => {
if (res.isConfirmed) {
this.submittingUpdateUsername = true;
// Preserve the profile's existing coin parameters; fall back to the
// defaults used elsewhere (10% founder reward, 1.25x stake multiple).
const creatorCoinBasisPoints = this.userProfileEntryResponseToUpdate?.CoinEntry?.CreatorBasisPoints || 10 * 100;
const stakeMultipleBasisPoints =
this.userProfileEntryResponseToUpdate?.StakeMultipleBasisPoints || 1.25 * 100 * 100;
return this.backendApi
.UpdateProfile(
environment.verificationEndpointHostname,
this.globalVars.loggedInUser.PublicKeyBase58Check /*UpdaterPublicKeyBase58Check*/,
this.changeUsernamePublicKey /*ProfilePublicKeyBase58Check*/,
// Start params
this.usernameTarget /*NewUsername*/,
"" /*NewDescription*/,
"" /*NewProfilePic*/,
creatorCoinBasisPoints /*NewCreatorBasisPoints*/,
stakeMultipleBasisPoints /*NewStakeMultipleBasisPoints*/,
false /*IsHidden*/,
// End params
this.globalVars.feeRateDeSoPerKB * 1e9 /*MinFeeRateNanosPerKB*/
)
.subscribe(
() => {
// Flash the "username" success indicator for one second.
this.updateProfileSuccessType = "username";
this.clearSuccessTimeout = setTimeout(() => {
this.updateProfileSuccessType = "";
}, 1000);
},
(err) => {
this.globalVars._alertError(JSON.stringify(err.error));
}
)
.add(() => {
this.submittingUpdateUsername = false;
});
}
});
}
}
import {createStore, createEvent, guard, Store, Event, Unit} from 'effector'
const typecheck = '{global}'
// Type-level tests for guard() called with explicit generic parameters.
// `typecheck` is a placeholder the test harness fills with the compiler's
// output for this file; each inline snapshot pins the expected type errors
// (or their absence). Do not edit code here without regenerating snapshots.
describe('explicit generics', () => {
  test('guard<A>({source, clock, filter})', () => {
    const source = createEvent<string | null>()
    const clock = createEvent<number>()
    const filter = createStore(true)
    const withFilterStore: Event<string | null> = guard<string | null>({
      source,
      clock,
      filter,
    })
    const withFilterFn: Event<string | null> = guard<string | null>({
      source,
      clock,
      filter: e => e !== null,
    })
    // Boolean as a filter narrows `string | null` down to `string`.
    const withFilterBoolean: Event<string> = guard<string | null>({
      source,
      clock,
      filter: Boolean,
    })
    expect(typecheck).toMatchInlineSnapshot(`
      "
      no errors
      "
    `)
  })
  test('guard<A>({source, filter})', () => {
    const source = createEvent<string | null>()
    const filter = createStore(true)
    const withFilterStore: Event<string | null> = guard<string | null>({
      source,
      filter,
    })
    const withFilterFn: Event<string | null> = guard<string | null>({
      source,
      filter: e => e !== null,
    })
    const withFilterBoolean: Event<string> = guard<string | null>({
      source,
      filter: Boolean,
    })
    expect(typecheck).toMatchInlineSnapshot(`
      "
      no errors
      "
    `)
  })
  test('guard<A>({clock, filter})', () => {
    const clock = createEvent<string | null>()
    const filter = createStore(true)
    const withFilterStore: Event<string | null> = guard<string | null>({
      clock,
      filter,
    })
    const withFilterFn: Event<string | null> = guard<string | null>({
      clock,
      filter: e => e !== null,
    })
    const withFilterBoolean: Event<string> = guard<string | null>({
      clock,
      filter: Boolean,
    })
    expect(typecheck).toMatchInlineSnapshot(`
      "
      no errors
      "
    `)
  })
  // With an explicit single generic plus a `target`, inference of the target
  // type currently fails — these errors are the expected behavior.
  test('guard<A>({source, filter, target})', () => {
    const source = createEvent<string | null>()
    const filter = createStore(true)
    const target = createEvent<string>()
    const withFilterStore: Unit<string | null> = guard<string | null>({
      source,
      filter,
      target,
    })
    const withFilterFn: Unit<string | null> = guard<string | null>({
      source,
      filter: e => e !== null,
      target,
    })
    const withFilterBoolean: Unit<string> = guard<string | null>({
      source,
      filter: Boolean,
      target,
    })
    expect(typecheck).toMatchInlineSnapshot(`
      "
      Type 'Target' is not assignable to type 'Unit<string | null>'.
      Type 'Tuple<any>' is not assignable to type 'Unit<string | null>'.
      Type 'Target' is not assignable to type 'Unit<string | null>'.
      Type 'Tuple<any>' is not assignable to type 'Unit<string | null>'.
      Type 'Target' is not assignable to type 'Unit<string>'.
      Type 'Tuple<any>' is not assignable to type 'Unit<string>'.
      "
    `)
  })
  // Two explicit generics: the second is the narrowed result type produced
  // by a type-guard filter function.
  test('guard<Source, Result>({source, filter})', () => {
    const source = createEvent<string | null>()
    const clock = createEvent<string | null>()
    const result1: Event<string> = guard<string | null, string>({
      source,
      clock,
      filter: (e): e is string => e !== null,
    })
    const result2: Event<string> = guard<string | null, string>({
      source,
      filter: (e): e is string => e !== null,
    })
    const result3: Event<string> = guard<string | null, string>({
      clock,
      filter: (e): e is string => e !== null,
    })
    expect(typecheck).toMatchInlineSnapshot(`
      "
      no errors
      "
    `)
  })
  test('guard<Source, Result>({source, filter, target})', () => {
    const source = createEvent<string | null>()
    const target = createEvent<string>()
    const result: Unit<string> = guard<string | null, string>({
      source,
      filter: (e): e is string => e !== null,
      target,
    })
    expect(typecheck).toMatchInlineSnapshot(`
      "
      Type 'Target' is not assignable to type 'Unit<string>'.
      Type 'Tuple<any>' is not assignable to type 'Unit<string>'.
      "
    `)
  })
})
// Type tests for the two-argument form guard(source, config). Each nested
// describe covers one filter flavor (store, predicate fn, Boolean), with and
// without a `target`. Snapshots pin the expected compiler output; the
// "(should fail)" tests assert that specific type errors are reported.
describe('guard(source, config)', () => {
  describe('guard(source, {filter: store})', () => {
    it('return new event (should pass)', () => {
      const trigger: Event<number> = createEvent()
      const allow = createStore<boolean>(false)
      const result: Event<number> = guard(trigger, {filter: allow})
      expect(typecheck).toMatchInlineSnapshot(`
        "
        no errors
        "
      `)
    })
    it('support any unit (should pass)', () => {
      const trigger: Store<number[]> = createStore([1])
      const allow = createStore<boolean>(false)
      const result: Event<number[]> = guard(trigger, {filter: allow})
      expect(typecheck).toMatchInlineSnapshot(`
        "
        no errors
        "
      `)
    })
    test('store is not boolean (should fail)', () => {
      const trigger: Event<number> = createEvent()
      const allow: Store<string> = createStore('no')
      //@ts-expect-error
      guard(trigger, {filter: allow})
      expect(typecheck).toMatchInlineSnapshot(`
        "
        No overload matches this call.
        The last overload gave the following error.
        Type 'Store<string>' is not assignable to type 'Store<boolean> | ((source: number) => boolean)'.
        Type 'Store<string>' is not assignable to type 'Store<boolean>'.
        The types returned by 'getState()' are incompatible between these types.
        Type 'string' is not assignable to type 'boolean'.
        "
      `)
    })
    test('result type mismatch (should fail)', () => {
      const trigger: Event<number> = createEvent()
      const allow: Store<string> = createStore('no')
      //@ts-expect-error
      const result: Event<string> = guard(trigger, {filter: allow})
      expect(typecheck).toMatchInlineSnapshot(`
        "
        No overload matches this call.
        The last overload gave the following error.
        Type 'Store<string>' is not assignable to type 'Store<boolean> | ((source: number) => boolean)'.
        Type 'Store<string>' is not assignable to type 'Store<boolean>'.
        "
      `)
    })
    describe('support target field', () => {
      it('allow to pass target field (should pass)', () => {
        const trigger: Event<number> = createEvent()
        const allow = createStore<boolean>(false)
        const target: Store<number> = createStore(0)
        guard(trigger, {
          filter: allow,
          target,
        })
        expect(typecheck).toMatchInlineSnapshot(`
          "
          no errors
          "
        `)
      })
      test('type mismatch (should fail)', () => {
        const trigger: Event<number> = createEvent()
        const allow = createStore<boolean>(false)
        const target: Store<string> = createStore('no')
        guard(trigger, {
          filter: allow,
          //@ts-expect-error
          target,
        })
        expect(typecheck).toMatchInlineSnapshot(`
          "
          No overload matches this call.
          The last overload gave the following error.
          Type 'Store<string>' is not assignable to type '\\"incompatible unit in target\\"'.
          "
        `)
      })
    })
  })
  describe('guard(source, {filter: fn})', () => {
    it('returns new event (should pass)', () => {
      const trigger: Event<number> = createEvent()
      const result: Event<number> = guard(trigger, {filter: n => n > 0})
      expect(typecheck).toMatchInlineSnapshot(`
        "
        no errors
        "
      `)
    })
    test('result type mismatch (should fail)', () => {
      const trigger: Event<number> = createEvent()
      const allow: Store<string> = createStore('no')
      //@ts-expect-error
      const result: Event<string> = guard(trigger, {filter: x => x > 0})
      expect(typecheck).toMatchInlineSnapshot(`
        "
        Type 'Event<number>' is not assignable to type 'Event<string>'.
        "
      `)
    })
    describe('support target field', () => {
      it('allow to pass target field (should pass)', () => {
        const trigger: Event<number> = createEvent()
        const target: Store<number> = createStore(0)
        guard(trigger, {
          filter: x => x > 0,
          target,
        })
        expect(typecheck).toMatchInlineSnapshot(`
          "
          no errors
          "
        `)
      })
      test('type mismatch (should fail)', () => {
        const trigger: Event<number> = createEvent()
        const target: Store<string> = createStore('no')
        guard(trigger, {
          filter: x => x > 0,
          //@ts-expect-error
          target,
        })
        expect(typecheck).toMatchInlineSnapshot(`
          "
          No overload matches this call.
          The last overload gave the following error.
          Type 'Store<string>' is not assignable to type '\\"incompatible unit in target\\"'.
          "
        `)
      })
    })
  })
  describe('guard(source, {filter: Boolean})', () => {
    // Boolean as a filter should narrow `User | null` to `User`.
    it('returns new event (should pass)', () => {
      type User = {name: string}
      const trigger: Event<User | null> = createEvent()
      const result: Event<User> = guard(trigger, {filter: Boolean})
      expect(typecheck).toMatchInlineSnapshot(`
        "
        no errors
        "
      `)
    })
    test('result type mismatch (should fail)', () => {
      type User = {name: string}
      const trigger: Event<User | null> = createEvent()
      //@ts-expect-error
      const result: Event<string> = guard(trigger, {filter: Boolean})
      expect(typecheck).toMatchInlineSnapshot(`
        "
        Type 'Event<User>' is not assignable to type 'Event<string>'.
        Types of property 'watch' are incompatible.
        Type '(watcher: (payload: User) => any) => Subscription' is not assignable to type '(watcher: (payload: string) => any) => Subscription'.
        Types of parameters 'watcher' and 'watcher' are incompatible.
        Types of parameters 'payload' and 'payload' are incompatible.
        Type 'User' is not assignable to type 'string'.
        "
      `)
    })
    describe('support target field', () => {
      it('allow to pass target field (should pass)', () => {
        type User = {name: string}
        const trigger: Event<User | null> = createEvent()
        const target = createStore<User>({name: 'alice'})
        guard(trigger, {
          filter: Boolean,
          target,
        })
        expect(typecheck).toMatchInlineSnapshot(`
          "
          no errors
          "
        `)
      })
      test('type mismatch (should fail)', () => {
        type User = {name: string}
        const trigger: Event<User | null> = createEvent()
        const target: Store<string> = createStore('no')
        guard(trigger, {
          filter: Boolean,
          //@ts-expect-error
          target,
        })
        expect(typecheck).toMatchInlineSnapshot(`
          "
          No overload matches this call.
          The last overload gave the following error.
          Type 'Store<string>' is not assignable to type '\\"incompatible unit in target\\"'.
          "
        `)
      })
    })
  })
})
// Type tests for the single-config form guard({source, filter, target}).
// Mirrors the guard(source, config) suite above: one nested describe per
// filter flavor, with "(should fail)" tests pinning the exact type errors.
describe('guard(config)', () => {
  describe('guard({source, filter: store})', () => {
    it('return new event (should pass)', () => {
      const trigger: Event<number> = createEvent()
      const allow = createStore<boolean>(false)
      const result: Event<number> = guard({
        source: trigger,
        filter: allow,
      })
      expect(typecheck).toMatchInlineSnapshot(`
        "
        no errors
        "
      `)
    })
    it('support any unit (should pass)', () => {
      const trigger: Store<number[]> = createStore([1])
      const allow = createStore<boolean>(false)
      const result: Event<number[]> = guard({
        source: trigger,
        filter: allow,
      })
      expect(typecheck).toMatchInlineSnapshot(`
        "
        no errors
        "
      `)
    })
    test('store is not boolean (should fail)', () => {
      const trigger: Event<number> = createEvent()
      const allow: Store<string> = createStore('no')
      guard({
        source: trigger,
        //@ts-expect-error
        filter: allow,
      })
      expect(typecheck).toMatchInlineSnapshot(`
        "
        No overload matches this call.
        The last overload gave the following error.
        Type 'Store<string>' is not assignable to type 'Store<boolean> | ((source: number) => boolean)'.
        Type 'Store<string>' is not assignable to type 'Store<boolean>'.
        "
      `)
    })
    test('result type mismatch (should fail)', () => {
      const trigger: Event<number> = createEvent()
      const allow: Store<string> = createStore('no')
      guard({
        source: trigger,
        //@ts-expect-error
        filter: allow,
      })
      expect(typecheck).toMatchInlineSnapshot(`
        "
        No overload matches this call.
        The last overload gave the following error.
        Type 'Store<string>' is not assignable to type 'Store<boolean> | ((source: number) => boolean)'.
        Type 'Store<string>' is not assignable to type 'Store<boolean>'.
        "
      `)
    })
    describe('support target field', () => {
      it('allow to pass target field (should pass)', () => {
        const trigger: Event<number> = createEvent()
        const allow = createStore<boolean>(false)
        const target: Store<number> = createStore(0)
        guard({
          source: trigger,
          filter: allow,
          target,
        })
        expect(typecheck).toMatchInlineSnapshot(`
          "
          no errors
          "
        `)
      })
      test('type mismatch (should fail)', () => {
        const trigger: Event<number> = createEvent()
        const allow = createStore<boolean>(false)
        const target: Store<string> = createStore('no')
        guard({
          source: trigger,
          filter: allow,
          //@ts-expect-error
          target,
        })
        expect(typecheck).toMatchInlineSnapshot(`
          "
          No overload matches this call.
          The last overload gave the following error.
          Type 'Store<string>' is not assignable to type '\\"incompatible unit in target\\"'.
          "
        `)
      })
    })
  })
  describe('guard({source, filter: fn})', () => {
    it('returns new event (should pass)', () => {
      const trigger: Event<number> = createEvent()
      const result: Event<number> = guard({
        source: trigger,
        filter: n => n > 0,
      })
      expect(typecheck).toMatchInlineSnapshot(`
        "
        no errors
        "
      `)
    })
    test('result type mismatch (should fail)', () => {
      const trigger: Event<number> = createEvent()
      const allow: Store<string> = createStore('no')
      //@ts-expect-error
      const result: Event<string> = guard({
        source: trigger,
        filter: n => n > 0,
      })
      expect(typecheck).toMatchInlineSnapshot(`
        "
        Type 'Event<number>' is not assignable to type 'Event<string>'.
        "
      `)
    })
    describe('support target field', () => {
      it('allow to pass target field (should pass)', () => {
        const trigger: Event<number> = createEvent()
        const target: Store<number> = createStore(0)
        guard({
          source: trigger,
          filter: x => x > 0,
          target,
        })
        expect(typecheck).toMatchInlineSnapshot(`
          "
          no errors
          "
        `)
      })
      test('type mismatch (should fail)', () => {
        const trigger: Event<number> = createEvent()
        const target: Store<string> = createStore('no')
        guard({
          source: trigger,
          filter: x => x > 0,
          //@ts-expect-error
          target,
        })
        expect(typecheck).toMatchInlineSnapshot(`
          "
          No overload matches this call.
          The last overload gave the following error.
          Type 'Store<string>' is not assignable to type '\\"incompatible unit in target\\"'.
          "
        `)
      })
      // A target of Event<void> should accept any source payload type.
      describe('any to void', () => {
        test('with store (should pass)', () => {
          const filter = createStore(true)
          const source = createEvent<string>()
          const target = createEvent<void>()
          guard({
            source,
            filter,
            target,
          })
          expect(typecheck).toMatchInlineSnapshot(`
            "
            no errors
            "
          `)
        })
        test('with function (should pass)', () => {
          const source = createEvent<{pass: boolean}>()
          const target = createEvent<void>()
          guard({
            source,
            filter: ({pass}) => pass,
            target,
          })
          expect(typecheck).toMatchInlineSnapshot(`
            "
            no errors
            "
          `)
        })
      })
    })
  })
  describe('guard({source, filter: Boolean})', () => {
    // Boolean as a filter should narrow `User | null` to `User`.
    it('returns new event (should pass)', () => {
      type User = {name: string}
      const trigger: Event<User | null> = createEvent()
      const result: Event<User> = guard({
        source: trigger,
        filter: Boolean,
      })
      expect(typecheck).toMatchInlineSnapshot(`
        "
        no errors
        "
      `)
    })
    test('result type mismatch (should fail)', () => {
      type User = {name: string}
      const trigger: Event<User> = createEvent()
      //@ts-expect-error
      const result: Event<string> = guard({
        source: trigger,
        filter: Boolean,
      })
      expect(typecheck).toMatchInlineSnapshot(`
        "
        Type 'Event<User>' is not assignable to type 'Event<string>'.
        Types of property 'watch' are incompatible.
        Type '(watcher: (payload: User) => any) => Subscription' is not assignable to type '(watcher: (payload: string) => any) => Subscription'.
        Types of parameters 'watcher' and 'watcher' are incompatible.
        Types of parameters 'payload' and 'payload' are incompatible.
        Type 'User' is not assignable to type 'string'.
        "
      `)
    })
    describe('support target field', () => {
      it('allow to pass target field (should pass)', () => {
        type User = {name: string}
        const trigger: Event<User | null> = createEvent()
        const target: Store<User> = createStore({name: 'alice'})
        guard({
          source: trigger,
          filter: Boolean,
          target,
        })
        expect(typecheck).toMatchInlineSnapshot(`
          "
          no errors
          "
        `)
      })
      test('type mismatch (should fail)', () => {
        type User = {name: string}
        const trigger: Event<User> = createEvent()
        const target: Store<string> = createStore('no')
        guard({
          source: trigger,
          filter: Boolean,
          //@ts-expect-error
          target,
        })
        expect(typecheck).toMatchInlineSnapshot(`
          "
          No overload matches this call.
          The last overload gave the following error.
          Type 'Store<string>' is not assignable to type '\\"incompatible unit in target\\"'.
          "
        `)
      })
      test('any to void (should pass)', () => {
        const source = createEvent<{pass: boolean}>()
        const target = createEvent<void>()
        guard({
          source,
          filter: Boolean,
          target,
        })
        expect(typecheck).toMatchInlineSnapshot(`
          "
          no errors
          "
        `)
      })
    })
  })
})
test('guard return type supports union types (should pass)', () => {
const trigger: Event<{a: 1} | {a: 2}> = createEvent()
const allow = createStore<boolean>(false)
const result: Event<{a: 1} | {a: 2}> = guard(trigger, {filter: allow})
expect(typecheck).toMatchInlineSnapshot(`
"
no errors
"
`)
}) | the_stack |
import orderBy from "lodash-es/orderBy";
import { maybeDeepCopy, mergeByPk } from "./helpers";
import { team } from "../../core";
import { idb } from "..";
import { g, helpers } from "../../util";
import type {
Team,
TeamAttr,
TeamFiltered,
TeamSeasonAttr,
TeamStatAttr,
TeamStatType,
TeamStats,
GetCopyType,
} from "../../../common/types";
import { DEFAULT_POINTS_FORMULA } from "../../../common";
// Copy the requested top-level team attributes from `t` onto `output`.
// Everything is copied verbatim except `budget`, which gets a deep copy whose
// amounts are rescaled in place.
const processAttrs = <
  Attrs extends Readonly<TeamAttr[]>,
  SeasonAttrs extends Readonly<TeamSeasonAttr[]> | undefined,
  StatAttrs extends Readonly<TeamStatAttr[]> | undefined,
  Season extends number | undefined,
>(
  output: TeamFiltered<Attrs, SeasonAttrs, StatAttrs, Season>,
  t: Team,
  attrs: Attrs,
) => {
  for (const attr of attrs) {
    if (attr !== "budget") {
      // @ts-expect-error
      output[attr] = t[attr];
      continue;
    }

    // Deep copy is required because the amounts are mutated just below.
    output.budget = helpers.deepCopy(t.budget);
    for (const [budgetKey, budgetItem] of Object.entries(output.budget)) {
      // ticketPrice is the only budget value kept in raw dollars; every
      // other entry is rescaled by /1000.
      if (budgetKey !== "ticketPrice") {
        budgetItem.amount /= 1000;
      }
    }
  }
};
// Populate output.seasonAttrs from the team's TeamSeason rows.
//
// Row loading strategy: all seasons (IndexedDB merged with cache), one recent
// season (cache only, last ~3 seasons), or one older season (IndexedDB only).
// Each requested seasonAttr is then mapped onto a plain row object; several
// attrs are derived (winp, att, lastTen, streak, pts*, revenue/profit, ...)
// rather than read straight off the TeamSeason. When `season` is defined,
// output.seasonAttrs is a single row rather than an array.
const processSeasonAttrs = async <
  Attrs extends Readonly<TeamAttr[]> | undefined,
  SeasonAttrs extends Readonly<TeamSeasonAttr[]>,
  StatAttrs extends Readonly<TeamStatAttr[]> | undefined,
  Season extends number | undefined,
>(
  output: TeamFiltered<Attrs, SeasonAttrs, StatAttrs, Season>,
  t: Team,
  seasonAttrs: SeasonAttrs,
  addDummySeason: boolean,
  season: Season,
  type: GetCopyType | undefined,
) => {
  let seasons;
  if (season === undefined) {
    // All seasons: merge DB rows with possibly-newer cached rows. "" and "Z"
    // act as lower/upper sentinels for the season part of the compound key.
    seasons = mergeByPk(
      await idb.league
        .transaction("teamSeasons")
        .store.index("tid, season")
        .getAll(IDBKeyRange.bound([t.tid], [t.tid, ""])),
      await idb.cache.teamSeasons.indexGetAll("teamSeasonsByTidSeason", [
        [t.tid],
        [t.tid, "Z"],
      ]),
      "teamSeasons",
      type,
    );
  } else if (season >= g.get("season") - 2) {
    // Single season, recent enough to still be in the cache
    seasons = await idb.cache.teamSeasons.indexGetAll(
      "teamSeasonsBySeasonTid",
      [
        [season, t.tid],
        [season, t.tid],
      ],
    );
  } else {
    // Single season, from database
    seasons = await idb.league
      .transaction("teamSeasons")
      .store.index("season, tid")
      .getAll([season as number, t.tid]);
  }
  // If a season is requested but not in the database, make a fake season so at least some dummy values are returned
  if (season !== undefined && seasons.length === 0 && addDummySeason) {
    const dummySeason = team.genSeasonRow(t);
    dummySeason.season = season as number;
    seasons = [dummySeason];
  }
  // 2020-05-02 - these were moved into TeamSeason, but I'm wary of doing an IDB upgrade, so here we are.
  // Older TeamSeason rows may lack these; fall back to the Team object below.
  const copyFromTeamIfUndefined = [
    "cid",
    "did",
    "region",
    "name",
    "abbrev",
    "imgURL",
    "colors",
  ];
  // @ts-expect-error
  output.seasonAttrs = await Promise.all(
    seasons.map(async ts => {
      const row: any = {}; // Revenue and expenses calculation
      const revenue = helpers
        .keys(ts.revenues)
        .reduce((memo, rev) => memo + ts.revenues[rev].amount, 0);
      const expense = helpers
        .keys(ts.expenses)
        .reduce((memo, rev) => memo + ts.expenses[rev].amount, 0);
      for (const temp of seasonAttrs) {
        const attr: string = temp;
        if (attr === "winp") {
          row.winp = helpers.calcWinp(ts);
        } else if (attr === "att") {
          // Average attendance per home game
          row.att = 0;
          if (!ts.hasOwnProperty("gpHome")) {
            // Older rows lack gpHome; assume half the games were at home
            ts.gpHome = Math.round(ts.gp / 2);
          }
          // See also game.js and teamFinances.js
          if (ts.gpHome > 0) {
            row.att = ts.att / ts.gpHome;
          }
        } else if (attr === "cash") {
          row.cash = ts.cash / 1000; // [millions of dollars]
        } else if (attr === "revenue") {
          row.revenue = revenue / 1000; // [millions of dollars]
        } else if (attr === "profit") {
          row.profit = (revenue - expense) / 1000; // [millions of dollars]
        } else if (attr === "salaryPaid") {
          row.salaryPaid = ts.expenses.salary.amount / 1000; // [millions of dollars]
        } else if (attr === "payroll") {
          // Current payroll is only computable for the current season
          if (season === g.get("season")) {
            row.payroll = (await team.getPayroll(t.tid)) / 1000;
          } else {
            row.payroll = undefined;
          }
        } else if (attr === "lastTen") {
          // "W-L", optionally extended with "-OTL" and/or "-T" components
          const lastTenWon = ts.lastTen.filter(x => x === 1).length;
          const lastTenLost = ts.lastTen.filter(x => x === 0).length;
          const lastTenOTL = ts.lastTen.filter(x => x === "OTL").length;
          const lastTenTied = ts.lastTen.filter(x => x === -1).length;
          row.lastTen = `${lastTenWon}-${lastTenLost}`;
          if (lastTenOTL > 0) {
            row.lastTen += `-${lastTenOTL}`;
          }
          if (lastTenTied > 0) {
            row.lastTen += `-${lastTenTied}`;
          }
        } else if (attr === "streak") {
          // For standings; positive streak = wins, negative = losses
          if (ts.streak === 0) {
            row.streak = "None";
          } else if (ts.streak > 0) {
            row.streak = `Won ${ts.streak}`;
          } else if (ts.streak < 0) {
            row.streak = `Lost ${Math.abs(ts.streak)}`;
          }
        } else if (attr === "pts") {
          row.pts = team.evaluatePointsFormula(ts, {
            season: ts.season,
          });
        } else if (attr === "ptsMax") {
          row.ptsMax = team.ptsMax(ts);
        } else if (attr === "ptsPct") {
          row.ptsPct = team.ptsPct(ts);
        } else if (attr === "ptsDefault") {
          row.ptsDefault = team.evaluatePointsFormula(ts, {
            formula: DEFAULT_POINTS_FORMULA,
            season: ts.season,
          });
        } else if (attr === "avgAge") {
          // Will be undefined if not cached, in which case will need to be dynamically computed elsewhere
          row.avgAge = ts[attr];
        } else {
          // Everything else passes straight through from the TeamSeason row
          // @ts-expect-error
          row[attr] = ts[attr];
        }
        if (row[attr] === undefined && copyFromTeamIfUndefined.includes(attr)) {
          // @ts-expect-error
          row[attr] = t[attr];
        }
      }
      return row;
    }),
  );
  if (season !== undefined) {
    // Single-season request: unwrap the one-element array
    // @ts-expect-error
    output.seasonAttrs = output.seasonAttrs[0];
  }
};
// Indexes can't filter on playoffs/regularSeason, and rows from different
// sources can come back inconsistently sorted — so do both here.
const filterOrderStats = (
  stats: TeamStats[],
  playoffs: boolean,
  regularSeason: boolean,
  type: GetCopyType | undefined,
): TeamStats[] => {
  // Keep a playoff row only when playoffs were requested; likewise for
  // regular-season rows.
  const kept = stats.filter(ts => (ts.playoffs ? playoffs : regularSeason));

  return orderBy(maybeDeepCopy(kept, type), ["season", "playoffs", "rid"]);
};
// Populate output.stats from the team's TeamStats rows.
//
// Loading strategy mirrors processSeasonAttrs: all seasons (IndexedDB merged
// with cache), the current season (cache only), or an older season
// (IndexedDB only). Rows are then filtered/sorted by playoffs/regularSeason
// and mapped through team.processStats. When a single season and exactly one
// of playoffs/regularSeason is requested, output.stats is a single object.
const processStats = async <
  Attrs extends Readonly<TeamAttr[]> | undefined,
  SeasonAttrs extends Readonly<TeamSeasonAttr[]> | undefined,
  StatAttrs extends Readonly<TeamStatAttr[]>,
  Season extends number | undefined,
>(
  output: TeamFiltered<Attrs, SeasonAttrs, StatAttrs, Season>,
  t: Team,
  stats: StatAttrs,
  playoffs: boolean,
  regularSeason: boolean,
  statType: TeamStatType,
  addDummySeason: boolean,
  showNoStats: boolean,
  season: Season | undefined,
  type: GetCopyType | undefined,
) => {
  let teamStats;
  // Cache rows are keyed by (playoffs, tid); fetch whichever kinds of rows
  // the caller asked for.
  const teamStatsFromCache = async () => {
    // Single season, from cache
    let teamStats2: TeamStats[] = [];
    if (regularSeason) {
      teamStats2 = teamStats2.concat(
        await idb.cache.teamStats.indexGetAll("teamStatsByPlayoffsTid", [
          [false, t.tid],
          [false, t.tid],
        ]),
      );
    }
    if (playoffs) {
      teamStats2 = teamStats2.concat(
        await idb.cache.teamStats.indexGetAll("teamStatsByPlayoffsTid", [
          [true, t.tid],
          [true, t.tid],
        ]),
      );
    }
    return teamStats2;
  };
  if (season === undefined) {
    // All seasons: merge DB rows with possibly-newer cached rows
    teamStats = mergeByPk(
      await idb.league
        .transaction("teamStats")
        .store.index("tid")
        .getAll(t.tid),
      await teamStatsFromCache(),
      "teamStats",
      type,
    );
  } else if (season === g.get("season")) {
    teamStats = await teamStatsFromCache();
  } else {
    // Single season, from database
    teamStats = await idb.league
      .transaction("teamStats")
      .store.index("season, tid")
      .getAll([season as number, t.tid]);
  }
  // Handle playoffs/regularSeason
  teamStats = filterOrderStats(teamStats, playoffs, regularSeason, type);
  if (teamStats.length === 0 && (addDummySeason || showNoStats)) {
    // Empty row so team.processStats still produces (zeroed) output
    teamStats.push({});
  }
  // @ts-expect-error
  output.stats = teamStats.map(ts => {
    return team.processStats(ts, stats, playoffs, statType);
  });
  if (
    season !== undefined &&
    ((playoffs && !regularSeason) || (!playoffs && regularSeason))
  ) {
    // Exactly one season and one row type requested: unwrap the array
    // @ts-expect-error
    output.stats = output.stats[0];
  }
};
// Build one filtered output object for team `t` by running the requested
// attrs/seasonAttrs/stats processors. seasonAttrs and stats are filled
// concurrently. Returns undefined when seasonAttrs or stats were requested
// but nothing could be produced, so callers can filter the team out.
const processTeam = async <
  Attrs extends Readonly<TeamAttr[]> | undefined,
  SeasonAttrs extends Readonly<TeamSeasonAttr[]> | undefined,
  StatAttrs extends Readonly<TeamStatAttr[]> | undefined,
  Season extends number | undefined,
>(
  t: Team,
  {
    season,
    attrs,
    seasonAttrs,
    stats,
    playoffs,
    regularSeason,
    statType,
    addDummySeason,
    showNoStats,
    type,
  }: {
    season?: number;
    attrs?: Attrs;
    seasonAttrs?: SeasonAttrs;
    stats?: StatAttrs;
    playoffs: boolean;
    regularSeason: boolean;
    statType: TeamStatType;
    addDummySeason: boolean;
    showNoStats: boolean;
    type: GetCopyType | undefined;
  },
) => {
  // Output starts empty and is mutated in place by the process* helpers.
  // @ts-expect-error
  const output: TeamFiltered<Attrs, SeasonAttrs, StatAttrs, Season> = {};
  if (attrs) {
    // @ts-expect-error
    processAttrs(output, t, attrs);
  }
  const promises: Promise<any>[] = [];
  if (seasonAttrs) {
    promises.push(
      // @ts-expect-error
      processSeasonAttrs(output, t, seasonAttrs, addDummySeason, season, type),
    );
  }
  if (stats) {
    promises.push(
      processStats(
        // @ts-expect-error
        output,
        t,
        stats,
        playoffs,
        regularSeason,
        statType,
        addDummySeason,
        showNoStats,
        season,
        type,
      ),
    );
  }
  await Promise.all(promises);
  // Requested data could not be produced (e.g. no TeamSeason/TeamStats row
  // and addDummySeason/showNoStats are off): signal "skip this team".
  if (seasonAttrs && (output as never as any).seasonAttrs === undefined) {
    return;
  }
  if (stats && (output as never as any).stats === undefined) {
    return;
  }
  return output;
};
/**
* Retrieve a filtered copy of a team object, or an array of all team objects.
*
* This can be used to retrieve information about a certain season, compute average statistics from the raw data, etc.
*
* If you request just one season (so, explicitly set season and then only one of playoffs and regularSeason is true), then stats and seasonAttrs will be returned as an object. Otherwise, they will be arrays of objects.
*
* @memberOf core.team
* @param {Object} options Options, as described below.
* @param {number=} options.tid Team ID. Set this if you want to return only one team object. If undefined, an array of all teams is returned, ordered by tid.
* @param {number=} options.season Season to retrieve stats/seasonAttrs for. If undefined, all seasons will be returned.
* @param {Array.<string>=} options.attrs List of team attributes to include in output (e.g. region, abbrev, name, ...).
* @param {Array.<string>=} options.seasonAttrs List of seasonal team attributes to include in output (e.g. won, lost, payroll, ...).
* @param {Array.<string=>} options.stats List of team stats to include in output (e.g. fg, orb, ast, blk, ...).
* @param {boolean=} options.playoffs Boolean representing whether to return playoff stats or not; default is false.
 * @param {boolean=} options.regularSeason Boolean representing whether to return regular season stats or not; default is true.
 * @param {string=} options.statType What type of stats to return, 'perGame' or 'totals' (default is 'perGame').
* @param {boolean=} options.addDummySeason If season is specified, should we return data for this team if it is missing a TeamStats or TeamSeasons entry?
* @param {boolean=} options.showNoStats If season is specified, should we return data for this team if it is missing a TeamStats entry, but has a TeamSeason entry? Since TeamStats is now lazily created, this is needed to return anything in preseason or regular season before games are played.
* @return {Promise.(Object|Array.<Object>)} Filtered team object or array of filtered team objects, depending on the inputs.
*/
async function getCopies<
  Attrs extends Readonly<TeamAttr[]> | undefined,
  SeasonAttrs extends Readonly<TeamSeasonAttr[]> | undefined,
  StatAttrs extends Readonly<TeamStatAttr[]> | undefined,
  Season extends number | undefined = undefined,
>(
  {
    tid,
    season,
    attrs,
    seasonAttrs,
    stats,
    playoffs = false,
    regularSeason = true,
    statType = "perGame",
    addDummySeason = false,
    active,
    showNoStats = false,
  }: {
    tid?: number;
    season?: Season;
    attrs?: Attrs;
    seasonAttrs?: SeasonAttrs;
    stats?: StatAttrs;
    playoffs?: boolean;
    regularSeason?: boolean;
    statType?: TeamStatType;
    addDummySeason?: boolean;
    active?: true;
    showNoStats?: boolean;
  } = {},
  type?: GetCopyType,
): Promise<TeamFiltered<Attrs, SeasonAttrs, StatAttrs, Season>[]> {
  // Bundle the per-team processing options once; reused for every team.
  const options = {
    season,
    attrs,
    seasonAttrs,
    stats,
    playoffs,
    regularSeason,
    statType,
    addDummySeason,
    showNoStats,
    type,
  };
  if (tid === undefined) {
    // No tid: process every team, optionally restricted to active ones.
    let teams = await idb.cache.teams.getAll();
    if (active) {
      // `disabled` only reflects the current season, so active filtering is
      // only valid for the current season.
      if (season !== undefined && season !== g.get("season")) {
        throw new Error(
          "Don't call teamsPlus with active=true for any season other than the current season",
        );
      }
      teams = teams.filter(t => !t.disabled);
    }
    // processTeam returns undefined for teams with no requested data; drop them.
    // @ts-expect-error
    return (await Promise.all(teams.map(t => processTeam(t, options)))).filter(
      x => x !== undefined,
    );
  }
  // Single team: return a 0- or 1-element array.
  const t = await idb.cache.teams.get(tid);
  if (t) {
    const val = await processTeam(t, options);
    if (val) {
      // @ts-expect-error
      return [val];
    }
  }
  return [];
}
export default getCopies; | the_stack |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.